--- a/src/Doc/Isar_Ref/Document_Preparation.thy Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Doc/Isar_Ref/Document_Preparation.thy Thu Jan 19 17:53:05 2023 +0100
@@ -117,6 +117,9 @@
@{antiquotation_def ML_functor} & : & \<open>antiquotation\<close> \\
@{antiquotation_def ML_functor_def} & : & \<open>antiquotation\<close> \\
@{antiquotation_def ML_functor_ref} & : & \<open>antiquotation\<close> \\
+ \end{matharray}
+
+ \begin{matharray}{rcl}
@{antiquotation_def emph} & : & \<open>antiquotation\<close> \\
@{antiquotation_def bold} & : & \<open>antiquotation\<close> \\
@{antiquotation_def verbatim} & : & \<open>antiquotation\<close> \\
@@ -126,6 +129,9 @@
@{antiquotation_def "file"} & : & \<open>antiquotation\<close> \\
@{antiquotation_def "url"} & : & \<open>antiquotation\<close> \\
@{antiquotation_def "cite"} & : & \<open>antiquotation\<close> \\
+ @{antiquotation_def "nocite"} & : & \<open>antiquotation\<close> \\
+ @{antiquotation_def "citet"} & : & \<open>antiquotation\<close> \\
+ @{antiquotation_def "citep"} & : & \<open>antiquotation\<close> \\
@{command_def "print_antiquotations"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
\end{matharray}
--- a/src/Pure/General/antiquote.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/General/antiquote.scala Thu Jan 19 17:53:05 2023 +0100
@@ -38,6 +38,11 @@
val antiquote: Parser[Antiquote] =
txt ^^ (x => Text(x)) | (control ^^ (x => Control(x)) | antiq ^^ (x => Antiq(x)))
+
+ def antiquote_special(special: Symbol.Symbol => Boolean): Parser[Antiquote] =
+ many1(s => !special(s)) ^^ Text.apply |
+ one(special) ~ opt(cartouche) ^^
+ { case x ~ Some(y) => Control(x + y) case x ~ None => Control(x) }
}
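
Note: `antiquote_special` splits raw input into plain `Text` chunks and `Control` chunks (a control symbol with an optional cartouche argument); it is meant to be used via the ordinary combinators of `Antiquote.Parsers`, as in `Bibtex.Cite.parse` further down. A minimal usage sketch, assuming `source` holds raw theory text and using the `cite` control symbol purely as an example:

    // Sketch: isolate \<^cite> control antiquotations in raw source text.
    val special = Set(
      Symbol.control_prefix + "cite" + Symbol.control_suffix,
      Symbol.decode(Symbol.control_prefix + "cite" + Symbol.control_suffix))
    val Parsers = Antiquote.Parsers
    Parsers.parseAll(Parsers.rep(Parsers.antiquote_special(special)), source) match {
      case Parsers.Success(antiquotes, _) =>
        for (Antiquote.Control(s) <- antiquotes) println("control: " + s)
      case _ => error("Malformed input")
    }
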
--- a/src/Pure/PIDE/document.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/PIDE/document.scala Thu Jan 19 17:53:05 2023 +0100
@@ -217,12 +217,13 @@
def starts(
commands: Iterator[Command],
- offset: Text.Offset = 0
- ) : Iterator[(Command, Text.Offset)] = {
- var i = offset
+ init: Int = 0,
+ count: Command => Int = _.length
+ ) : Iterator[(Command, Int)] = {
+ var i = init
for (command <- commands) yield {
val start = i
- i += command.length
+ i += count(command)
(command, start)
}
}
@@ -243,6 +244,13 @@
}
final class Commands private(val commands: Linear_Set[Command]) {
+ lazy val start_lines: Map[Document_ID.Command, Int] =
+ (for {
+ (command, line) <-
+ Node.Commands.starts(commands.iterator, init = 1,
+ count = cmd => Library.count_newlines(cmd.source))
+ } yield command.id -> line).toMap
+
lazy val load_commands: List[Command] =
commands.iterator.filter(cmd => cmd.blobs.nonEmpty).toList
@@ -301,6 +309,8 @@
else "node"
def commands: Linear_Set[Command] = _commands.commands
+ def command_start_line(command: Command): Option[Int] =
+ _commands.start_lines.get(command.id)
def load_commands: List[Command] = _commands.load_commands
def load_commands_changed(doc_blobs: Blobs): Boolean =
load_commands.exists(_.blobs_changed(doc_blobs))
@@ -674,8 +684,10 @@
if (command_node.commands.contains(command)) Some((command_node, command)) else None
}
- def find_command_position(id: Document_ID.Generic, offset: Symbol.Offset)
- : Option[Line.Node_Position] =
+ def find_command_position(
+ id: Document_ID.Generic,
+ offset: Symbol.Offset
+ ): Option[Line.Node_Position] = {
for ((node, command) <- find_command(id))
yield {
val name = command.node_name.node
@@ -686,6 +698,15 @@
val pos = sources_iterator.foldLeft(Line.Position.zero)(_.advance(_))
Line.Node_Position(name, pos)
}
+ }
+
+ def find_command_line(id: Document_ID.Generic, offset: Symbol.Offset): Option[Int] =
+ for {
+ (node, command) <- find_command(id)
+ range = Text.Range(0, command.chunk.decode(offset))
+ text <- range.try_substring(command.source)
+ line <- node.command_start_line(command)
+ } yield line + Library.count_newlines(text)
def current_command(other_node_name: Node.Name, offset: Text.Offset): Option[Command] =
if (other_node_name.is_theory) {
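
Note: the line bookkeeping added here composes as follows: the generalized `starts` enumerates commands with an arbitrary measure, `start_lines` instantiates it with newline counts to cache the 1-based start line of every command, and `find_command_line` adds the newlines preceding a symbol offset within the command. A hedged sketch of resolving position properties to a physical theory line, following the pattern used in `document_build.scala` below:

    // Sketch: map position properties (command id + symbol offset) to a line.
    def physical_line(snapshot: Document.Snapshot, props: Properties.T): Option[Int] =
      for {
        id <- Position.Id.unapply(props)
        offset <- Position.Offset.unapply(props)
        line <- snapshot.find_command_line(id, offset)
      } yield line
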
--- a/src/Pure/Thy/bibtex.ML Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/Thy/bibtex.ML Thu Jan 19 17:53:05 2023 +0100
@@ -83,13 +83,16 @@
else ();
in cite_command ctxt get_kind (args, "") end;
+val cite_keywords =
+ Thy_Header.bootstrap_keywords
+ |> Keyword.add_keywords (map (fn a => ((a, Position.none), Keyword.no_spec)) ["in", "using"]);
+
fun cite_command_embedded ctxt get_kind input =
let
- val keywords = Thy_Header.get_keywords' ctxt;
val parser =
Scan.option (Parse.embedded_input --| Parse.$$$ "in") -- parse_citations --
- Scan.optional (Parse.command_name "using" |-- Parse.!!! Parse.name) "";
- val args = Parse.read_embedded ctxt keywords parser input;
+ Scan.optional (Parse.$$$ "using" |-- Parse.!!! Parse.name) "";
+ val args = Parse.read_embedded ctxt cite_keywords parser input;
in cite_command ctxt get_kind args end;
fun cite_command_parser get_kind =
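
Note: the fixed `cite_keywords` value no longer depends on the theory context; it mirrors the Scala side, where `Bibtex.Cite.Parsers.keywords` (below) extends the now-exported `Thy_Header.bootstrap_keywords` with the minor keywords "in" and "using":

    // Sketch: Scala counterpart of cite_keywords in bibtex.ML.
    val keywords: Keyword.Keywords =
      Thy_Header.bootstrap_keywords + "in" + "using"
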
--- a/src/Pure/Thy/bibtex.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/Thy/bibtex.scala Thu Jan 19 17:53:05 2023 +0100
@@ -14,6 +14,8 @@
import scala.util.parsing.input.Reader
import scala.util.matching.Regex
+import isabelle.{Token => Isar_Token}
+
object Bibtex {
/** file format **/
@@ -178,7 +180,7 @@
}
if (chunk.is_malformed && err_line == 0) { err_line = line }
offset = end_offset
- line += chunk.source.count(_ == '\n')
+ line += Library.count_newlines(chunk.source)
}
val err_pos =
@@ -385,7 +387,7 @@
}
case class Chunk(kind: String, tokens: List[Token]) {
- val source = tokens.map(_.source).mkString
+ val source: String = tokens.map(_.source).mkString
private val content: Option[List[Token]] =
tokens match {
@@ -433,7 +435,7 @@
case class Item(kind: String, end: String, delim: Delimited) extends Line_Context
case class Delimited(quoted: Boolean, depth: Int)
- val Closed = Delimited(false, 0)
+ val Closed: Delimited = Delimited(false, 0)
private def token(kind: Token.Kind.Value)(source: String): Token = Token(kind, source)
private def keyword(source: String): Token = Token(Token.Kind.KEYWORD, source)
@@ -469,7 +471,7 @@
require(if (delim.quoted) delim.depth > 0 else delim.depth >= 0,
"bad delimiter depth")
- def apply(in: Input) = {
+ def apply(in: Input): ParseResult[(String, Delimited)] = {
val start = in.offset
val end = in.source.length
@@ -526,7 +528,7 @@
private val ident = identifier ^^ token(Token.Kind.IDENT)
- val other_token = "[=#,]".r ^^ keyword | (nat | (ident | space))
+ val other_token: Parser[Token] = "[=#,]".r ^^ keyword | (nat | (ident | space))
/* body */
@@ -721,6 +723,17 @@
/** cite commands and antiquotations **/
+ /* cite commands */
+
+ def cite_commands(options: Options): List[String] =
+ Library.space_explode(',', options.string("document_cite_commands"))
+
+ val CITE = "cite"
+ val NOCITE = "nocite"
+
+
+ /* update old forms */
+
def cite_antiquotation(name: String, body: String): String =
"""\<^""" + name + """>\<open>""" + body + """\<close>"""
@@ -733,7 +746,6 @@
private val Cite_Command = """\\(cite|nocite|citet|citep)((?:\[[^\]]*\])?)\{([^}]*)\}""".r
private val Cite_Macro = """\[\s*cite_macro\s*=\s*"?(\w+)"?\]\s*""".r
- private val CITE = "cite"
def update_cite_commands(str: String): String =
Cite_Command.replaceAllIn(str, { m =>
@@ -766,4 +778,94 @@
else cite_antiquotation(CITE, body2 + " using " + quote(name))
}
}
+
+
+ /* parse within raw source */
+
+ object Cite {
+ def unapply(tree: XML.Tree): Option[Inner] =
+ tree match {
+ case XML.Elem(Markup(Markup.Latex_Cite.name, props), body) =>
+ val kind = Markup.Kind.unapply(props).getOrElse(CITE)
+ val citations = Markup.Citations.get(props)
+ val pos = props.filter(Markup.position_property)
+ Some(Inner(kind, citations, body, pos))
+ case _ => None
+ }
+
+ sealed case class Inner(kind: String, citation: String, location: XML.Body, pos: Position.T) {
+ def nocite: Inner = copy(kind = NOCITE, location = Nil)
+
+ override def toString: String = citation
+ }
+
+ sealed case class Outer(kind: String, body: String, start: Isar_Token.Pos) {
+ def pos: Position.T = start.position()
+
+ def parse: Option[Inner] = {
+ val tokens = Isar_Token.explode(Parsers.keywords, body)
+ Parsers.parse_all(Parsers.inner(pos), Isar_Token.reader(tokens, start)) match {
+ case Parsers.Success(res, _) => Some(res)
+ case _ => None
+ }
+ }
+
+ def errors: List[String] =
+ if (parse.isDefined) Nil
+ else List("Malformed citation" + Position.here(pos))
+
+ override def toString: String =
+ parse match {
+ case Some(inner) => inner.toString
+ case None => "<malformed>"
+ }
+ }
+
+ object Parsers extends Parse.Parsers {
+ val keywords: Keyword.Keywords = Thy_Header.bootstrap_keywords + "in" + "using"
+
+ val location: Parser[String] = embedded ~ $$$("in") ^^ { case x ~ _ => x } | success("")
+ val citations: Parser[String] = rep1sep(name, $$$("and")) ^^ (x => x.mkString(","))
+ val kind: Parser[String] = $$$("using") ~! name ^^ { case _ ~ x => x } | success(CITE)
+
+ def inner(pos: Position.T): Parser[Inner] =
+ location ~ citations ~ kind ^^
+ { case x ~ y ~ z => Inner(z, y, XML.string(x), pos) }
+ }
+
+ def parse(
+ cite_commands: List[String],
+ text: String,
+ start: Isar_Token.Pos = Isar_Token.Pos.start
+ ): List[Outer] = {
+ val controls = cite_commands.map(s => Symbol.control_prefix + s + Symbol.control_suffix)
+ val special = (controls ::: controls.map(Symbol.decode)).toSet
+
+ val Parsers = Antiquote.Parsers
+ Parsers.parseAll(Parsers.rep(Parsers.antiquote_special(special)), text) match {
+ case Parsers.Success(ants, _) =>
+ var pos = start
+ val result = new mutable.ListBuffer[Outer]
+ for (ant <- ants) {
+ ant match {
+ case Antiquote.Control(source) =>
+ for {
+ head <- Symbol.iterator(source).nextOption
+ kind <- Symbol.control_name(Symbol.encode(head))
+ } {
+ val rest = source.substring(head.length)
+ val (body, pos1) =
+ if (rest.isEmpty) (rest, pos)
+ else (Scan.Parsers.cartouche_content(rest), pos.advance(Symbol.open))
+ result += Outer(kind, body, pos1)
+ }
+ case _ =>
+ }
+ pos = pos.advance(ant.source)
+ }
+ result.toList
+ case _ => error("Unexpected failure parsing special antiquotations:\n" + text)
+ }
+ }
+ }
}
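
Note: `Cite.parse` operates on raw (unchecked) theory source: `Bibtex.cite_commands` reads the configured command names from option "document_cite_commands", and each resulting `Outer` value can be validated via `errors` or interpreted via `parse`. A minimal sketch, assuming `options` and `text` are given:

    // Sketch: scan raw text for cite antiquotations and report malformed ones.
    val outers = Bibtex.Cite.parse(Bibtex.cite_commands(options), text)
    val errs = outers.flatMap(_.errors)
    if (errs.nonEmpty) error(errs.mkString("\n"))
    else for (inner <- outers.flatMap(_.parse)) println(inner.kind + ": " + inner.citation)
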
--- a/src/Pure/Thy/document_build.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/Thy/document_build.scala Thu Jan 19 17:53:05 2023 +0100
@@ -141,11 +141,16 @@
s2 <- Library.try_unsuffix("\" ...", s1)
} yield s2
- sealed case class Document_Latex(name: Document.Node.Name, body: XML.Body) {
+ sealed case class Document_Latex(
+ name: Document.Node.Name,
+ body: XML.Body,
+ line_pos: Properties.T => Option[Int]
+ ) {
def content: File.Content_XML = File.content(Path.basic(tex_name(name)), body)
def file_pos: String = File.symbolic_path(name.path)
def write(latex_output: Latex.Output, dir: Path): Unit =
- content.output(latex_output.make(_, file_pos = file_pos)).write(dir)
+ content.output(latex_output.make(_, file_pos = file_pos, line_pos = line_pos))
+ .write(dir)
}
def context(
@@ -228,13 +233,29 @@
lazy val document_latex: List[Document_Latex] =
for (name <- all_document_theories)
yield {
+ val selected = document_selection(name)
+
val body =
- if (document_selection(name)) {
+ if (selected) {
val entry = session_context(name.theory, Export.DOCUMENT_LATEX, permissive = true)
YXML.parse_body(entry.text)
}
else Nil
- Document_Latex(name, body)
+
+ def line_pos(props: Properties.T): Option[Int] =
+ Position.Line.unapply(props) orElse {
+ if (selected) {
+ for {
+ snapshot <- session_context.document_snapshot
+ id <- Position.Id.unapply(props)
+ offset <- Position.Offset.unapply(props)
+ line <- snapshot.find_command_line(id, offset)
+ } yield line
+ }
+ else None
+ }
+
+ Document_Latex(name, body, line_pos)
}
--- a/src/Pure/Thy/export.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/Thy/export.scala Thu Jan 19 17:53:05 2023 +0100
@@ -346,7 +346,7 @@
val database_context: Database_Context,
session_background: Sessions.Background,
db_hierarchy: List[Session_Database],
- document_snapshot: Option[Document.Snapshot]
+ val document_snapshot: Option[Document.Snapshot]
) extends AutoCloseable {
session_context =>
--- a/src/Pure/Thy/latex.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/Thy/latex.scala Thu Jan 19 17:53:05 2023 +0100
@@ -46,21 +46,6 @@
else name
- /* cite: references to bibliography */
-
- object Cite {
- sealed case class Value(kind: String, citation: String, location: XML.Body)
- def unapply(tree: XML.Tree): Option[Value] =
- tree match {
- case XML.Elem(Markup(Markup.Latex_Cite.name, props), body) =>
- val kind = Markup.Kind.unapply(props).getOrElse("cite")
- val citations = Markup.Citations.get(props)
- Some(Value(kind, citations, body))
- case _ => None
- }
- }
-
-
/* index entries */
def index_escape(str: String): String = {
@@ -235,11 +220,15 @@
}
}
- def cite(value: Cite.Value): Text = {
- latex_macro0(value.kind) :::
- (if (value.location.isEmpty) Nil
- else XML.string("[") ::: value.location ::: XML.string("]")) :::
- XML.string("{" + value.citation + "}")
+ def cite(inner: Bibtex.Cite.Inner): Text = {
+ val body =
+ latex_macro0(inner.kind) :::
+ (if (inner.location.isEmpty) Nil
+ else XML.string("[") ::: inner.location ::: XML.string("]")) :::
+ XML.string("{" + inner.citation + "}")
+
+ if (inner.pos.isEmpty) body
+ else List(XML.Elem(Markup.Latex_Output(inner.pos), body))
}
def index_item(item: Index_Item.Value): String = {
@@ -261,7 +250,11 @@
error("Unknown latex markup element " + quote(elem.name) + Position.here(pos) +
":\n" + XML.string_of_tree(elem))
- def make(latex_text: Text, file_pos: String = ""): String = {
+ def make(
+ latex_text: Text,
+ file_pos: String = "",
+ line_pos: Properties.T => Option[Int] = Position.Line.unapply
+ ): String = {
var line = 1
val result = new mutable.ListBuffer[String]
val positions = new mutable.ListBuffer[String] ++= init_position(file_pos)
@@ -271,16 +264,18 @@
def traverse(xml: XML.Body): Unit = {
xml.foreach {
case XML.Text(s) =>
- line += s.count(_ == '\n')
+ line += Library.count_newlines(s)
result += s
case elem @ XML.Elem(markup, body) =>
val a = Markup.Optional_Argument.get(markup.properties)
traverse {
markup match {
case Markup.Document_Latex(props) =>
- for (l <- Position.Line.unapply(props) if positions.nonEmpty) {
- val s = position(Value.Int(line), Value.Int(l))
- if (positions.last != s) positions += s
+ if (positions.nonEmpty) {
+ for (l <- line_pos(props)) {
+ val s = position(Value.Int(line), Value.Int(l))
+ if (positions.last != s) positions += s
+ }
}
body
case Markup.Latex_Output(_) => XML.string(latex_output(body))
@@ -293,7 +288,7 @@
case Markup.Latex_Tag(name) => latex_tag(name, body)
case Markup.Latex_Cite(_) =>
elem match {
- case Cite(value) => cite(value)
+ case Bibtex.Cite(inner) => cite(inner)
case _ => unknown_elem(elem, file_position)
}
case Markup.Latex_Index_Entry(_) =>
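
Note: `line_pos` defaults to the previous behaviour (`Position.Line.unapply`), so existing callers of `make` are unaffected; document builds may pass a snapshot-based resolver instead. A hedged sketch of the two call forms, assuming `latex_output: Latex.Output`, a `body` of LaTeX text, and a hypothetical resolver `resolve_line: Properties.T => Option[Int]` such as the one sketched after document.scala above:

    // Sketch: default line handling vs. an explicit resolver.
    val plain = latex_output.make(body, file_pos = file_pos)
    val resolved = latex_output.make(body, file_pos = file_pos, line_pos = resolve_line)
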
--- a/src/Pure/Thy/thy_header.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/Thy/thy_header.scala Thu Jan 19 17:53:05 2023 +0100
@@ -59,7 +59,7 @@
(THEORY, Keyword.Spec(kind = Keyword.THY_BEGIN, tags = List("theory"))),
("ML", Keyword.Spec(kind = Keyword.THY_DECL, tags = List("ML"))))
- private val bootstrap_keywords =
+ val bootstrap_keywords: Keyword.Keywords =
Keyword.Keywords.empty.add_keywords(bootstrap_header)
val bootstrap_syntax: Outer_Syntax =
--- a/src/Pure/Tools/update.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/Tools/update.scala Thu Jan 19 17:53:05 2023 +0100
@@ -14,7 +14,7 @@
def update_xml(options: Options, xml: XML.Body): XML.Body = {
val update_path_cartouches = options.bool("update_path_cartouches")
val update_cite = options.bool("update_cite")
- val cite_commands = Library.space_explode(',', options.string("document_cite_commands"))
+ val cite_commands = Bibtex.cite_commands(options)
def upd(lang: Markup.Language, ts: XML.Body): XML.Body =
ts flatMap {
@@ -95,7 +95,7 @@
session <- sessions_structure.build_topological_order
if build_results(session).ok && !exclude(session)
} {
- progress.echo("Session " + session + " ...")
+ progress.echo("Updating " + session + " ...")
val session_options = sessions_structure(session).options
val proper_session_theory =
build_results.deps(session).proper_session_theories.map(_.theory).toSet
@@ -118,7 +118,7 @@
val source1 = XML.content(update_xml(session_options, xml))
if (source1 != snapshot.node.source) {
val path = Path.explode(node_name.node)
- progress.echo("Updating " + quote(File.standard_path(path)))
+ progress.echo("File " + quote(File.standard_path(path)))
File.write(path, source1)
}
}
--- a/src/Pure/library.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Pure/library.scala Thu Jan 19 17:53:05 2023 +0100
@@ -94,6 +94,8 @@
/* lines */
+ def count_newlines(str: String): Int = str.count(_ == '\n')
+
def terminate_lines(lines: IterableOnce[String]): String = {
val it = lines.iterator
if (it.isEmpty) "" else it.mkString("", "\n", "\n")
--- a/src/Tools/jEdit/src/document_dockable.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Tools/jEdit/src/document_dockable.scala Thu Jan 19 17:53:05 2023 +0100
@@ -318,6 +318,7 @@
Wrap_Panel(List(reset_button, purge_button))
private val theories = new Theories_Status(view, document = true)
+ private val theories_pane = new ScrollPane(theories.gui)
private def refresh_theories(): Unit = {
val domain = PIDE.editor.document_theories().toSet
@@ -328,7 +329,7 @@
private val input_page =
new TabbedPane.Page("Input", new BorderPanel {
layout(theories_controls) = BorderPanel.Position.North
- layout(theories.gui) = BorderPanel.Position.Center
+ layout(theories_pane) = BorderPanel.Position.Center
}, "Selection and status of document theories")
private val output_controls =
--- a/src/Tools/jEdit/src/pretty_tooltip.scala Thu Jan 19 13:55:38 2023 +0000
+++ b/src/Tools/jEdit/src/pretty_tooltip.scala Thu Jan 19 17:53:05 2023 +0100
@@ -252,7 +252,7 @@
val formatted = Pretty.formatted(info.info, margin = margin, metric = metric)
val lines =
XML.traverse_text(formatted)(if (XML.text_length(formatted) == 0) 0 else 1)(
- (n: Int, s: String) => n + s.iterator.count(_ == '\n'))
+ (n: Int, s: String) => n + Library.count_newlines(s))
val h = painter.getLineHeight * lines + geometry.deco_height
val margin1 =