--- a/src/Pure/General/completion.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/General/completion.scala Sun Nov 29 21:58:40 2020 +0100
@@ -153,7 +153,7 @@
YXML.string_of_tree(Semantic.Info(pos, Names(if (total > 0) total else names.length, names)))
def report_theories(pos: Position.T, thys: List[String], total: Int = 0): String =
- report_names(pos, thys.map(name => (name, ("theory", name))), total)
+ report_names(pos, thys.map(name => (name, (Markup.THEORY, name))), total)
object Semantic
{
--- a/src/Pure/General/path.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/General/path.scala Sun Nov 29 21:58:40 2020 +0100
@@ -203,6 +203,7 @@
def xz: Path = ext("xz")
def tex: Path = ext("tex")
def pdf: Path = ext("pdf")
+ def thy: Path = ext("thy")
def backup: Path =
{
@@ -256,9 +257,27 @@
def file_name: String = expand.base.implode
- /* source position */
+ /* implode wrt. given directories */
- def position: Position.T = Position.File(implode)
+ def implode_symbolic: String =
+ {
+ val directories =
+ Library.space_explode(':', Isabelle_System.getenv("ISABELLE_DIRECTORIES")).reverse
+ val full_name = expand.implode
+ directories.view.flatMap(a =>
+ try {
+ val b = Path.explode(a).expand.implode
+ if (full_name == b) Some(a)
+ else {
+ Library.try_unprefix(b + "/", full_name) match {
+ case Some(name) => Some(a + "/" + name)
+ case None => None
+ }
+ }
+ } catch { case ERROR(_) => None }).headOption.getOrElse(implode)
+ }
+
+ def position: Position.T = Position.File(implode_symbolic)
/* platform files */
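
The new implode_symbolic turns an absolute file name back into a symbolic one by matching it against the entries of ISABELLE_DIRECTORIES, preferring later entries, and position now reports that symbolic form. For orientation, a minimal standalone sketch of the prefix rewriting, using plain strings instead of Isabelle's Path/Isabelle_System API (symbolic_name and its arguments are illustrative names, not part of the change):

  // Given symbolic roots paired with their expanded absolute form, in reverse
  // priority order, rewrite a full file name against the first root whose
  // expansion is a prefix of it; otherwise keep the absolute name.
  def symbolic_name(roots: List[(String, String)], full_name: String): String =
    roots.view.flatMap({ case (symbolic, expanded) =>
      if (full_name == expanded) Some(symbolic)
      else if (full_name.startsWith(expanded + "/"))
        Some(symbolic + "/" + full_name.substring(expanded.length + 1))
      else None
    }).headOption.getOrElse(full_name)

  // symbolic_name(List(("$ISABELLE_HOME", "/opt/isabelle")),
  //   "/opt/isabelle/src/Pure/ROOT.ML") == "$ISABELLE_HOME/src/Pure/ROOT.ML"
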
--- a/src/Pure/PIDE/command.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/PIDE/command.scala Sun Nov 29 21:58:40 2020 +0100
@@ -261,7 +261,8 @@
def accumulate(
self_id: Document_ID.Generic => Boolean,
- other_id: Document_ID.Generic => Option[(Symbol.Text_Chunk.Id, Symbol.Text_Chunk)],
+ other_id: (Document.Node.Name, Document_ID.Generic) =>
+ Option[(Symbol.Text_Chunk.Id, Symbol.Text_Chunk)],
message: XML.Elem,
xml_cache: XML.Cache): State =
message match {
@@ -293,7 +294,8 @@
val target =
if (self_id(id) && command.chunks.isDefinedAt(chunk_name))
Some((chunk_name, command.chunks(chunk_name)))
- else if (chunk_name == Symbol.Text_Chunk.Default) other_id(id)
+ else if (chunk_name == Symbol.Text_Chunk.Default)
+ other_id(command.node_name, id)
else None
(target, atts) match {
@@ -415,14 +417,14 @@
// inlined errors
case Thy_Header.THEORY =>
val reader = Scan.char_reader(Token.implode(span.content))
- val header = resources.check_thy_reader(node_name, reader)
+ val header = resources.check_thy(node_name, reader)
val imports_pos = header.imports_pos
val raw_imports =
try {
- val read_imports = Thy_Header.read(reader, Token.Pos.none).imports
+ val read_imports = Thy_Header.read(node_name, reader).imports.map(_._1)
if (imports_pos.length == read_imports.length) read_imports else error("")
}
- catch { case _: Throwable => List.fill(imports_pos.length)("") }
+ catch { case _: Throwable => List.fill(header.imports.length)("") }
val errors =
for { ((import_name, pos), s) <- imports_pos zip raw_imports if !can_import(import_name) }
--- a/src/Pure/PIDE/document.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/PIDE/document.scala Sun Nov 29 21:58:40 2020 +0100
@@ -752,9 +752,14 @@
id == st.command.id ||
(execs.get(id) match { case Some(st1) => st1.command.id == st.command.id case None => false })
- private def other_id(id: Document_ID.Generic)
+ private def other_id(node_name: Node.Name, id: Document_ID.Generic)
: Option[(Symbol.Text_Chunk.Id, Symbol.Text_Chunk)] =
- lookup_id(id).map(st => ((Symbol.Text_Chunk.Id(st.command.id), st.command.chunk)))
+ {
+ for {
+ st <- lookup_id(id)
+ if st.command.node_name == node_name
+ } yield (Symbol.Text_Chunk.Id(st.command.id), st.command.chunk)
+ }
private def redirection(st: Command.State): Graph[Document_ID.Command, Unit] =
(commands_redirection /: st.markups.redirection_iterator)({ case (graph, id) =>
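
The two hunks above (command.scala and document.scala) thread the node name through accumulate, so that an id arriving for the default text chunk is only resolved against command states of the same theory. A reduced sketch of that guard, with stand-in types rather than the PIDE document model:

  // Stand-in for a command state: it knows its node (theory) and its chunk.
  final case class CmdState(node: String, command_id: Long, chunk: String)

  // Resolve an id, but accept the hit only when it belongs to the given node;
  // command states from other theories no longer contribute their chunks here.
  def other_id(lookup: Long => Option[CmdState])(
      node: String, id: Long): Option[(Long, String)] =
    for {
      st <- lookup(id)
      if st.node == node
    } yield (st.command_id, st.chunk)
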
--- a/src/Pure/PIDE/headless.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/PIDE/headless.scala Sun Nov 29 21:58:40 2020 +0100
@@ -600,7 +600,7 @@
progress.expose_interrupt()
val text0 = File.read(path)
val text = if (unicode_symbols) Symbol.decode(text0) else text0
- val node_header = resources.check_thy_reader(node_name, Scan.char_reader(text))
+ val node_header = resources.check_thy(node_name, Scan.char_reader(text))
new Resources.Theory(node_name, node_header, text, true)
}
--- a/src/Pure/PIDE/markup.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/PIDE/markup.scala Sun Nov 29 21:58:40 2020 +0100
@@ -267,6 +267,7 @@
/* misc entities */
+ val THEORY = "theory"
val CLASS = "class"
val TYPE_NAME = "type_name"
val FIXED = "fixed"
@@ -379,6 +380,8 @@
val CARTOUCHE = "cartouche"
val COMMENT = "comment"
+ val LOAD_COMMAND = "load_command"
+
/* comments */
--- a/src/Pure/PIDE/protocol_message.ML Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/PIDE/protocol_message.ML Sun Nov 29 21:58:40 2020 +0100
@@ -15,6 +15,8 @@
structure Protocol_Message: PROTOCOL_MESSAGE =
struct
+(* message markers *)
+
fun marker_text a text =
Output.physical_writeln ("\f" ^ a ^ " = " ^ encode_lines text);
@@ -22,6 +24,8 @@
marker_text a (YXML.string_of_body (XML.Encode.properties props));
+(* inlined messages *)
+
fun command_positions id =
let
fun attribute (a, b) =
--- a/src/Pure/PIDE/resources.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/PIDE/resources.scala Sun Nov 29 21:58:40 2020 +0100
@@ -60,8 +60,6 @@
def is_hidden(name: Document.Node.Name): Boolean =
!name.is_theory || name.theory == Sessions.root_name || File_Format.registry.is_theory(name)
- def thy_path(path: Path): Path = path.ext("thy")
-
/* file-system operations */
@@ -147,7 +145,7 @@
def find_theory_node(theory: String): Option[Document.Node.Name] =
{
- val thy_file = thy_path(Path.basic(Long_Name.base_name(theory)))
+ val thy_file = Path.basic(Long_Name.base_name(theory)).thy
val session = session_base.theory_qualifier(theory)
val dirs =
sessions_structure.get(session) match {
@@ -161,7 +159,7 @@
def import_name(qualifier: String, dir: String, s: String): Document.Node.Name =
{
val theory = theory_name(qualifier, Thy_Header.import_name(s))
- def theory_node = make_theory_node(dir, thy_path(Path.explode(s)), theory)
+ def theory_node = make_theory_node(dir, Path.explode(s).thy, theory)
if (!Thy_Header.is_base_name(s)) theory_node
else if (session_base.loaded_theory(theory)) loaded_theory_node(theory)
@@ -216,42 +214,26 @@
else error ("Cannot load theory file " + path)
}
- def check_thy_reader(node_name: Document.Node.Name, reader: Reader[Char],
- start: Token.Pos = Token.Pos.command, strict: Boolean = true): Document.Node.Header =
+ def check_thy(node_name: Document.Node.Name, reader: Reader[Char],
+ command: Boolean = true, strict: Boolean = true): Document.Node.Header =
{
if (node_name.is_theory && reader.source.length > 0) {
try {
- val header = Thy_Header.read(reader, start, strict).check_keywords
-
- val base_name = node_name.theory_base_name
- if (Long_Name.is_qualified(header.name)) {
- error("Bad theory name " + quote(header.name) + " with qualification" +
- Position.here(header.pos))
- }
- if (base_name != header.name) {
- error("Bad theory name " + quote(header.name) +
- " for file " + thy_path(Path.basic(base_name)) + Position.here(header.pos) +
- Completion.report_theories(header.pos, List(base_name)))
- }
-
- val imports_pos =
- header.imports_pos.map({ case (s, pos) =>
+ val header = Thy_Header.read(node_name, reader, command = command, strict = strict)
+ val imports =
+ header.imports.map({ case (s, pos) =>
val name = import_name(node_name, s)
if (Sessions.exclude_theory(name.theory_base_name))
error("Bad theory name " + quote(name.theory_base_name) + Position.here(pos))
(name, pos)
})
- Document.Node.Header(imports_pos, header.keywords, header.abbrevs)
+ Document.Node.Header(imports, header.keywords, header.abbrevs)
}
catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
}
else Document.Node.no_header
}
- def check_thy(name: Document.Node.Name, start: Token.Pos = Token.Pos.command,
- strict: Boolean = true): Document.Node.Header =
- with_thy_reader(name, check_thy_reader(name, _, start, strict))
-
/* special header */
@@ -351,7 +333,9 @@
progress.expose_interrupt()
val header =
- try { check_thy(name, Token.Pos.file(name.node)).cat_errors(message) }
+ try {
+ with_thy_reader(name, check_thy(name, _, command = false)).cat_errors(message)
+ }
catch { case ERROR(msg) => cat_error(msg, message) }
val entry = Document.Node.Entry(name, header)
dependencies1.require_thys(adjunct, header.imports_pos,
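
check_thy_reader and the old check_thy are merged into a single check_thy over a reader: the command flag selects command-relative versus file-absolute token positions (editors keep the default, batch dependency analysis passes command = false), and the theory-name check moves into Thy_Header.check. The per-import validation that remains here has roughly the following shape; a standalone sketch with illustrative resolve/excluded parameters, not the Resources API:

  // Resolve each raw import to a theory name and reject excluded base names,
  // keeping the source position (a plain string here) for error reporting.
  def check_imports(
      resolve: String => String,
      excluded: String => Boolean,
      imports: List[(String, String)]): List[(String, String)] =
    imports.map({ case (s, pos) =>
      val name = resolve(s)
      if (excluded(name)) sys.error("Bad theory name " + name + " at " + pos)
      (name, pos)
    })
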
--- a/src/Pure/Thy/thy_header.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/Thy/thy_header.scala Sun Nov 29 21:58:40 2020 +0100
@@ -162,7 +162,7 @@
{ case None => Nil case Some(_ ~ xs) => xs }) ~
(opt($$$(ABBREVS) ~! abbrevs) ^^
{ case None => Nil case Some(_ ~ xs) => xs }) ~
- $$$(BEGIN) ^^ { case a ~ b ~ c ~ d ~ _ => Thy_Header(a, b, c, d).map(Symbol.decode) }
+ $$$(BEGIN) ^^ { case a ~ b ~ c ~ d ~ _ => Thy_Header(a._1, a._2, b, c, d) }
val heading =
(command(CHAPTER) |
@@ -213,44 +213,61 @@
}
}
- def read(reader: Reader[Char], start: Token.Pos, strict: Boolean = true): Thy_Header =
+ def read(node_name: Document.Node.Name, reader: Reader[Char],
+ command: Boolean = true,
+ strict: Boolean = true): Thy_Header =
{
val (_, tokens0) = read_tokens(reader, true)
val text = Scan.reader_decode_utf8(reader, Token.implode(tokens0))
- val (drop_tokens, tokens) = read_tokens(Scan.char_reader(text), strict)
- val pos = (start /: drop_tokens)(_.advance(_))
+ val (skip_tokens, tokens) = read_tokens(Scan.char_reader(text), strict)
+ val pos =
+ if (command) Token.Pos.command
+ else (Token.Pos.file(node_name.node) /: skip_tokens)(_ advance _)
- Parser.parse_header(tokens, pos)
+ Parser.parse_header(tokens, pos).map(Symbol.decode).check(node_name)
}
}
sealed case class Thy_Header(
- name_pos: (String, Position.T),
- imports_pos: List[(String, Position.T)],
+ name: String,
+ pos: Position.T,
+ imports: List[(String, Position.T)],
keywords: Thy_Header.Keywords,
abbrevs: Thy_Header.Abbrevs)
{
- def name: String = name_pos._1
- def pos: Position.T = name_pos._2
- def imports: List[String] = imports_pos.map(_._1)
-
def map(f: String => String): Thy_Header =
- Thy_Header((f(name), pos),
- imports_pos.map({ case (a, b) => (f(a), b) }),
+ Thy_Header(f(name), pos,
+ imports.map({ case (a, b) => (f(a), b) }),
keywords.map({ case (a, spec) => (f(a), spec.map(f)) }),
abbrevs.map({ case (a, b) => (f(a), f(b)) }))
- def check_keywords: Thy_Header =
+ def check(node_name: Document.Node.Name): Thy_Header =
{
+ val base_name = node_name.theory_base_name
+ if (Long_Name.is_qualified(name)) {
+ error("Bad theory name " + quote(name) + " with qualification" + Position.here(pos))
+ }
+ if (base_name != name) {
+ error("Bad theory name " + quote(name) + " for file " + Path.basic(base_name).thy +
+ Position.here(pos) + Completion.report_theories(pos, List(base_name)))
+ }
+
for ((_, spec) <- keywords) {
if (spec.kind != Keyword.THY_LOAD && spec.load_command.nonEmpty) {
error("Illegal load command specification for kind: " + quote(spec.kind) +
Position.here(spec.kind_pos))
}
if (!Command_Span.load_commands.exists(_.name == spec.load_command)) {
+ val completion =
+ for {
+ load_command <- Command_Span.load_commands
+ name = load_command.name
+ if name.startsWith(spec.load_command)
+ } yield (name, (Markup.LOAD_COMMAND, name))
error("Unknown load command specification: " + quote(spec.load_command) +
- Position.here(spec.load_command_pos))
+ Position.here(spec.load_command_pos) +
+ Completion.report_names(spec.load_command_pos, completion))
}
}
this
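
The unknown-load-command error now carries completion candidates: every registered load command whose name extends the given (possibly partial or misspelled) specification, reported under the new Markup.LOAD_COMMAND kind. A standalone sketch of the candidate selection, with a plain string list standing in for Command_Span.load_commands:

  // Offer all known load commands whose name starts with the specification;
  // the pairs mirror (name, (Markup.LOAD_COMMAND, name)) used above.
  def load_command_completion(
      known: List[String], spec: String): List[(String, (String, String))] =
    for {
      name <- known
      if name.startsWith(spec)
    } yield (name, ("load_command", name))

  // load_command_completion(List("foo", "foobar"), "foo") ==
  //   List(("foo", ("load_command", "foo")), ("foobar", ("load_command", "foobar")))
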
--- a/src/Pure/Tools/build_job.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/Tools/build_job.scala Sun Nov 29 21:58:40 2020 +0100
@@ -170,7 +170,7 @@
{
val theory_name = snapshot.node_name.theory
val args = Protocol.Export.Args(theory_name = theory_name, name = name)
- val bytes = Bytes(YXML.string_of_body(xml))
+ val bytes = Bytes(Symbol.encode(YXML.string_of_body(xml)))
if (!bytes.is_empty) export_consumer(session_name, args, bytes)
}
def export_text(name: String, text: String): Unit =
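
The Bytes fix re-encodes the YXML text into Isabelle symbol notation before export, so the stored bytes use the same ASCII symbol syntax as the original sources rather than the decoded Unicode rendering. An illustrative round-trip with a single-symbol stand-in encoder (the real Symbol.encode covers the full symbol table):

  // Stand-in for Symbol.encode, handling one symbol only: the decoded Unicode
  // rendering of \<Longrightarrow> goes back to its source notation.
  def encode_symbols(s: String): String =
    s.replace("⟹", "\\<Longrightarrow>")

  val decoded = "A ⟹ B"
  val exported = encode_symbols(decoded).getBytes("UTF-8")
  // new String(exported, "UTF-8") == "A \<Longrightarrow> B"
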
--- a/src/Pure/Tools/doc.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Pure/Tools/doc.scala Sun Nov 29 21:58:40 2020 +0100
@@ -7,9 +7,6 @@
package isabelle
-import scala.util.matching.Regex
-
-
object Doc
{
/* dirs */
@@ -41,8 +38,8 @@
}
else None
- private val Section_Entry = new Regex("""^(\S.*)\s*$""")
- private val Doc_Entry = new Regex("""^\s+(\S+)\s+(.+)\s*$""")
+ private val Section_Entry = """^(\S.*)\s*$""".r
+ private val Doc_Entry = """^\s+(\S+)\s+(.+)\s*$""".r
private def release_notes(): List[Entry] =
Section("Release Notes", true) ::
--- a/src/Tools/VSCode/src/vscode_model.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Tools/VSCode/src/vscode_model.scala Sun Nov 29 21:58:40 2020 +0100
@@ -99,7 +99,7 @@
def node_header: Document.Node.Header =
resources.special_header(node_name) getOrElse
- resources.check_thy_reader(node_name, Scan.char_reader(content.text))
+ resources.check_thy(node_name, Scan.char_reader(content.text))
/* perspective */
--- a/src/Tools/jEdit/src/document_model.scala Sun Nov 29 07:57:50 2020 +0000
+++ b/src/Tools/jEdit/src/document_model.scala Sun Nov 29 21:58:40 2020 +0100
@@ -413,7 +413,7 @@
def node_header: Document.Node.Header =
PIDE.resources.special_header(node_name) getOrElse
- PIDE.resources.check_thy_reader(node_name, Scan.char_reader(content.text), strict = false)
+ PIDE.resources.check_thy(node_name, Scan.char_reader(content.text), strict = false)
/* content */
@@ -485,7 +485,7 @@
PIDE.resources.special_header(node_name) getOrElse
JEdit_Lib.buffer_lock(buffer) {
- PIDE.resources.check_thy_reader(node_name, JEdit_Lib.buffer_reader(buffer), strict = false)
+ PIDE.resources.check_thy(node_name, JEdit_Lib.buffer_reader(buffer), strict = false)
}
}