--- a/src/Pure/General/position.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/General/position.scala Sun Mar 15 23:46:00 2015 +0100
@@ -21,6 +21,7 @@
val End_Offset = new Properties.Int(Markup.END_OFFSET)
val File = new Properties.String(Markup.FILE)
val Id = new Properties.Long(Markup.ID)
+ val Id_String = new Properties.String(Markup.ID)
val Def_Line = new Properties.Int(Markup.DEF_LINE)
val Def_Offset = new Properties.Int(Markup.DEF_OFFSET)
@@ -101,11 +102,8 @@
/* here: user output */
- def yxml_markup(pos: T, str: String): String =
- YXML.string_of_tree(XML.Elem(Markup(Markup.POSITION, pos), List(XML.Text(str))))
-
def here(pos: T): String =
- yxml_markup(pos,
+ Markup(Markup.POSITION, pos).markup(
(Line.unapply(pos), File.unapply(pos)) match {
case (Some(i), None) => " (line " + i.toString + ")"
case (Some(i), Some(name)) => " (line " + i.toString + " of " + quote(name) + ")"
@@ -114,7 +112,7 @@
})
def here_undelimited(pos: T): String =
- yxml_markup(pos,
+ Markup(Markup.POSITION, pos).markup(
(Line.unapply(pos), File.unapply(pos)) match {
case (Some(i), None) => "line " + i.toString
case (Some(i), Some(name)) => "line " + i.toString + " of " + quote(name)
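
Note: here and here_undelimited now delegate their YXML wrapping to the
new Markup.markup method (see the markup.scala hunk below) instead of the
removed local yxml_markup. An illustrative sketch of the observable
behaviour, assuming a position with line and file properties:

    // sketch only: the visible text is wrapped in YXML position markup,
    // so front-ends can attach a hyperlink to it
    val pos = Position.Line(42) ::: Position.File("Foo.thy")
    Position.here(pos)   // YXML around " (line 42 of \"Foo.thy\")"
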
--- a/src/Pure/Isar/keyword.ML Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Isar/keyword.ML Sun Mar 15 23:46:00 2015 +0100
@@ -249,4 +249,3 @@
fun is_printed keywords = is_theory_goal keywords orf is_proof keywords;
end;
-
--- a/src/Pure/Isar/keyword.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Isar/keyword.scala Sun Mar 15 23:46:00 2015 +0100
@@ -39,7 +39,7 @@
val PRF_SCRIPT = "prf_script"
- /* categories */
+ /* command categories */
val vacous = Set(DIAG, DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW)
@@ -50,6 +50,11 @@
val document_raw = Set(DOCUMENT_RAW)
val document = Set(DOCUMENT_HEADING, DOCUMENT_BODY, DOCUMENT_RAW)
+ val theory_begin = Set(THY_BEGIN)
+ val theory_end = Set(THY_END)
+
+ val theory_load = Set(THY_LOAD)
+
val theory = Set(THY_BEGIN, THY_END, THY_LOAD, THY_DECL, THY_DECL_BLOCK, THY_GOAL)
val theory_block = Set(THY_BEGIN, THY_DECL_BLOCK)
@@ -139,6 +144,12 @@
def command_kind(name: String): Option[String] = commands.get(name).map(_._1)
+ def is_command_kind(name: String, pred: String => Boolean): Boolean =
+ command_kind(name) match {
+ case Some(kind) => pred(kind)
+ case None => false
+ }
+
/* load commands */
@@ -155,4 +166,3 @@
load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
}
}
-
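
Note: the new singleton category sets (theory_begin, theory_end,
theory_load) double as predicates, since Set[String] is a
String => Boolean; that is exactly the shape is_command_kind expects.
A sketch, assuming keywords: Keyword.Keywords with the usual Pure
commands registered:

    keywords.is_command_kind("theory", Keyword.theory_begin)  // true
    keywords.is_command_kind("end", Keyword.theory_begin)     // false
    keywords.is_command_kind("no_such_cmd", _ => true)        // false: unknown command
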
--- a/src/Pure/Isar/outer_syntax.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Isar/outer_syntax.scala Sun Mar 15 23:46:00 2015 +0100
@@ -123,7 +123,9 @@
}
- /* load commands */
+ /* command categories */
+
+ def is_theory_begin(name: String): Boolean = keywords.is_command_kind(name, Keyword.theory_begin)
def load_command(name: String): Option[List[String]] = keywords.load_command(name)
def load_commands_in(text: String): Boolean = keywords.load_commands_in(text)
@@ -235,7 +237,7 @@
case "subsubsection" => Some(3)
case _ =>
keywords.command_kind(command.name) match {
- case Some(kind) if Keyword.theory(kind) && kind != Keyword.THY_END => Some(4)
+ case Some(kind) if Keyword.theory(kind) && !Keyword.theory_end(kind) => Some(4)
case _ => None
}
}
@@ -284,7 +286,7 @@
/* result structure */
val spans = parse_spans(text)
- spans.foreach(span => add(Command(Document_ID.none, node_name, None, span)))
+ spans.foreach(span => add(Command(Document_ID.none, node_name, Command.no_blobs, span)))
result()
}
}
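
Note: is_theory_begin is the first client of the new is_command_kind and
is what Command.blobs_info (below) uses to recognize theory-header spans.
The heading-level test likewise trades the equality check against
Keyword.THY_END for the theory_end set:

    // equivalent to the old kind != Keyword.THY_END, but in the
    // category-set style now used throughout keyword.scala
    Keyword.theory(kind) && !Keyword.theory_end(kind)
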
--- a/src/Pure/Isar/token.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Isar/token.scala Sun Mar 15 23:46:00 2015 +0100
@@ -157,14 +157,17 @@
object Pos
{
- val none: Pos = new Pos(0, 0, "")
- def file(file: String): Pos = new Pos(0, 0, file)
+ val none: Pos = new Pos(0, 0, "", "")
+ val start: Pos = new Pos(1, 1, "", "")
+ def file(file: String): Pos = new Pos(1, 1, file, "")
+ def id(id: String): Pos = new Pos(0, 1, "", id)
}
final class Pos private[Token](
val line: Int,
val offset: Symbol.Offset,
- val file: String)
+ val file: String,
+ val id: String)
extends scala.util.parsing.input.Position
{
def column = 0
@@ -179,14 +182,15 @@
if (offset1 > 0) offset1 += 1
}
if (line1 == line && offset1 == offset) this
- else new Pos(line1, offset1, file)
+ else new Pos(line1, offset1, file, id)
}
private def position(end_offset: Symbol.Offset): Position.T =
(if (line > 0) Position.Line(line) else Nil) :::
(if (offset > 0) Position.Offset(offset) else Nil) :::
(if (end_offset > 0) Position.End_Offset(end_offset) else Nil) :::
- (if (file != "") Position.File(file) else Nil)
+ (if (file != "") Position.File(file) else Nil) :::
+ (if (id != "") Position.Id_String(id) else Nil)
def position(): Position.T = position(0)
def position(token: Token): Position.T = position(advance(token).offset)
@@ -203,8 +207,8 @@
def atEnd = tokens.isEmpty
}
- def reader(tokens: List[Token], file: String): Reader =
- new Token_Reader(tokens, Pos.file(file))
+ def reader(tokens: List[Token], start: Token.Pos): Reader =
+ new Token_Reader(tokens, start)
}
@@ -212,8 +216,7 @@
{
def is_command: Boolean = kind == Token.Kind.COMMAND
def is_command_kind(keywords: Keyword.Keywords, pred: String => Boolean): Boolean =
- is_command &&
- (keywords.command_kind(source) match { case Some(k) => pred(k) case None => false })
+ is_command && keywords.is_command_kind(source, pred)
def is_keyword: Boolean = kind == Token.Kind.KEYWORD
def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
def is_ident: Boolean = kind == Token.Kind.IDENT
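
Note: Token.Pos now carries an id alongside the file name, and token
readers start from an explicit Token.Pos rather than a bare file string.
A sketch of the two kinds of start position, assuming toks: List[Token]:

    // batch use: a real file, counting from line 1 / offset 1
    Token.reader(toks, Token.Pos.file("ROOT"))

    // PIDE use: no file, but a symbolic id resolved later by the protocol
    val start = Token.Pos.id(Markup.COMMAND)
    start.position()   // == List(("offset", "1"), ("id", "command"))
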
--- a/src/Pure/PIDE/command.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/command.scala Sun Mar 15 23:46:00 2015 +0100
@@ -10,13 +10,16 @@
import scala.collection.mutable
import scala.collection.immutable.SortedMap
+import scala.util.parsing.input.CharSequenceReader
object Command
{
type Edit = (Option[Command], Option[Command])
+
type Blob = Exn.Result[(Document.Node.Name, Option[(SHA1.Digest, Symbol.Text_Chunk)])]
-
+ type Blobs_Info = (List[Blob], Int)
+ val no_blobs: Blobs_Info = (Nil, -1)
/** accumulated results from prover **/
@@ -253,17 +256,15 @@
def apply(
id: Document_ID.Command,
node_name: Document.Node.Name,
- blobs: Option[(List[Blob], Int)],
+ blobs_info: Blobs_Info,
span: Command_Span.Span): Command =
{
val (source, span1) = span.compact_source
- val (blobs_list, blobs_index) = blobs.getOrElse((Nil, -1))
- new Command(
- id, node_name, blobs_list, blobs_index, span1, source, Results.empty, Markup_Tree.empty)
+ new Command(id, node_name, blobs_info, span1, source, Results.empty, Markup_Tree.empty)
}
val empty: Command =
- Command(Document_ID.none, Document.Node.Name.empty, None, Command_Span.empty)
+ Command(Document_ID.none, Document.Node.Name.empty, no_blobs, Command_Span.empty)
def unparsed(
id: Document_ID.Command,
@@ -272,7 +273,7 @@
markup: Markup_Tree): Command =
{
val (source1, span1) = Command_Span.unparsed(source).compact_source
- new Command(id, Document.Node.Name.empty, Nil, -1, span1, source1, results, markup)
+ new Command(id, Document.Node.Name.empty, no_blobs, span1, source1, results, markup)
}
def unparsed(source: String): Command =
@@ -309,7 +310,7 @@
}
- /* resolve inlined files */
+ /* blobs: inlined errors and auxiliary files */
private def clean_tokens(tokens: List[Token]): List[(Token, Int)] =
{
@@ -348,24 +349,40 @@
case _ => (Nil, -1)
}
- def resolve_files(
- resources: Resources,
- syntax: Prover.Syntax,
- node_name: Document.Node.Name,
- span: Command_Span.Span,
- get_blob: Document.Node.Name => Option[Document.Blob])
- : (List[Command.Blob], Int) =
+ def blobs_info(
+ resources: Resources,
+ syntax: Prover.Syntax,
+ get_blob: Document.Node.Name => Option[Document.Blob],
+ can_import: Document.Node.Name => Boolean,
+ node_name: Document.Node.Name,
+ span: Command_Span.Span): Blobs_Info =
{
- val (files, index) = span_files(syntax, span)
- val blobs =
- files.map(file =>
- (Exn.capture {
- val name =
- Document.Node.Name(resources.append(node_name.master_dir, Path.explode(file)))
- val blob = get_blob(name).map(blob => ((blob.bytes.sha1_digest, blob.chunk)))
- (name, blob)
- }).user_error)
- (blobs, index)
+ span.kind match {
+ // inlined errors
+ case Command_Span.Command_Span(name, _) if syntax.is_theory_begin(name) =>
+ val header =
+ resources.check_thy_reader("", node_name,
+ new CharSequenceReader(span.source), Token.Pos.id(Markup.COMMAND))
+ val import_errors =
+ for ((imp, pos) <- header.imports if !can_import(imp)) yield {
+ val name = imp.node
+ "Bad theory import " + Markup.Path(name).markup(quote(name)) + Position.here(pos)
+ }
+ ((header.errors ::: import_errors).map(msg => Exn.Exn(ERROR(msg)): Command.Blob), -1)
+
+ // auxiliary files
+ case _ =>
+ val (files, index) = span_files(syntax, span)
+ val blobs =
+ files.map(file =>
+ (Exn.capture {
+ val name =
+ Document.Node.Name(resources.append(node_name.master_dir, Path.explode(file)))
+ val blob = get_blob(name).map(blob => ((blob.bytes.sha1_digest, blob.chunk)))
+ (name, blob)
+ }).user_error)
+ (blobs, index)
+ }
}
}
@@ -373,8 +390,7 @@
final class Command private(
val id: Document_ID.Command,
val node_name: Document.Node.Name,
- val blobs: List[Command.Blob],
- val blobs_index: Int,
+ val blobs_info: Command.Blobs_Info,
val span: Command_Span.Span,
val source: String,
val init_results: Command.Results,
@@ -403,6 +419,9 @@
/* blobs */
+ def blobs: List[Command.Blob] = blobs_info._1
+ def blobs_index: Int = blobs_info._2
+
def blobs_names: List[Document.Node.Name] =
for (Exn.Res((name, _)) <- blobs) yield name
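
Note: resolve_files is generalized to blobs_info, so a Blobs_Info now
comes in two shapes. A sketch with hypothetical name/index values:

    // theory-begin span: header and import errors are inlined as
    // Exn.Exn blobs; index -1 means "no auxiliary-file token"
    val inlined: Command.Blobs_Info =
      (List(Exn.Exn(ERROR("Bad theory import ...")): Command.Blob), -1)

    // other spans: resolved auxiliary files plus the index of the
    // token that mentions them (name and index are placeholders here)
    val aux: Command.Blobs_Info =
      (List(Exn.Res((name, None)): Command.Blob), index)
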
--- a/src/Pure/PIDE/command_span.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/command_span.scala Sun Mar 15 23:46:00 2015 +0100
@@ -28,23 +28,24 @@
{
def length: Int = (0 /: content)(_ + _.source.length)
+ def source: String =
+ content match {
+ case List(tok) => tok.source
+ case toks => toks.map(_.source).mkString
+ }
+
def compact_source: (String, Span) =
{
- val source: String =
- content match {
- case List(tok) => tok.source
- case toks => toks.map(_.source).mkString
- }
-
+ val src = source
val content1 = new mutable.ListBuffer[Token]
var i = 0
for (Token(kind, s) <- content) {
val n = s.length
- val s1 = source.substring(i, i + n)
+ val s1 = src.substring(i, i + n)
content1 += Token(kind, s1)
i += n
}
- (source, Span(kind, content1.toList))
+ (src, Span(kind, content1.toList))
}
}
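
Note: Span.source is factored out of compact_source because
Command.blobs_info (above) feeds the raw span text back into the theory
header check:

    // from command.scala above: re-parse the span as a theory header
    resources.check_thy_reader("", node_name,
      new CharSequenceReader(span.source), Token.Pos.id(Markup.COMMAND))
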
--- a/src/Pure/PIDE/document.ML Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/document.ML Sun Mar 15 23:46:00 2015 +0100
@@ -365,7 +365,7 @@
(* commands *)
-fun define_command command_id name command_blobs blobs_index toks =
+fun define_command command_id name blobs_digests blobs_index toks =
map_state (fn (versions, blobs, commands, execution) =>
let
val id = Document_ID.print command_id;
@@ -375,17 +375,19 @@
(fn () =>
let
val (tokens, _) = fold_map Token.make toks (Position.id id);
- val pos =
- Token.pos_of (nth tokens blobs_index)
- handle General.Subscript => Position.none;
val _ =
- if Position.is_reported pos then
- Position.reports
- ((pos, Markup.language_path) :: maps (blob_reports pos) command_blobs)
- else ();
+ if blobs_index < 0
+ then (*inlined errors*)
+ map_filter Exn.get_exn blobs_digests
+ |> List.app (Output.error_message o Runtime.exn_message)
+ else (*auxiliary files*)
+ let val pos = Token.pos_of (nth tokens blobs_index) in
+ Position.reports
+ ((pos, Markup.language_path) :: maps (blob_reports pos) blobs_digests)
+ end;
in tokens end) ());
val commands' =
- Inttab.update_new (command_id, (name, command_blobs, blobs_index, span)) commands
+ Inttab.update_new (command_id, (name, blobs_digests, blobs_index, span)) commands
handle Inttab.DUP dup => err_dup "command" dup;
val _ =
Position.setmp_thread_data (Position.id_only id)
--- a/src/Pure/PIDE/document.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/document.scala Sun Mar 15 23:46:00 2015 +0100
@@ -256,6 +256,8 @@
Node.is_no_perspective_command(perspective) &&
commands.isEmpty
+ def has_header: Boolean = header != Node.no_header
+
def commands: Linear_Set[Command] = _commands.commands
def load_commands: List[Command] = _commands.load_commands
--- a/src/Pure/PIDE/markup.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/markup.scala Sun Mar 15 23:46:00 2015 +0100
@@ -504,4 +504,7 @@
sealed case class Markup(name: String, properties: Properties.T)
-
+{
+ def markup(s: String): String =
+ YXML.string_of_tree(XML.Elem(this, List(XML.Text(s))))
+}
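
Note: Markup.markup is the small convenience behind Position.here and the
new import-error messages; by definition it wraps plain text in this
markup element and renders the result as a YXML string:

    // for some markup m and text s:
    //   m.markup(s) == YXML.string_of_tree(XML.Elem(m, List(XML.Text(s))))
    Markup(Markup.POSITION, pos).markup(" (line 42)")   // pos: Properties.T
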
--- a/src/Pure/PIDE/protocol.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/protocol.scala Sun Mar 15 23:46:00 2015 +0100
@@ -382,6 +382,29 @@
def define_blob(digest: SHA1.Digest, bytes: Bytes): Unit =
protocol_command_bytes("Document.define_blob", Bytes(digest.toString), bytes)
+ private def resolve_id(id: String, body: XML.Body): XML.Body =
+ {
+ def resolve_property(p: (String, String)): (String, String) =
+ if (p._1 == Markup.ID && p._2 == Markup.COMMAND) (Markup.ID, id) else p
+
+ def resolve_markup(markup: Markup): Markup =
+ Markup(markup.name, markup.properties.map(resolve_property))
+
+ def resolve_tree(t: XML.Tree): XML.Tree =
+ t match {
+ case XML.Wrapped_Elem(markup, ts1, ts2) =>
+ XML.Wrapped_Elem(resolve_markup(markup), ts1.map(resolve_tree _), ts2.map(resolve_tree _))
+ case XML.Elem(markup, ts) =>
+ XML.Elem(resolve_markup(markup), ts.map(resolve_tree _))
+ case text => text
+ }
+ body.map(resolve_tree _)
+ }
+
+ private def resolve_id(id: String, s: String): XML.Body =
+ try { resolve_id(id, YXML.parse_body(s)) }
+ catch { case ERROR(_) => XML.Encode.string(s) }
+
def define_command(command: Command)
{
val blobs_yxml =
@@ -390,7 +413,7 @@
variant(List(
{ case Exn.Res((a, b)) =>
(Nil, pair(string, option(string))((a.node, b.map(p => p._1.toString)))) },
- { case Exn.Exn(e) => (Nil, string(Exn.message(e))) }))
+ { case Exn.Exn(e) => (Nil, resolve_id(command.id.toString, Exn.message(e))) }))
YXML.string_of_body(pair(list(encode_blob), int)(command.blobs, command.blobs_index))
}
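
Note: import errors for a theory-begin span are produced before the
command id exists, so their positions carry the placeholder id "command"
(from Token.Pos.id(Markup.COMMAND) in command.scala). resolve_id patches
that placeholder once the command is defined, conceptually:

    // every (Markup.ID, "command") property inside the message body
    // becomes (Markup.ID, "12345"), making the position resolvable
    resolve_id("12345", Exn.message(e))
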
--- a/src/Pure/PIDE/prover.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/prover.scala Sun Mar 15 23:46:00 2015 +0100
@@ -20,6 +20,7 @@
def ++ (other: Syntax): Syntax
def add_keywords(keywords: Thy_Header.Keywords): Syntax
def parse_spans(input: CharSequence): List[Command_Span.Span]
+ def is_theory_begin(name: String): Boolean
def load_command(name: String): Option[List[String]]
def load_commands_in(text: String): Boolean
}
--- a/src/Pure/PIDE/resources.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/PIDE/resources.scala Sun Mar 15 23:46:00 2015 +0100
@@ -86,12 +86,12 @@
}
}
- def check_thy_reader(qualifier: String, node_name: Document.Node.Name, reader: Reader[Char])
- : Document.Node.Header =
+ def check_thy_reader(qualifier: String, node_name: Document.Node.Name,
+ reader: Reader[Char], start: Token.Pos): Document.Node.Header =
{
if (reader.source.length > 0) {
try {
- val header = Thy_Header.read(reader, node_name.node).decode_symbols
+ val header = Thy_Header.read(reader, start).decode_symbols
val base_name = Long_Name.base_name(node_name.theory)
val (name, pos) = header.name
@@ -108,8 +108,9 @@
else Document.Node.no_header
}
- def check_thy(qualifier: String, name: Document.Node.Name): Document.Node.Header =
- with_thy_reader(name, check_thy_reader(qualifier, name, _))
+ def check_thy(qualifier: String, name: Document.Node.Name, start: Token.Pos)
+ : Document.Node.Header =
+ with_thy_reader(name, check_thy_reader(qualifier, name, _, start))
def check_file(file: String): Boolean =
try {
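
Note: callers of check_thy/check_thy_reader now say where header scanning
starts, as the remaining hunks show:

    // batch and editor buffers: a real file position
    resources.check_thy(session, name, Token.Pos.file(name.node))

    // PIDE command spans: no file, a symbolic command id instead
    resources.check_thy_reader("", node_name, reader,
      Token.Pos.id(Markup.COMMAND))
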
--- a/src/Pure/System/options.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/System/options.scala Sun Mar 15 23:46:00 2015 +0100
@@ -110,7 +110,7 @@
{
val toks = Token.explode(syntax.keywords, File.read(file))
val ops =
- parse_all(rep(parser), Token.reader(toks, file.implode)) match {
+ parse_all(rep(parser), Token.reader(toks, Token.Pos.file(file.implode))) match {
case Success(result, _) => result
case bad => error(bad.toString)
}
--- a/src/Pure/Thy/thy_header.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Thy/thy_header.scala Sun Mar 15 23:46:00 2015 +0100
@@ -124,7 +124,7 @@
/* read -- lazy scanning */
- def read(reader: Reader[Char], file: String): Thy_Header =
+ def read(reader: Reader[Char], start: Token.Pos): Thy_Header =
{
val token = Token.Parsers.token(bootstrap_keywords)
val toks = new mutable.ListBuffer[Token]
@@ -140,14 +140,14 @@
}
scan_to_begin(reader)
- parse(commit(header), Token.reader(toks.toList, file)) match {
+ parse(commit(header), Token.reader(toks.toList, start)) match {
case Success(result, _) => result
case bad => error(bad.toString)
}
}
- def read(source: CharSequence, file: String): Thy_Header =
- read(new CharSequenceReader(source), file)
+ def read(source: CharSequence, start: Token.Pos): Thy_Header =
+ read(new CharSequenceReader(source), start)
}
--- a/src/Pure/Thy/thy_info.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Thy/thy_info.scala Sun Mar 15 23:46:00 2015 +0100
@@ -134,7 +134,7 @@
try {
if (initiators.contains(name)) error(cycle_msg(initiators))
val header =
- try { resources.check_thy(session, name).cat_errors(message) }
+ try { resources.check_thy(session, name, Token.Pos.file(name.node)).cat_errors(message) }
catch { case ERROR(msg) => cat_error(msg, message) }
Dep(name, header) :: require_thys(session, name :: initiators, required1, header.imports)
}
--- a/src/Pure/Thy/thy_syntax.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Thy/thy_syntax.scala Sun Mar 15 23:46:00 2015 +0100
@@ -63,7 +63,7 @@
- /** header edits: structure and outer syntax **/
+ /** header edits: graph structure and outer syntax **/
private def header_edits(
resources: Resources,
@@ -143,8 +143,8 @@
@tailrec private def chop_common(
cmds: List[Command],
- blobs_spans: List[((List[Command.Blob], Int), Command_Span.Span)])
- : (List[Command], List[((List[Command.Blob], Int), Command_Span.Span)]) =
+ blobs_spans: List[(Command.Blobs_Info, Command_Span.Span)])
+ : (List[Command], List[(Command.Blobs_Info, Command_Span.Span)]) =
{
(cmds, blobs_spans) match {
case (cmd :: cmds, (blobs, span) :: rest) if cmd.blobs == blobs && cmd.span == span =>
@@ -157,14 +157,15 @@
resources: Resources,
syntax: Prover.Syntax,
get_blob: Document.Node.Name => Option[Document.Blob],
- name: Document.Node.Name,
+ can_import: Document.Node.Name => Boolean,
+ node_name: Document.Node.Name,
commands: Linear_Set[Command],
first: Command, last: Command): Linear_Set[Command] =
{
val cmds0 = commands.iterator(first, last).toList
val blobs_spans0 =
- syntax.parse_spans(cmds0.iterator.map(_.source).mkString).
- map(span => (Command.resolve_files(resources, syntax, name, span, get_blob), span))
+ syntax.parse_spans(cmds0.iterator.map(_.source).mkString).map(span =>
+ (Command.blobs_info(resources, syntax, get_blob, can_import, node_name, span), span))
val (cmds1, blobs_spans1) = chop_common(cmds0, blobs_spans0)
@@ -180,75 +181,12 @@
val hook = commands.prev(cmd)
val inserted =
blobs_spans2.map({ case (blobs, span) =>
- Command(Document_ID.make(), name, Some(blobs), span) })
+ Command(Document_ID.make(), node_name, blobs, span) })
(commands /: cmds2)(_ - _).append_after(hook, inserted)
}
}
- /* recover command spans after edits */
-
- // FIXME somewhat slow
- private def recover_spans(
- resources: Resources,
- syntax: Prover.Syntax,
- get_blob: Document.Node.Name => Option[Document.Blob],
- name: Document.Node.Name,
- perspective: Command.Perspective,
- commands: Linear_Set[Command]): Linear_Set[Command] =
- {
- val visible = perspective.commands.toSet
-
- def next_invisible_command(cmds: Linear_Set[Command], from: Command): Command =
- cmds.iterator(from).dropWhile(cmd => !cmd.is_proper || visible(cmd))
- .find(_.is_proper) getOrElse cmds.last
-
- @tailrec def recover(cmds: Linear_Set[Command]): Linear_Set[Command] =
- cmds.find(_.is_unparsed) match {
- case Some(first_unparsed) =>
- val first = next_invisible_command(cmds.reverse, first_unparsed)
- val last = next_invisible_command(cmds, first_unparsed)
- recover(reparse_spans(resources, syntax, get_blob, name, cmds, first, last))
- case None => cmds
- }
- recover(commands)
- }
-
-
- /* consolidate unfinished spans */
-
- private def consolidate_spans(
- resources: Resources,
- syntax: Prover.Syntax,
- get_blob: Document.Node.Name => Option[Document.Blob],
- reparse_limit: Int,
- name: Document.Node.Name,
- perspective: Command.Perspective,
- commands: Linear_Set[Command]): Linear_Set[Command] =
- {
- if (perspective.commands.isEmpty) commands
- else {
- commands.find(_.is_unfinished) match {
- case Some(first_unfinished) =>
- val visible = perspective.commands.toSet
- commands.reverse.find(visible) match {
- case Some(last_visible) =>
- val it = commands.iterator(last_visible)
- var last = last_visible
- var i = 0
- while (i < reparse_limit && it.hasNext) {
- last = it.next
- i += last.length
- }
- reparse_spans(resources, syntax, get_blob, name, commands, first_unfinished, last)
- case None => commands
- }
- case None => commands
- }
- }
- }
-
-
/* main */
def diff_commands(old_cmds: Linear_Set[Command], new_cmds: Linear_Set[Command])
@@ -265,9 +203,35 @@
resources: Resources,
syntax: Prover.Syntax,
get_blob: Document.Node.Name => Option[Document.Blob],
+ can_import: Document.Node.Name => Boolean,
reparse_limit: Int,
node: Document.Node, edit: Document.Edit_Text): Document.Node =
{
+ /* recover command spans after edits */
+ // FIXME somewhat slow
+ def recover_spans(
+ name: Document.Node.Name,
+ perspective: Command.Perspective,
+ commands: Linear_Set[Command]): Linear_Set[Command] =
+ {
+ val is_visible = perspective.commands.toSet
+
+ def next_invisible(cmds: Linear_Set[Command], from: Command): Command =
+ cmds.iterator(from).dropWhile(cmd => !cmd.is_proper || is_visible(cmd))
+ .find(_.is_proper) getOrElse cmds.last
+
+ @tailrec def recover(cmds: Linear_Set[Command]): Linear_Set[Command] =
+ cmds.find(_.is_unparsed) match {
+ case Some(first_unparsed) =>
+ val first = next_invisible(cmds.reverse, first_unparsed)
+ val last = next_invisible(cmds, first_unparsed)
+ recover(
+ reparse_spans(resources, syntax, get_blob, can_import, name, cmds, first, last))
+ case None => cmds
+ }
+ recover(commands)
+ }
+
edit match {
case (_, Document.Node.Clear()) => node.clear
@@ -277,8 +241,7 @@
if (name.is_theory) {
val commands0 = node.commands
val commands1 = edit_text(text_edits, commands0)
- val commands2 =
- recover_spans(resources, syntax, get_blob, name, node.perspective.visible, commands1)
+ val commands2 = recover_spans(name, node.perspective.visible, commands1)
node.update_commands(commands2)
}
else node
@@ -290,11 +253,33 @@
val perspective: Document.Node.Perspective_Command =
Document.Node.Perspective(required, visible_overlay, overlays)
if (node.same_perspective(text_perspective, perspective)) node
- else
- node.update_perspective(text_perspective, perspective).
- update_commands(
- consolidate_spans(resources, syntax, get_blob, reparse_limit,
- name, visible, node.commands))
+ else {
+ /* consolidate unfinished spans */
+ val is_visible = visible.commands.toSet
+ val commands = node.commands
+ val commands1 =
+ if (is_visible.isEmpty) commands
+ else {
+ commands.find(_.is_unfinished) match {
+ case Some(first_unfinished) =>
+ commands.reverse.find(is_visible) match {
+ case Some(last_visible) =>
+ val it = commands.iterator(last_visible)
+ var last = last_visible
+ var i = 0
+ while (i < reparse_limit && it.hasNext) {
+ last = it.next
+ i += last.length
+ }
+ reparse_spans(resources, syntax, get_blob, can_import,
+ name, commands, first_unfinished, last)
+ case None => commands
+ }
+ case None => commands
+ }
+ }
+ node.update_perspective(text_perspective, perspective).update_commands(commands1)
+ }
}
}
@@ -305,10 +290,13 @@
doc_blobs: Document.Blobs,
edits: List[Document.Edit_Text]): Session.Change =
{
+ val (syntax_changed, nodes0, doc_edits0) = header_edits(resources, previous, edits)
+
def get_blob(name: Document.Node.Name) =
doc_blobs.get(name) orElse previous.nodes(name).get_blob
- val (syntax_changed, nodes0, doc_edits0) = header_edits(resources, previous, edits)
+ def can_import(name: Document.Node.Name): Boolean =
+ resources.loaded_theories(name.theory) || nodes0(name).has_header
val (doc_edits, version) =
if (edits.isEmpty) (Nil, Document.Version.make(previous.nodes))
@@ -338,14 +326,15 @@
val node1 =
if (reparse_set(name) && commands.nonEmpty)
node.update_commands(
- reparse_spans(resources, syntax, get_blob,
- name, commands, commands.head, commands.last))
+ reparse_spans(resources, syntax, get_blob, can_import, name,
+ commands, commands.head, commands.last))
else node
val node2 =
- (node1 /: edits)(text_edit(resources, syntax, get_blob, reparse_limit, _, _))
+ (node1 /: edits)(
+ text_edit(resources, syntax, get_blob, can_import, reparse_limit, _, _))
val node3 =
if (reparse_set.contains(name))
- text_edit(resources, syntax, get_blob, reparse_limit,
+ text_edit(resources, syntax, get_blob, can_import, reparse_limit,
node2, (name, node2.edit_perspective))
else node2
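
Note: recover_spans becomes a local definition of text_edit and
consolidate_spans is inlined at its single use, both closing over
resources, syntax, get_blob and the new can_import. The can_import
predicate itself closes over nodes0, the node map of the version being
built, so a header edited in the same batch already counts:

    // from text_edits above
    def can_import(name: Document.Node.Name): Boolean =
      resources.loaded_theories(name.theory) || nodes0(name).has_header
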
--- a/src/Pure/Tools/build.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Pure/Tools/build.scala Sun Mar 15 23:46:00 2015 +0100
@@ -260,8 +260,9 @@
def parse_entries(root: Path): List[(String, Session_Entry)] =
{
val toks = Token.explode(root_syntax.keywords, File.read(root))
+ val start = Token.Pos.file(root.implode)
- parse_all(rep(chapter | session_entry), Token.reader(toks, root.implode)) match {
+ parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match {
case Success(result, _) =>
var chapter = chapter_default
val entries = new mutable.ListBuffer[(String, Session_Entry)]
--- a/src/Tools/jEdit/src/document_model.scala Sun Mar 15 22:00:15 2015 +0100
+++ b/src/Tools/jEdit/src/document_model.scala Sun Mar 15 23:46:00 2015 +0100
@@ -79,7 +79,8 @@
if (is_theory) {
JEdit_Lib.buffer_lock(buffer) {
Exn.capture {
- PIDE.resources.check_thy_reader("", node_name, JEdit_Lib.buffer_reader(buffer))
+ PIDE.resources.check_thy_reader("", node_name,
+ JEdit_Lib.buffer_reader(buffer), Token.Pos.file(node_name.node))
} match {
case Exn.Res(header) => header
case Exn.Exn(exn) => Document.Node.bad_header(Exn.message(exn))