hybrid use of command blobs: inlined errors and auxiliary files;
static check of theory imports;
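
For orientation (editorial note, not part of the changeset): a minimal, self-contained Scala sketch of the "hybrid" blob representation introduced below. The pairing convention follows the diff (Blobs_Info = list of blobs plus a token index; index -1 marks the inlined-errors case, a non-negative index points at the token carrying the auxiliary file names), while NodeName, Header, and the parameters of blobsInfo are simplified stand-ins, not the actual Isabelle PIDE API.

object BlobsSketch {
  // stand-ins for Document.Node.Name / Document.Node.Header
  final case class NodeName(theory: String)
  final case class Header(imports: List[NodeName], errors: List[String])

  // stand-in for Command.Blob: an error message or a resolved auxiliary file
  type Blob = Either[String, NodeName]

  // as in the diff: blobs plus the index of the file-argument token;
  // -1 means "no auxiliary files" (also used for the inlined-errors case)
  type BlobsInfo = (List[Blob], Int)
  val noBlobs: BlobsInfo = (Nil, -1)

  // "static check of theory imports": an import is acceptable if the theory
  // is already loaded or its header is known (cf. can_import in the diff)
  def blobsInfo(
      isTheoryBegin: Boolean,
      canImport: NodeName => Boolean,
      header: Header,
      auxiliaryFiles: List[NodeName],
      fileIndex: Int): BlobsInfo =
    if (isTheoryBegin) {
      // theory header: turn bad imports (and header errors) into error blobs
      val importErrors =
        for (imp <- header.imports if !canImport(imp))
          yield "Bad theory import " + imp.theory
      ((header.errors ::: importErrors).map(Left(_)), -1)
    }
    else (auxiliaryFiles.map(Right(_)), fileIndex)  // auxiliary files, as before
}

The ML side (document.ML below) reads the same convention back: a negative blobs_index makes define_command report the inlined errors as error messages, while a non-negative index produces the language_path and blob markup reports for the auxiliary-file token.
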
--- a/src/Pure/Isar/outer_syntax.scala Sun Mar 15 14:46:01 2015 +0100
+++ b/src/Pure/Isar/outer_syntax.scala Sun Mar 15 19:21:15 2015 +0100
@@ -123,7 +123,9 @@
}
- /* load commands */
+ /* command categories */
+
+ def is_theory_begin(name: String): Boolean = keywords.is_command_kind(name, Keyword.theory_begin)
def load_command(name: String): Option[List[String]] = keywords.load_command(name)
def load_commands_in(text: String): Boolean = keywords.load_commands_in(text)
@@ -284,7 +286,7 @@
/* result structure */
val spans = parse_spans(text)
- spans.foreach(span => add(Command(Document_ID.none, node_name, None, span)))
+ spans.foreach(span => add(Command(Document_ID.none, node_name, Command.no_blobs, span)))
result()
}
}
--- a/src/Pure/PIDE/command.scala Sun Mar 15 14:46:01 2015 +0100
+++ b/src/Pure/PIDE/command.scala Sun Mar 15 19:21:15 2015 +0100
@@ -15,8 +15,10 @@
object Command
{
type Edit = (Option[Command], Option[Command])
+
type Blob = Exn.Result[(Document.Node.Name, Option[(SHA1.Digest, Symbol.Text_Chunk)])]
-
+ type Blobs_Info = (List[Blob], Int)
+ val no_blobs: Blobs_Info = (Nil, -1)
/** accumulated results from prover **/
@@ -253,17 +255,15 @@
def apply(
id: Document_ID.Command,
node_name: Document.Node.Name,
- blobs: Option[(List[Blob], Int)],
+ blobs_info: Blobs_Info,
span: Command_Span.Span): Command =
{
val (source, span1) = span.compact_source
- val (blobs_list, blobs_index) = blobs.getOrElse((Nil, -1))
- new Command(
- id, node_name, blobs_list, blobs_index, span1, source, Results.empty, Markup_Tree.empty)
+ new Command(id, node_name, blobs_info, span1, source, Results.empty, Markup_Tree.empty)
}
val empty: Command =
- Command(Document_ID.none, Document.Node.Name.empty, None, Command_Span.empty)
+ Command(Document_ID.none, Document.Node.Name.empty, no_blobs, Command_Span.empty)
def unparsed(
id: Document_ID.Command,
@@ -272,7 +272,7 @@
markup: Markup_Tree): Command =
{
val (source1, span1) = Command_Span.unparsed(source).compact_source
- new Command(id, Document.Node.Name.empty, Nil, -1, span1, source1, results, markup)
+ new Command(id, Document.Node.Name.empty, no_blobs, span1, source1, results, markup)
}
def unparsed(source: String): Command =
@@ -309,7 +309,7 @@
}
- /* resolve inlined files */
+ /* blobs: inlined errors and auxiliary files */
private def clean_tokens(tokens: List[Token]): List[(Token, Int)] =
{
@@ -348,23 +348,36 @@
case _ => (Nil, -1)
}
- def resolve_files(
+ def blobs_info(
resources: Resources,
syntax: Prover.Syntax,
get_blob: Document.Node.Name => Option[Document.Blob],
+ can_import: Document.Node.Name => Boolean,
node_name: Document.Node.Name,
- span: Command_Span.Span): (List[Command.Blob], Int) =
+ header: Document.Node.Header,
+ span: Command_Span.Span): Blobs_Info =
{
- val (files, index) = span_files(syntax, span)
- val blobs =
- files.map(file =>
- (Exn.capture {
- val name =
- Document.Node.Name(resources.append(node_name.master_dir, Path.explode(file)))
- val blob = get_blob(name).map(blob => ((blob.bytes.sha1_digest, blob.chunk)))
- (name, blob)
- }).user_error)
- (blobs, index)
+ span.kind match {
+ // inlined errors
+ case Command_Span.Command_Span(name, _) if syntax.is_theory_begin(name) =>
+ val import_errors =
+ for ((imp, pos) <- header.imports if !can_import(imp))
+ yield "Bad theory import " + quote(imp.node) + Position.here(pos)
+ ((header.errors ::: import_errors).map(msg => Exn.Exn(ERROR(msg)): Command.Blob), -1)
+
+ // auxiliary files
+ case _ =>
+ val (files, index) = span_files(syntax, span)
+ val blobs =
+ files.map(file =>
+ (Exn.capture {
+ val name =
+ Document.Node.Name(resources.append(node_name.master_dir, Path.explode(file)))
+ val blob = get_blob(name).map(blob => ((blob.bytes.sha1_digest, blob.chunk)))
+ (name, blob)
+ }).user_error)
+ (blobs, index)
+ }
}
}
@@ -372,8 +385,7 @@
final class Command private(
val id: Document_ID.Command,
val node_name: Document.Node.Name,
- val blobs: List[Command.Blob],
- val blobs_index: Int,
+ val blobs_info: Command.Blobs_Info,
val span: Command_Span.Span,
val source: String,
val init_results: Command.Results,
@@ -402,6 +414,9 @@
/* blobs */
+ def blobs: List[Command.Blob] = blobs_info._1
+ def blobs_index: Int = blobs_info._2
+
def blobs_names: List[Document.Node.Name] =
for (Exn.Res((name, _)) <- blobs) yield name
--- a/src/Pure/PIDE/document.ML Sun Mar 15 14:46:01 2015 +0100
+++ b/src/Pure/PIDE/document.ML Sun Mar 15 19:21:15 2015 +0100
@@ -365,7 +365,7 @@
(* commands *)
-fun define_command command_id name command_blobs blobs_index toks =
+fun define_command command_id name blobs_digests blobs_index toks =
map_state (fn (versions, blobs, commands, execution) =>
let
val id = Document_ID.print command_id;
@@ -375,17 +375,19 @@
(fn () =>
let
val (tokens, _) = fold_map Token.make toks (Position.id id);
- val pos =
- Token.pos_of (nth tokens blobs_index)
- handle General.Subscript => Position.none;
val _ =
- if Position.is_reported pos then
- Position.reports
- ((pos, Markup.language_path) :: maps (blob_reports pos) command_blobs)
- else ();
+ if blobs_index < 0
+ then (*inlined errors*)
+ map_filter Exn.get_exn blobs_digests
+ |> List.app (Output.error_message o Runtime.exn_message)
+ else (*auxiliary files*)
+ let val pos = Token.pos_of (nth tokens blobs_index) in
+ Position.reports
+ ((pos, Markup.language_path) :: maps (blob_reports pos) blobs_digests)
+ end;
in tokens end) ());
val commands' =
- Inttab.update_new (command_id, (name, command_blobs, blobs_index, span)) commands
+ Inttab.update_new (command_id, (name, blobs_digests, blobs_index, span)) commands
handle Inttab.DUP dup => err_dup "command" dup;
val _ =
Position.setmp_thread_data (Position.id_only id)
--- a/src/Pure/PIDE/document.scala Sun Mar 15 14:46:01 2015 +0100
+++ b/src/Pure/PIDE/document.scala Sun Mar 15 19:21:15 2015 +0100
@@ -256,6 +256,8 @@
Node.is_no_perspective_command(perspective) &&
commands.isEmpty
+ def has_header: Boolean = header != Node.no_header
+
def commands: Linear_Set[Command] = _commands.commands
def load_commands: List[Command] = _commands.load_commands
--- a/src/Pure/PIDE/prover.scala Sun Mar 15 14:46:01 2015 +0100
+++ b/src/Pure/PIDE/prover.scala Sun Mar 15 19:21:15 2015 +0100
@@ -20,6 +20,7 @@
def ++ (other: Syntax): Syntax
def add_keywords(keywords: Thy_Header.Keywords): Syntax
def parse_spans(input: CharSequence): List[Command_Span.Span]
+ def is_theory_begin(name: String): Boolean
def load_command(name: String): Option[List[String]]
def load_commands_in(text: String): Boolean
}
--- a/src/Pure/Thy/thy_syntax.scala Sun Mar 15 14:46:01 2015 +0100
+++ b/src/Pure/Thy/thy_syntax.scala Sun Mar 15 19:21:15 2015 +0100
@@ -143,8 +143,8 @@
@tailrec private def chop_common(
cmds: List[Command],
- blobs_spans: List[((List[Command.Blob], Int), Command_Span.Span)])
- : (List[Command], List[((List[Command.Blob], Int), Command_Span.Span)]) =
+ blobs_spans: List[(Command.Blobs_Info, Command_Span.Span)])
+ : (List[Command], List[(Command.Blobs_Info, Command_Span.Span)]) =
{
(cmds, blobs_spans) match {
case (cmd :: cmds, (blobs, span) :: rest) if cmd.blobs == blobs && cmd.span == span =>
@@ -157,14 +157,17 @@
resources: Resources,
syntax: Prover.Syntax,
get_blob: Document.Node.Name => Option[Document.Blob],
+ can_import: Document.Node.Name => Boolean,
node_name: Document.Node.Name,
+ header: Document.Node.Header,
commands: Linear_Set[Command],
first: Command, last: Command): Linear_Set[Command] =
{
val cmds0 = commands.iterator(first, last).toList
val blobs_spans0 =
- syntax.parse_spans(cmds0.iterator.map(_.source).mkString).
- map(span => (Command.resolve_files(resources, syntax, get_blob, node_name, span), span))
+ syntax.parse_spans(cmds0.iterator.map(_.source).mkString).map(span =>
+ (Command.blobs_info(resources, syntax, get_blob, can_import, node_name, header, span),
+ span))
val (cmds1, blobs_spans1) = chop_common(cmds0, blobs_spans0)
@@ -180,7 +183,7 @@
val hook = commands.prev(cmd)
val inserted =
blobs_spans2.map({ case (blobs, span) =>
- Command(Document_ID.make(), node_name, Some(blobs), span) })
+ Command(Document_ID.make(), node_name, blobs, span) })
(commands /: cmds2)(_ - _).append_after(hook, inserted)
}
}
@@ -193,7 +196,9 @@
resources: Resources,
syntax: Prover.Syntax,
get_blob: Document.Node.Name => Option[Document.Blob],
- name: Document.Node.Name,
+ can_import: Document.Node.Name => Boolean,
+ node_name: Document.Node.Name,
+ header: Document.Node.Header,
perspective: Command.Perspective,
commands: Linear_Set[Command]): Linear_Set[Command] =
{
@@ -208,7 +213,8 @@
case Some(first_unparsed) =>
val first = next_invisible_command(cmds.reverse, first_unparsed)
val last = next_invisible_command(cmds, first_unparsed)
- recover(reparse_spans(resources, syntax, get_blob, name, cmds, first, last))
+ recover(reparse_spans(
+ resources, syntax, get_blob, can_import, node_name, header, cmds, first, last))
case None => cmds
}
recover(commands)
@@ -221,8 +227,10 @@
resources: Resources,
syntax: Prover.Syntax,
get_blob: Document.Node.Name => Option[Document.Blob],
+ can_import: Document.Node.Name => Boolean,
reparse_limit: Int,
- name: Document.Node.Name,
+ node_name: Document.Node.Name,
+ header: Document.Node.Header,
perspective: Command.Perspective,
commands: Linear_Set[Command]): Linear_Set[Command] =
{
@@ -240,7 +248,8 @@
last = it.next
i += last.length
}
- reparse_spans(resources, syntax, get_blob, name, commands, first_unfinished, last)
+ reparse_spans(resources, syntax, get_blob, can_import, node_name,
+ header, commands, first_unfinished, last)
case None => commands
}
case None => commands
@@ -265,6 +274,7 @@
resources: Resources,
syntax: Prover.Syntax,
get_blob: Document.Node.Name => Option[Document.Blob],
+ can_import: Document.Node.Name => Boolean,
reparse_limit: Int,
node: Document.Node, edit: Document.Edit_Text): Document.Node =
{
@@ -278,7 +288,8 @@
val commands0 = node.commands
val commands1 = edit_text(text_edits, commands0)
val commands2 =
- recover_spans(resources, syntax, get_blob, name, node.perspective.visible, commands1)
+ recover_spans(resources, syntax, get_blob, can_import, name,
+ node.header, node.perspective.visible, commands1)
node.update_commands(commands2)
}
else node
@@ -293,8 +304,8 @@
else
node.update_perspective(text_perspective, perspective).
update_commands(
- consolidate_spans(resources, syntax, get_blob, reparse_limit,
- name, visible, node.commands))
+ consolidate_spans(resources, syntax, get_blob, can_import, reparse_limit,
+ name, node.header, visible, node.commands))
}
}
@@ -305,10 +316,13 @@
doc_blobs: Document.Blobs,
edits: List[Document.Edit_Text]): Session.Change =
{
+ val (syntax_changed, nodes0, doc_edits0) = header_edits(resources, previous, edits)
+
def get_blob(name: Document.Node.Name) =
doc_blobs.get(name) orElse previous.nodes(name).get_blob
- val (syntax_changed, nodes0, doc_edits0) = header_edits(resources, previous, edits)
+ def can_import(name: Document.Node.Name): Boolean =
+ resources.loaded_theories(name.theory) || nodes0(name).has_header
val (doc_edits, version) =
if (edits.isEmpty) (Nil, Document.Version.make(previous.nodes))
@@ -338,14 +352,15 @@
val node1 =
if (reparse_set(name) && commands.nonEmpty)
node.update_commands(
- reparse_spans(resources, syntax, get_blob,
- name, commands, commands.head, commands.last))
+ reparse_spans(resources, syntax, get_blob, can_import, name,
+ node.header, commands, commands.head, commands.last))
else node
val node2 =
- (node1 /: edits)(text_edit(resources, syntax, get_blob, reparse_limit, _, _))
+ (node1 /: edits)(
+ text_edit(resources, syntax, get_blob, can_import, reparse_limit, _, _))
val node3 =
if (reparse_set.contains(name))
- text_edit(resources, syntax, get_blob, reparse_limit,
+ text_edit(resources, syntax, get_blob, can_import, reparse_limit,
node2, (name, node2.edit_perspective))
else node2