# HG changeset patch
# User wenzelm
# Date 1606510763 -3600
# Node ID 5f9d661550811c2bba111626685ce6de1809dcd3
# Parent 049a71febf056205ede904871e8d91caa091c2c0
clarified theory keywords: loaded_files are determined statically in Scala, but ML needs to do it semantically;
propagate blob src_path from Scala to ML;
clarified signature;

diff -r 049a71febf05 -r 5f9d66155081 src/HOL/SMT_Examples/boogie.ML
--- a/src/HOL/SMT_Examples/boogie.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/HOL/SMT_Examples/boogie.ML Fri Nov 27 21:59:23 2020 +0100
@@ -268,10 +268,10 @@
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>boogie_file\<close>
     "prove verification condition from .b2i file"
-    (Resources.provide_parse_files "boogie_file" >> (fn files =>
+    (Resources.provide_parse_file >> (fn get_file =>
       Toplevel.theory (fn thy =>
         let
-          val ([{lines, ...}], thy') = files thy;
+          val ({lines, ...}, thy') = get_file thy;
           val _ = boogie_prove thy' lines;
         in thy' end)))
 
diff -r 049a71febf05 -r 5f9d66155081 src/HOL/SPARK/Tools/spark_commands.ML
--- a/src/HOL/SPARK/Tools/spark_commands.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/HOL/SPARK/Tools/spark_commands.ML Fri Nov 27 21:59:23 2020 +0100
@@ -94,13 +94,13 @@
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>spark_open_vcg\<close>
     "open a new SPARK environment and load a SPARK-generated .vcg file"
-    (Resources.provide_parse_files "spark_open_vcg" -- Parse.parname
+    (Resources.provide_parse_files (Resources.extensions ["vcg", "fdl", "rls"]) -- Parse.parname
       >> (Toplevel.theory o spark_open Fdl_Lexer.vcg_header));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>spark_open\<close>
     "open a new SPARK environment and load a SPARK-generated .siv file"
-    (Resources.provide_parse_files "spark_open" -- Parse.parname
+    (Resources.provide_parse_files (Resources.extensions ["siv", "fdl", "rls"]) -- Parse.parname
       >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
 
 val pfun_type = Scan.option
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/Isar/keyword.ML
--- a/src/Pure/Isar/keyword.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/Isar/keyword.ML Fri Nov 27 21:59:23 2020 +0100
@@ -38,7 +38,7 @@
   val prf_script_asm_goal: string
   val before_command: string
   val quasi_command: string
-  type spec = (string * string list) * string list
+  type spec = string * string list
   val no_spec: spec
   val before_command_spec: spec
   val quasi_command_spec: spec
@@ -57,7 +57,6 @@
   val dest_commands: keywords -> string list
   val command_markup: keywords -> string -> Markup.T option
   val command_kind: keywords -> string -> string option
-  val command_files: keywords -> string -> Path.T -> Path.T list
   val command_tags: keywords -> string -> string list
   val is_vacuous: keywords -> string -> bool
   val is_diag: keywords -> string -> bool
@@ -132,27 +131,24 @@
 
 (* specifications *)
 
-type spec = (string * string list) * string list;
+type spec = string * string list;
 
-val no_spec: spec = (("", []), []);
-val before_command_spec: spec = ((before_command, []), []);
-val quasi_command_spec: spec = ((quasi_command, []), []);
-val document_heading_spec: spec = (("document_heading", []), ["document"]);
-val document_body_spec: spec = (("document_body", []), ["document"]);
+val no_spec: spec = ("", []);
+val before_command_spec: spec = (before_command, []);
+val quasi_command_spec: spec = (quasi_command, []);
+val document_heading_spec: spec = ("document_heading", ["document"]);
+val document_body_spec: spec = ("document_body", ["document"]);
 
 type entry =
  {pos: Position.T,
   id: serial,
   kind: string,
-  files: string list,  (*extensions of embedded files*)
   tags: string list};
 
-fun check_spec pos ((kind, files), tags) : entry =
+fun check_spec pos (kind, tags) : entry =
   if not (member (op =) command_kinds kind) then
     error ("Unknown outer syntax keyword kind " ^ quote kind)
-  else if not (null files) andalso kind <> thy_load then
-    error ("Illegal specification of files for " ^ quote kind)
-  else {pos = pos, id = serial (), kind = kind, files = files, tags = tags};
+  else {pos = pos, id = serial (), kind = kind, tags = tags};
 
 
 (** keyword tables **)
@@ -191,7 +187,7 @@
     Symtab.merge (K true) (commands1, commands2));
 
 val add_keywords =
-  fold (fn ((name, pos), spec as ((kind, _), _)) => map_keywords (fn (minor, major, commands) =>
+  fold (fn ((name, pos), spec as (kind, _)) => map_keywords (fn (minor, major, commands) =>
     if kind = "" orelse kind = before_command orelse kind = quasi_command then
       let
         val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
@@ -225,14 +221,6 @@
 
 fun command_kind keywords = Option.map #kind o lookup_command keywords;
 
-fun command_files keywords name path =
-  (case lookup_command keywords name of
-    NONE => []
-  | SOME {kind, files, ...} =>
-      if kind <> thy_load then []
-      else if null files then [path]
-      else map (fn ext => Path.ext ext path) files);
-
 fun command_tags keywords name =
   (case lookup_command keywords name of
     SOME {tags, ...} => tags
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/ML/ml_file.ML
--- a/src/Pure/ML/ml_file.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/ML/ml_file.ML Fri Nov 27 21:59:23 2020 +0100
@@ -6,18 +6,18 @@
 
 signature ML_FILE =
 sig
-  val command: string -> bool option -> (theory -> Token.file list) ->
+  val command: string -> bool option -> (theory -> Token.file) ->
     Toplevel.transition -> Toplevel.transition
-  val ML: bool option -> (theory -> Token.file list) -> Toplevel.transition -> Toplevel.transition
-  val SML: bool option -> (theory -> Token.file list) -> Toplevel.transition -> Toplevel.transition
+  val ML: bool option -> (theory -> Token.file) -> Toplevel.transition -> Toplevel.transition
+  val SML: bool option -> (theory -> Token.file) -> Toplevel.transition -> Toplevel.transition
 end;
 
 structure ML_File: ML_FILE =
 struct
 
-fun command environment debug files = Toplevel.generic_theory (fn gthy =>
+fun command environment debug get_file = Toplevel.generic_theory (fn gthy =>
   let
-    val [file: Token.file] = files (Context.theory_of gthy);
+    val file = get_file (Context.theory_of gthy);
     val provide = Resources.provide_file file;
     val source = Token.file_source file;
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/PIDE/command.ML
--- a/src/Pure/PIDE/command.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/PIDE/command.ML Fri Nov 27 21:59:23 2020 +0100
@@ -6,11 +6,11 @@
 
 signature COMMAND =
 sig
-  type blob = (string * (SHA1.digest * string list) option) Exn.result
+  type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option}
   val read_file: Path.T -> Position.T -> Path.T -> Token.file
   val read_thy: Toplevel.state -> theory
   val read: Keyword.keywords -> theory -> Path.T -> (unit -> theory) ->
-    blob list * int -> Token.T list -> Toplevel.transition
+    blob Exn.result list * int -> Token.T list -> Toplevel.transition
   type eval
   val eval_command_id: eval -> Document_ID.command
   val eval_exec_id: eval -> Document_ID.exec
@@ -20,7 +20,7 @@
   val eval_result_command: eval -> Toplevel.transition
   val eval_result_state: eval -> Toplevel.state
   val eval: Keyword.keywords -> Path.T -> (unit -> theory) ->
-    blob list * int -> Document_ID.command -> Token.T list -> eval -> eval
+    blob Exn.result list * int -> Document_ID.command -> Token.T list -> eval -> eval
   type print
   type print_fn = Toplevel.transition -> Toplevel.state -> unit
   val print0: {pri: int, print_fn: print_fn} -> eval -> print
@@ -53,8 +53,7 @@
 
 (* read *)
 
-type blob =
-  (string * (SHA1.digest * string list) option) Exn.result;  (*file node name, digest, lines*)
+type blob = {file_node: string, src_path: Path.T, content: (SHA1.digest * string list) option};
 
 fun read_file_node file_node master_dir pos src_path =
   let
@@ -89,29 +88,21 @@
       | SOME exec_id => Position.put_id exec_id);
   in {src_path = src_path, lines = lines, digest = digest, pos = file_pos} end
 
-fun resolve_files keywords master_dir (blobs, blobs_index) toks =
+fun resolve_files master_dir (blobs, blobs_index) toks =
   (case Outer_Syntax.parse_spans toks of
     [Command_Span.Span (Command_Span.Command_Span (cmd, _), _)] =>
       (case try (nth toks) blobs_index of
        SOME tok =>
         let
           val pos = Token.pos_of tok;
-          val path = Path.explode (Token.content_of tok)
-            handle ERROR msg => error (msg ^ Position.here pos);
-          fun make_file src_path (Exn.Res (file_node, NONE)) =
+          fun make_file (Exn.Res {file_node, src_path, content = NONE}) =
                 Exn.interruptible_capture (fn () =>
                   read_file_node file_node master_dir pos src_path) ()
-            | make_file src_path (Exn.Res (file_node, SOME (digest, lines))) =
+            | make_file (Exn.Res {file_node, src_path, content = SOME (digest, lines)}) =
                 (Position.report pos Markup.language_path;
                   Exn.Res (blob_file src_path lines digest file_node))
-            | make_file _ (Exn.Exn e) = Exn.Exn e;
-          val src_paths = Keyword.command_files keywords cmd path;
-          val files =
-            if null blobs then
-              map2 make_file src_paths (map (K (Exn.Res ("", NONE))) src_paths)
-            else if length src_paths = length blobs then
-              map2 make_file src_paths blobs
-            else error ("Misalignment of inlined files" ^ Position.here pos);
+            | make_file (Exn.Exn e) = Exn.Exn e;
+          val files = map make_file blobs;
         in
           toks |> map_index (fn (i, tok) =>
            if i = blobs_index then Token.put_files files tok else tok)
@@ -157,7 +148,7 @@
         Position.here_list verbatim);
   in
     if exists #1 token_reports then Toplevel.malformed pos "Malformed command syntax"
-    else Outer_Syntax.parse_span thy init (resolve_files keywords master_dir blobs_info span)
+    else Outer_Syntax.parse_span thy init (resolve_files master_dir blobs_info span)
   end;
 
 end;
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/PIDE/command.scala
--- a/src/Pure/PIDE/command.scala Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/PIDE/command.scala Fri Nov 27 21:59:23 2020 +0100
@@ -18,6 +18,7 @@
 
   sealed case class Blob(
     name: Document.Node.Name,
+    src_path: Path,
     content: Option[(SHA1.Digest, Symbol.Text_Chunk)])
 
   type Blobs_Info = (List[Exn.Result[Blob]], Int)
@@ -441,9 +442,10 @@
     val blobs =
       files.map(file =>
         (Exn.capture {
-          val name = Document.Node.Name(resources.append(node_name, Path.explode(file)))
+          val src_path = Path.explode(file)
+          val name = Document.Node.Name(resources.append(node_name, src_path))
           val content = get_blob(name).map(blob => (blob.bytes.sha1_digest, blob.chunk))
-          Blob(name, content)
+          Blob(name, src_path, content)
         }).user_error)
     (blobs, index)
   }
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/PIDE/document.ML
--- a/src/Pure/PIDE/document.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/PIDE/document.ML Fri Nov 27 21:59:23 2020 +0100
@@ -18,7 +18,7 @@
   type state
   val init_state: state
   val define_blob: string -> string -> state -> state
-  type blob_digest = (string * string option) Exn.result
+  type blob_digest = {file_node: string, src_path: Path.T, digest: string option} Exn.result
   type command =
    {command_id: Document_ID.command,
     name: string,
@@ -315,7 +315,7 @@
 
 (** main state -- document structure and execution process **)
 
-type blob_digest = (string * string option) Exn.result;  (*file node name, raw digest*)
+type blob_digest = {file_node: string, src_path: Path.T, digest: string option} Exn.result;
 
 type execution =
  {version_id: Document_ID.version,  (*static version id*)
@@ -398,11 +398,11 @@
   | SOME content => content);
 
 fun resolve_blob state (blob_digest: blob_digest) =
-  blob_digest |> Exn.map_res (fn (file_node, raw_digest) =>
-    (file_node, Option.map (the_blob state) raw_digest));
+  blob_digest |> Exn.map_res (fn {file_node, src_path, digest} =>
+    {file_node = file_node, src_path = src_path, content = Option.map (the_blob state) digest});
 
 fun blob_reports pos (blob_digest: blob_digest) =
-  (case blob_digest of Exn.Res (file_node, _) => [(pos, Markup.path file_node)] | _ => []);
+  (case blob_digest of Exn.Res {file_node, ...} => [(pos, Markup.path file_node)] | _ => []);
 
 
 (* commands *)
@@ -478,7 +478,7 @@
         val blobs' =
           (commands', Symtab.empty) |->
             Inttab.fold (fn (_, (_, blobs, _, _)) => blobs |>
-              fold (fn Exn.Res (_, SOME b) => Symtab.update (b, the_blob state b) | _ => I));
+              fold (fn Exn.Res {digest = SOME b, ...} => Symtab.update (b, the_blob state b) | _ => I));
       in (versions', blobs', commands', execution) end);
 
 
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/PIDE/protocol.ML
--- a/src/Pure/PIDE/protocol.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/PIDE/protocol.ML Fri Nov 27 21:59:23 2020 +0100
@@ -38,10 +38,13 @@
           blobs_xml |>
             let
               val message = YXML.string_of_body o Protocol_Message.command_positions id;
+              val blob =
+                triple string string (option string)
+                #> (fn (a, b, c) => {file_node = a, src_path = Path.explode b, digest = c});
             in
               pair
                 (list (variant
-                 [fn ([], a) => Exn.Res (pair string (option string) a),
+                 [fn ([], a) => Exn.Res (blob a),
                   fn ([], a) => Exn.Exn (ERROR (message a))]))
                 int
             end;
@@ -109,8 +112,7 @@
     let
       val (master, (name, (imports, (keywords, errors)))) =
         pair string (pair string (pair (list string)
-          (pair (list (pair string
-            (pair (pair string (list string)) (list string))))
+          (pair (list (pair string (pair string (list string))))
            (list YXML.string_of_body)))) a;
       val imports' = map (rpair Position.none) imports;
       val keywords' = map (fn (x, y) => ((x, Position.none), y)) keywords;
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/PIDE/protocol.scala
--- a/src/Pure/PIDE/protocol.scala Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/PIDE/protocol.scala Fri Nov 27 21:59:23 2020 +0100
@@ -317,8 +317,9 @@
     {
       val encode_blob: T[Exn.Result[Command.Blob]] =
         variant(List(
-          { case Exn.Res(Command.Blob(a, b)) =>
-            (Nil, pair(string, option(string))((a.node, b.map(p => p._1.toString)))) },
+          { case Exn.Res(Command.Blob(a, b, c)) =>
+            (Nil, triple(string, string, option(string))(
+              (a.node, b.implode, c.map(p => p._1.toString)))) },
           { case Exn.Exn(e) => (Nil, string(Exn.message(e))) }))
       Symbol.encode_yxml(pair(list(encode_blob), int)(command.blobs, command.blobs_index))
     }
@@ -397,11 +398,10 @@
         { case Document.Node.Deps(header) =>
             val master_dir = File.standard_url(name.master_dir)
             val imports = header.imports.map(_.node)
-            val keywords =
-              header.keywords.map({ case (a, Keyword.Spec(b, c, d)) => (a, ((b, c), d)) })
+            val keywords = header.keywords.map({ case (a, Keyword.Spec(b, _, c)) => (a, (b, c)) })
             (Nil,
-              pair(string, pair(string, pair(list(string), pair(list(pair(string,
-                pair(pair(string, list(string)), list(string)))), list(string)))))(
+              pair(string, pair(string, pair(list(string),
+                pair(list(pair(string, pair(string, list(string)))), list(string)))))(
                (master_dir, (name.theory, (imports, (keywords, header.errors)))))) },
         { case Document.Node.Perspective(a, b, c) =>
             (bool_atom(a) :: b.commands.map(cmd => long_atom(cmd.id)),
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/PIDE/resources.ML
--- a/src/Pure/PIDE/resources.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/PIDE/resources.ML Fri Nov 27 21:59:23 2020 +0100
@@ -37,10 +37,13 @@
   val check_thy: Path.T -> string ->
     {master: Path.T * SHA1.digest, text: string, theory_pos: Position.T,
      imports: (string * Position.T) list, keywords: Thy_Header.keywords}
-  val parse_files: string -> (theory -> Token.file list) parser
+  val extensions: string list -> Path.T -> Path.T list
+  val parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list) parser
+  val parse_file: (theory -> Token.file) parser
   val provide: Path.T * SHA1.digest -> theory -> theory
   val provide_file: Token.file -> theory -> theory
-  val provide_parse_files: string -> (theory -> Token.file list * theory) parser
+  val provide_parse_files: (Path.T -> Path.T list) -> (theory -> Token.file list * theory) parser
+  val provide_parse_file: (theory -> Token.file * theory) parser
   val loaded_files_current: theory -> bool
   val check_path: Proof.context -> Path.T option -> string * Position.T -> Path.T
   val check_file: Proof.context -> Path.T option -> string * Position.T -> Path.T
@@ -290,18 +293,22 @@
 
 (* load files *)
 
-fun parse_files cmd =
+fun extensions exts path = map (fn ext => Path.ext ext path) exts;
+
+fun parse_files make_paths =
   Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
     (case Token.get_files tok of
       [] =>
         let
-          val keywords = Thy_Header.get_keywords thy;
           val master_dir = master_directory thy;
           val pos = Token.pos_of tok;
-          val src_paths = Keyword.command_files keywords cmd (Path.explode name);
+          val src_paths = make_paths (Path.explode name);
         in map (Command.read_file master_dir pos) src_paths end
     | files => map Exn.release files));
 
+val parse_file = parse_files single >> (fn f => f #> the_single);
+
+
 fun provide (src_path, id) =
   map_files (fn (master_dir, imports, provided) =>
     if AList.defined (op =) provided src_path then
@@ -310,13 +317,16 @@
 
 fun provide_file (file: Token.file) = provide (#src_path file, #digest file);
 
-fun provide_parse_files cmd =
-  parse_files cmd >> (fn files => fn thy =>
+fun provide_parse_files make_paths =
+  parse_files make_paths >> (fn files => fn thy =>
     let
       val fs = files thy;
       val thy' = fold (fn {src_path, digest, ...} => provide (src_path, digest)) fs thy;
     in (fs, thy') end);
 
+val provide_parse_file = provide_parse_files single >> (fn f => f #>> the_single);
+
+
 fun load_file thy src_path =
   let
     val full_path = check_file (master_directory thy) src_path;
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/Pure.thy
--- a/src/Pure/Pure.thy Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/Pure.thy Fri Nov 27 21:59:23 2020 +0100
@@ -111,14 +111,14 @@
 local
   val _ =
     Outer_Syntax.command \<^command_keyword>\<open>external_file\<close> "formal dependency on external file"
-      (Resources.provide_parse_files "external_file" >> (fn files => Toplevel.theory (#2 o files)));
+      (Resources.provide_parse_file >> (fn get_file => Toplevel.theory (#2 o get_file)));
 
   val _ =
     Outer_Syntax.command \<^command_keyword>\<open>bibtex_file\<close> "check bibtex database file in Prover IDE"
-      (Resources.provide_parse_files "bibtex_file" >> (fn files =>
+      (Resources.provide_parse_file >> (fn get_file =>
         Toplevel.theory (fn thy =>
           let
-            val ([{lines, pos, ...}], thy') = files thy;
+            val ({lines, pos, ...}, thy') = get_file thy;
             val _ = Bibtex.check_database_output pos (cat_lines lines);
           in thy' end)));
 
@@ -177,31 +177,31 @@
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>ML_file\<close> "read and evaluate Isabelle/ML file"
-    (Resources.parse_files "ML_file" --| semi >> ML_File.ML NONE);
+    (Resources.parse_file --| semi >> ML_File.ML NONE);
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>ML_file_debug\<close>
     "read and evaluate Isabelle/ML file (with debugger information)"
-    (Resources.parse_files "ML_file_debug" --| semi >> ML_File.ML (SOME true));
+    (Resources.parse_file --| semi >> ML_File.ML (SOME true));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>ML_file_no_debug\<close>
     "read and evaluate Isabelle/ML file (no debugger information)"
-    (Resources.parse_files "ML_file_no_debug" --| semi >> ML_File.ML (SOME false));
+    (Resources.parse_file --| semi >> ML_File.ML (SOME false));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_file\<close> "read and evaluate Standard ML file"
-    (Resources.parse_files "SML_file" --| semi >> ML_File.SML NONE);
+    (Resources.parse_file --| semi >> ML_File.SML NONE);
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_file_debug\<close>
     "read and evaluate Standard ML file (with debugger information)"
-    (Resources.parse_files "SML_file_debug" --| semi >> ML_File.SML (SOME true));
+    (Resources.parse_file --| semi >> ML_File.SML (SOME true));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_file_no_debug\<close>
     "read and evaluate Standard ML file (no debugger information)"
-    (Resources.parse_files "SML_file_no_debug" --| semi >> ML_File.SML (SOME false));
+    (Resources.parse_file --| semi >> ML_File.SML (SOME false));
 
 val _ =
   Outer_Syntax.command \<^command_keyword>\<open>SML_export\<close> "evaluate SML within Isabelle/ML environment"
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/Thy/thy_header.ML
--- a/src/Pure/Thy/thy_header.ML Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/Thy/thy_header.ML Fri Nov 27 21:59:23 2020 +0100
@@ -85,9 +85,9 @@
      ((subparagraphN, \<^here>), Keyword.document_heading_spec),
      ((textN, \<^here>), Keyword.document_body_spec),
      ((txtN, \<^here>), Keyword.document_body_spec),
-     ((text_rawN, \<^here>), ((Keyword.document_raw, []), ["document"])),
-     ((theoryN, \<^here>), ((Keyword.thy_begin, []), ["theory"])),
-     (("ML", \<^here>), ((Keyword.thy_decl, []), ["ML"]))];
+     ((text_rawN, \<^here>), (Keyword.document_raw, ["document"])),
+     ((theoryN, \<^here>), (Keyword.thy_begin, ["theory"])),
+     (("ML", \<^here>), (Keyword.thy_decl, ["ML"]))];
 
 
 (* theory data *)
@@ -132,12 +132,14 @@
   if name = Context.PureN then Scan.succeed []
   else Parse.$$$ importsN |-- Parse.!!! (Scan.repeat1 Parse.theory_name);
 
-val opt_files =
-  Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Parse.list1 Parse.name) --| Parse.$$$ ")") [];
+fun loaded_files kind =
+  if kind = Keyword.thy_load then
+    Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Parse.list1 Parse.name) --| Parse.$$$ ")") []
+  else Scan.succeed [];
 
 val keyword_spec =
   Parse.group (fn () => "outer syntax keyword specification")
-    (Parse.name -- opt_files -- Document_Source.old_tags);
+    ((Parse.name :-- loaded_files >> #1) -- Document_Source.old_tags);
 
 val keyword_decl =
   Scan.repeat1 Parse.string_position --
diff -r 049a71febf05 -r 5f9d66155081 src/Pure/Thy/thy_header.scala
--- a/src/Pure/Thy/thy_header.scala Fri Nov 27 19:56:30 2020 +0100
+++ b/src/Pure/Thy/thy_header.scala Fri Nov 27 21:59:23 2020 +0100
@@ -131,12 +131,12 @@
 {
   val header: Parser[Thy_Header] =
   {
-    val opt_files =
+    val loaded_files =
       $$$("(") ~! (rep1sep(name, $$$(",")) <~ $$$(")")) ^^ { case _ ~ x => x } |
       success(Nil)
 
     val keyword_spec =
-      atom("outer syntax keyword specification", _.is_name) ~ opt_files ~ tags ^^
+      atom("outer syntax keyword specification", _.is_name) ~ loaded_files ~ tags ^^
      { case x ~ y ~ z => Keyword.Spec(x, y, z) }
 
     val keyword_decl =
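
Illustrative usage sketch (not part of the changeset): after this change, a thy_load command in Isabelle/ML no longer identifies itself by its keyword name. It either reads a single file via Resources.provide_parse_file, or states its extension scheme explicitly via Resources.extensions, while Isabelle/Scala determines the loaded files statically from the extensions given in the theory-header keyword declaration (e.g. keywords "demo_files" :: thy_load ("demo", "aux")). The command names and extensions below are hypothetical; the pattern simply mirrors the boogie.ML and spark_commands.ML hunks above.

(*hypothetical sketch only -- not part of this changeset*)
val _ =
  Outer_Syntax.command \<^command_keyword>\<open>demo_file\<close> "load a single auxiliary file"
    (Resources.provide_parse_file >> (fn get_file =>
      Toplevel.theory (fn thy =>
        let val ({lines, ...}, thy') = get_file thy
        in (writeln (cat_lines lines); thy') end)));

val _ =
  Outer_Syntax.command \<^command_keyword>\<open>demo_files\<close>
    "load a .demo file together with its .aux file"
    (Resources.provide_parse_files (Resources.extensions ["demo", "aux"]) >>
      (fn get_files => Toplevel.theory (#2 o get_files)));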