--- a/Admin/components/components.sha1 Sun Mar 10 00:22:38 2019 +0000
+++ b/Admin/components/components.sha1 Sun Mar 10 21:33:05 2019 +0100
@@ -78,6 +78,7 @@
bee32019e5d7cf096ef2ea1d836c732e9a7628cc isabelle_fonts-20181124.tar.gz
f249bc2c85bd2af9eee509de17187a766b74ab86 isabelle_fonts-20181129.tar.gz
928b5320073d04d93bcc5bc4347b6d01632b9d45 isabelle_fonts-20190210.tar.gz
+dfcdf9a757b9dc36cee87f82533b43c58ba84abe isabelle_fonts-20190309.tar.gz
0b2206f914336dec4923dd0479d8cee4b904f544 jdk-11+28.tar.gz
3e05213cad47dbef52804fe329395db9b4e57f39 jdk-11.0.2+9.tar.gz
71d19df63816e9be1c4c5eb44aea7a44cfadb319 jdk-11.tar.gz
--- a/Admin/components/main Sun Mar 10 00:22:38 2019 +0000
+++ b/Admin/components/main Sun Mar 10 21:33:05 2019 +0100
@@ -4,7 +4,7 @@
csdp-6.x
cvc4-1.5-4
e-2.0-2
-isabelle_fonts-20190210
+isabelle_fonts-20190309
jdk-11.0.2+9
jedit_build-20190224
jfreechart-1.5.0
--- a/etc/symbols Sun Mar 10 00:22:38 2019 +0000
+++ b/etc/symbols Sun Mar 10 21:33:05 2019 +0100
@@ -361,6 +361,7 @@
\<comment> code: 0x002015 group: document argument: space_cartouche font: Isabelle␣DejaVu␣Sans␣Mono
\<^cancel> code: 0x002326 group: document argument: cartouche font: Isabelle␣DejaVu␣Sans␣Mono
\<^latex> group: document argument: cartouche
+\<^marker> code: 0x002710 group: document argument: cartouche font: Isabelle␣DejaVu␣Sans␣Mono
\<open> code: 0x002039 group: punctuation font: Isabelle␣DejaVu␣Sans␣Mono abbrev: <<
\<close> code: 0x00203a group: punctuation font: Isabelle␣DejaVu␣Sans␣Mono abbrev: >>
\<^here> code: 0x002302 font: Isabelle␣DejaVu␣Sans␣Mono
--- a/src/HOL/Computational_Algebra/Euclidean_Algorithm.thy Sun Mar 10 00:22:38 2019 +0000
+++ b/src/HOL/Computational_Algebra/Euclidean_Algorithm.thy Sun Mar 10 21:33:05 2019 +0100
@@ -235,10 +235,12 @@
with less.prems have "y \<noteq> 0" "z \<noteq> 0" by auto
have normalized_factors_product:
"{p. p dvd a * b \<and> normalize p = p} =
- (\<lambda>(x,y). x * y) ` ({p. p dvd a \<and> normalize p = p} \<times> {p. p dvd b \<and> normalize p = p})" for a b
+ (\<lambda>(x,y). x * y) ` ({p. p dvd a \<and> normalize p = p} \<times> {p. p dvd b \<and> normalize p = p})"
+ for a b
proof safe
fix p assume p: "p dvd a * b" "normalize p = p"
- from dvd_productE[OF p(1)] guess x y . note xy = this
+ from p(1) obtain x y where xy: "p = x * y" "x dvd a" "y dvd b"
+ by (rule dvd_productE)
define x' y' where "x' = normalize x" and "y' = normalize y"
have "p = x' * y'"
by (subst p(2) [symmetric]) (simp add: xy x'_def y'_def normalize_mult)
--- a/src/Pure/Admin/build_fonts.scala Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Admin/build_fonts.scala Sun Mar 10 21:33:05 2019 +0100
@@ -30,6 +30,7 @@
0x203a, // single guillemet
0x204b, // FOOTNOTE
0x20ac, // Euro
+ 0x2710, // pencil
0xfb01, // ligature fi
0xfb02, // ligature fl
0xfffd, // replacement character
--- a/src/Pure/General/antiquote.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/General/antiquote.ML Sun Mar 10 21:33:05 2019 +0100
@@ -122,12 +122,12 @@
scan_nl || Scan.repeats1 scan_plain_txt @@@ scan_nl_opt;
val scan_text_comments =
- scan_nl || Scan.repeats1 (Comment.scan >> #2 || scan_plain_txt) @@@ scan_nl_opt;
+ scan_nl || Scan.repeats1 (Comment.scan_inner >> #2 || scan_plain_txt) @@@ scan_nl_opt;
val scan_antiq_body =
Scan.trace (Symbol_Pos.scan_string_qq err_prefix || Symbol_Pos.scan_string_bq err_prefix) >> #2 ||
Symbol_Pos.scan_cartouche err_prefix ||
- Comment.scan --
+ Comment.scan_inner --
Symbol_Pos.!!! (fn () => err_prefix ^ "bad formal comment in antiquote body") Scan.fail
>> K [] ||
Scan.one (fn (s, _) => s <> "}" andalso Symbol.not_eof s) >> single;
--- a/src/Pure/General/comment.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/General/comment.ML Sun Mar 10 21:33:05 2019 +0100
@@ -6,13 +6,15 @@
signature COMMENT =
sig
- datatype kind = Comment | Cancel | Latex
+ datatype kind = Comment | Cancel | Latex | Marker
val markups: kind -> Markup.T list
val is_symbol: Symbol.symbol -> bool
val scan_comment: (kind * Symbol_Pos.T list) scanner
val scan_cancel: (kind * Symbol_Pos.T list) scanner
val scan_latex: (kind * Symbol_Pos.T list) scanner
- val scan: (kind * Symbol_Pos.T list) scanner
+ val scan_marker: (kind * Symbol_Pos.T list) scanner
+ val scan_inner: (kind * Symbol_Pos.T list) scanner
+ val scan_outer: (kind * Symbol_Pos.T list) scanner
val read_body: Symbol_Pos.T list -> (kind option * Symbol_Pos.T list) list
end;
@@ -21,7 +23,7 @@
(* kinds *)
-datatype kind = Comment | Cancel | Latex;
+datatype kind = Comment | Cancel | Latex | Marker;
val kinds =
[(Comment,
@@ -32,7 +34,10 @@
markups = [Markup.language_text true]}),
(Latex,
{symbol = Symbol.latex, blanks = false,
- markups = [Markup.language_latex true, Markup.no_words]})];
+ markups = [Markup.language_latex true, Markup.no_words]}),
+ (Marker,
+ {symbol = Symbol.marker, blanks = false,
+ markups = [Markup.language_document_marker]})];
val get_kind = the o AList.lookup (op =) kinds;
val print_kind = quote o #symbol o get_kind;
@@ -69,7 +74,10 @@
val scan_comment = scan_strict Comment;
val scan_cancel = scan_strict Cancel;
val scan_latex = scan_strict Latex;
-val scan = scan_comment || scan_cancel || scan_latex;
+val scan_marker = scan_strict Marker;
+
+val scan_inner = scan_comment || scan_cancel || scan_latex;
+val scan_outer = scan_inner || scan_marker;
val scan_body =
Scan.many1 (fn (s, _) => not (is_symbol s) andalso Symbol.not_eof s) >> pair NONE ||
--- a/src/Pure/General/comment.scala Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/General/comment.scala Sun Mar 10 21:33:05 2019 +0100
@@ -14,12 +14,14 @@
val COMMENT = Value("formal comment")
val CANCEL = Value("canceled text")
val LATEX = Value("embedded LaTeX")
+ val MARKER = Value("document marker")
}
lazy val symbols: Set[Symbol.Symbol] =
Set(Symbol.comment, Symbol.comment_decoded,
Symbol.cancel, Symbol.cancel_decoded,
- Symbol.latex, Symbol.latex_decoded)
+ Symbol.latex, Symbol.latex_decoded,
+ Symbol.marker, Symbol.marker_decoded)
lazy val symbols_blanks: Set[Symbol.Symbol] =
Set(Symbol.comment, Symbol.comment_decoded)
--- a/src/Pure/General/symbol.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/General/symbol.ML Sun Mar 10 21:33:05 2019 +0100
@@ -18,6 +18,7 @@
val comment: symbol
val cancel: symbol
val latex: symbol
+ val marker: symbol
val is_char: symbol -> bool
val is_utf8: symbol -> bool
val is_symbolic: symbol -> bool
@@ -120,6 +121,7 @@
val comment = "\<comment>";
val cancel = "\<^cancel>";
val latex = "\<^latex>";
+val marker = "\<^marker>";
fun is_char s = size s = 1;
--- a/src/Pure/General/symbol.scala Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/General/symbol.scala Sun Mar 10 21:33:05 2019 +0100
@@ -496,6 +496,7 @@
val comment_decoded = decode(comment)
val cancel_decoded = decode(cancel)
val latex_decoded = decode(latex)
+ val marker_decoded = decode(marker)
val open_decoded = decode(open)
val close_decoded = decode(close)
@@ -583,10 +584,12 @@
val comment: Symbol = "\\<comment>"
val cancel: Symbol = "\\<^cancel>"
val latex: Symbol = "\\<^latex>"
+ val marker: Symbol = "\\<^marker>"
def comment_decoded: Symbol = symbols.comment_decoded
def cancel_decoded: Symbol = symbols.cancel_decoded
def latex_decoded: Symbol = symbols.latex_decoded
+ def marker_decoded: Symbol = symbols.marker_decoded
/* cartouches */
--- a/src/Pure/Isar/isar_cmd.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Isar/isar_cmd.ML Sun Mar 10 21:33:05 2019 +0100
@@ -180,19 +180,23 @@
structure Diag_State = Proof_Data
(
- type T = Toplevel.state;
- fun init _ = Toplevel.toplevel;
+ type T = Toplevel.state option;
+ fun init _ = NONE;
);
fun ml_diag verbose source = Toplevel.keep (fn state =>
let
val opt_ctxt =
try Toplevel.generic_theory_of state
- |> Option.map (Context.proof_of #> Diag_State.put state);
+ |> Option.map (Context.proof_of #> Diag_State.put (SOME state));
val flags = ML_Compiler.verbose verbose ML_Compiler.flags;
in ML_Context.eval_source_in opt_ctxt flags source end);
-val diag_state = Diag_State.get;
+fun diag_state ctxt =
+ (case Diag_State.get ctxt of
+ SOME st => st
+ | NONE => Toplevel.init_toplevel ());
+
val diag_goal = Proof.goal o Toplevel.proof_of o diag_state;
val _ = Theory.setup
--- a/src/Pure/Isar/outer_syntax.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Isar/outer_syntax.ML Sun Mar 10 21:33:05 2019 +0100
@@ -22,10 +22,10 @@
val local_theory_to_proof: command_keyword -> string ->
(local_theory -> Proof.state) parser -> unit
val bootstrap: bool Config.T
- val parse_tokens: theory -> Token.T list -> Toplevel.transition list
- val parse: theory -> Position.T -> string -> Toplevel.transition list
val parse_spans: Token.T list -> Command_Span.span list
val make_span: Token.T list -> Command_Span.span
+ val parse_span: theory -> (unit -> theory) -> Token.T list -> Toplevel.transition
+ val parse_text: theory -> (unit -> theory) -> Position.T -> string -> Toplevel.transition list
val command_reports: theory -> Token.T -> Position.report_text list
val check_command: Proof.context -> command_keyword -> string
end;
@@ -172,59 +172,6 @@
(** toplevel parsing **)
-(* parse commands *)
-
-val bootstrap = Config.declare_bool ("Outer_Syntax.bootstrap", \<^here>) (K true);
-
-local
-
-val before_command =
- Scan.option (Parse.position (Parse.private >> K true || Parse.qualified >> K false));
-
-fun parse_command thy =
- Scan.ahead (before_command |-- Parse.position Parse.command) :|-- (fn (name, pos) =>
- let
- val keywords = Thy_Header.get_keywords thy;
- val command_tags = Parse.command -- Document_Source.tags;
- val tr0 =
- Toplevel.empty
- |> Toplevel.name name
- |> Toplevel.position pos
- |> Keyword.is_proof_open keywords name ? Toplevel.skip_proof_open
- |> Keyword.is_proof_close keywords name ? Toplevel.skip_proof_close;
- in
- (case lookup_commands thy name of
- SOME (Command {command_parser = Parser parse, ...}) =>
- Parse.!!! (command_tags |-- parse) >> (fn f => f tr0)
- | SOME (Command {command_parser = Restricted_Parser parse, ...}) =>
- before_command :|-- (fn restricted =>
- Parse.!!! (command_tags |-- parse restricted)) >> (fn f => f tr0)
- | NONE =>
- Scan.fail_with (fn _ => fn _ =>
- let
- val msg =
- if Config.get_global thy bootstrap
- then "missing theory context for command "
- else "undefined command ";
- in msg ^ quote (Markup.markup Markup.keyword1 name) end))
- end);
-
-in
-
-fun parse_tokens thy =
- filter Token.is_proper
- #> Source.of_list
- #> Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command thy) xs))
- #> Source.exhaust;
-
-fun parse thy pos text =
- Symbol_Pos.explode (text, pos)
- |> Token.tokenize (Thy_Header.get_keywords thy) {strict = false}
- |> parse_tokens thy;
-
-end;
-
-
(* parse spans *)
local
@@ -266,6 +213,75 @@
| _ => Command_Span.Span (Command_Span.Malformed_Span, toks));
+(* parse commands *)
+
+val bootstrap = Config.declare_bool ("Outer_Syntax.bootstrap", \<^here>) (K true);
+
+local
+
+val before_command =
+ Scan.option (Parse.position (Parse.private >> K true || Parse.qualified >> K false));
+
+fun parse_command thy markers =
+ Scan.ahead (before_command |-- Parse.position Parse.command) :|-- (fn (name, pos) =>
+ let
+ val keywords = Thy_Header.get_keywords thy;
+ val tr0 =
+ Toplevel.empty
+ |> Toplevel.name name
+ |> Toplevel.position pos
+ |> Keyword.is_proof_open keywords name ? Toplevel.skip_proof_open
+ |> Keyword.is_proof_close keywords name ? Toplevel.skip_proof_close;
+ val command_markers =
+ Parse.command |-- Document_Source.tags >>
+ (fn tags => Toplevel.markers (map Document_Marker.legacy_tag tags @ markers) tr0);
+ in
+ (case lookup_commands thy name of
+ SOME (Command {command_parser = Parser parse, ...}) =>
+ Parse.!!! (command_markers -- parse) >> (op |>)
+ | SOME (Command {command_parser = Restricted_Parser parse, ...}) =>
+ before_command :|-- (fn restricted =>
+ Parse.!!! (command_markers -- parse restricted)) >> (op |>)
+ | NONE =>
+ Scan.fail_with (fn _ => fn _ =>
+ let
+ val msg =
+ if Config.get_global thy bootstrap
+ then "missing theory context for command "
+ else "undefined command ";
+ in msg ^ quote (Markup.markup Markup.keyword1 name) end))
+ end);
+
+in
+
+fun parse_span thy init span =
+ let
+ val range = Token.range_of span;
+ val core_range = Token.core_range_of span;
+
+ val markers = map Token.input_of (filter Token.is_document_marker span);
+ fun parse () =
+ filter Token.is_proper span
+ |> Source.of_list
+ |> Source.source Token.stopper (Scan.bulk (fn xs => Parse.!!! (parse_command thy markers) xs))
+ |> Source.exhaust;
+ in
+ (case parse () of
+ [tr] => Toplevel.modify_init init tr
+ | [] => Toplevel.ignored (#1 range)
+ | _ => Toplevel.malformed (#1 core_range) "Exactly one command expected")
+ handle ERROR msg => Toplevel.malformed (#1 core_range) msg
+ end;
+
+fun parse_text thy init pos text =
+ Symbol_Pos.explode (text, pos)
+ |> Token.tokenize (Thy_Header.get_keywords thy) {strict = false}
+ |> parse_spans
+ |> map (Command_Span.content #> parse_span thy init);
+
+end;
+
+
(* check commands *)
fun command_reports thy tok =
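
The former Outer_Syntax.parse_tokens/parse entry points are subsumed by parse_span/parse_text, which additionally collect \<^marker> tokens from each command span. A minimal sketch of the new interface, modelled on its use in Thy_Info.script_thy further below (the wrapper function and its command text are illustrative only):

    (*parse a self-contained command text against theory thy and run the transitions*)
    fun run_text thy pos txt =
      let val trs = Outer_Syntax.parse_text thy (K thy) pos txt
      in fold (Toplevel.command_exception true) trs (Toplevel.init_toplevel ()) end;
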
--- a/src/Pure/Isar/parse.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Isar/parse.ML Sun Mar 10 21:33:05 2019 +0100
@@ -96,6 +96,7 @@
val for_fixes: (binding * string option * mixfix) list parser
val ML_source: Input.source parser
val document_source: Input.source parser
+ val document_marker: Input.source parser
val const: string parser
val term: string parser
val prop: string parser
@@ -390,6 +391,10 @@
val ML_source = input (group (fn () => "ML source") text);
val document_source = input (group (fn () => "document source") text);
+val document_marker =
+ group (fn () => "document marker")
+ (RESET_VALUE (Scan.one Token.is_document_marker >> Token.input_of));
+
(* terms *)
--- a/src/Pure/Isar/parse.scala Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Isar/parse.scala Sun Mar 10 21:33:05 2019 +0100
@@ -80,7 +80,12 @@
tok.kind == Token.Kind.IDENT ||
tok.kind == Token.Kind.STRING)
- def tags: Parser[List[String]] = rep($$$("%") ~> tag_name)
+ def tag: Parser[String] = $$$("%") ~> tag_name
+ def tags: Parser[List[String]] = rep(tag)
+
+ def marker: Parser[String] = token("marker", _.is_marker) ^^ (_.content)
+
+ def annotation: Parser[Unit] = rep(tag | marker) ^^ { case _ => () }
/* wrappers */
--- a/src/Pure/Isar/token.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Isar/token.ML Sun Mar 10 21:33:05 2019 +0100
@@ -30,7 +30,6 @@
Declaration of declaration |
Files of file Exn.result list
val pos_of: T -> Position.T
- val range_of: T list -> Position.range
val adjust_offsets: (int -> int option) -> T -> T
val eof: T
val is_eof: T -> bool
@@ -46,6 +45,7 @@
val is_comment: T -> bool
val is_informal_comment: T -> bool
val is_formal_comment: T -> bool
+ val is_document_marker: T -> bool
val is_ignored: T -> bool
val is_begin_ignore: T -> bool
val is_end_ignore: T -> bool
@@ -53,6 +53,8 @@
val is_space: T -> bool
val is_blank: T -> bool
val is_newline: T -> bool
+ val range_of: T list -> Position.range
+ val core_range_of: T list -> Position.range
val content_of: T -> string
val source_of: T -> string
val input_of: T -> Input.source
@@ -190,11 +192,6 @@
fun pos_of (Token ((_, (pos, _)), _, _)) = pos;
fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos;
-fun range_of (toks as tok :: _) =
- let val pos' = end_pos_of (List.last toks)
- in Position.range (pos_of tok, pos') end
- | range_of [] = Position.no_range;
-
fun adjust_offsets adjust (Token ((x, range), y, z)) =
Token ((x, apply2 (Position.adjust_offsets adjust) range), y, z);
@@ -245,6 +242,9 @@
fun is_formal_comment (Token (_, (Comment (SOME _), _), _)) = true
| is_formal_comment _ = false;
+fun is_document_marker (Token (_, (Comment (SOME Comment.Marker), _), _)) = true
+ | is_document_marker _ = false;
+
fun is_begin_ignore (Token (_, (Comment NONE, "<"), _)) = true
| is_begin_ignore _ = false;
@@ -267,6 +267,17 @@
| is_newline _ = false;
+(* range of tokens *)
+
+fun range_of (toks as tok :: _) =
+ let val pos' = end_pos_of (List.last toks)
+ in Position.range (pos_of tok, pos') end
+ | range_of [] = Position.no_range;
+
+val core_range_of =
+ drop_prefix is_ignored #> drop_suffix is_ignored #> range_of;
+
+
(* token content *)
fun content_of (Token (_, (_, x), _)) = x;
@@ -636,7 +647,7 @@
scan_verbatim >> token_range Verbatim ||
scan_cartouche >> token_range Cartouche ||
scan_comment >> token_range (Comment NONE) ||
- Comment.scan >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
+ Comment.scan_outer >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
scan_space >> token Space ||
(Scan.max token_leq
(Scan.max token_leq
--- a/src/Pure/Isar/token.scala Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Isar/token.scala Sun Mar 10 21:33:05 2019 +0100
@@ -304,6 +304,9 @@
def is_space: Boolean = kind == Token.Kind.SPACE
def is_informal_comment: Boolean = kind == Token.Kind.INFORMAL_COMMENT
def is_formal_comment: Boolean = kind == Token.Kind.FORMAL_COMMENT
+ def is_marker: Boolean =
+ kind == Token.Kind.FORMAL_COMMENT &&
+ (source.startsWith(Symbol.marker) || source.startsWith(Symbol.marker_decoded))
def is_comment: Boolean = is_informal_comment || is_formal_comment
def is_ignored: Boolean = is_space || is_informal_comment
def is_proper: Boolean = !is_space && !is_comment
--- a/src/Pure/Isar/toplevel.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Isar/toplevel.ML Sun Mar 10 21:33:05 2019 +0100
@@ -8,8 +8,8 @@
sig
exception UNDEF
type state
+ val init_toplevel: unit -> state
val theory_toplevel: theory -> state
- val toplevel: state
val is_toplevel: state -> bool
val is_theory: state -> bool
val is_proof: state -> bool
@@ -37,6 +37,7 @@
val type_error: transition -> string
val name: string -> transition -> transition
val position: Position.T -> transition -> transition
+ val markers: Input.source list -> transition -> transition
val timing: Time.time -> transition -> transition
val init_theory: (unit -> theory) -> transition -> transition
val is_init: transition -> bool
@@ -85,6 +86,7 @@
val reset_theory: state -> state option
val reset_proof: state -> state option
val reset_notepad: state -> state option
+ val fork_presentation: transition -> transition * transition
type result
val join_results: result -> (transition * state) list
val element_result: Keyword.keywords -> transition Thy_Element.element -> state -> result * state
@@ -101,6 +103,8 @@
(* datatype node *)
datatype node =
+ Toplevel
+ (*toplevel outside of theory body*) |
Theory of generic_theory
(*global or local theory*) |
Proof of Proof_Node.T * ((Proof.context -> generic_theory) * generic_theory)
@@ -112,68 +116,87 @@
val proof_node = fn Proof (prf, _) => SOME prf | _ => NONE;
val skipped_proof_node = fn Skipped_Proof _ => true | _ => false;
-fun cases_node f _ (Theory gthy) = f gthy
- | cases_node _ g (Proof (prf, _)) = g (Proof_Node.current prf)
- | cases_node f _ (Skipped_Proof (_, (gthy, _))) = f gthy;
+fun cases_node f _ _ Toplevel = f ()
+ | cases_node _ g _ (Theory gthy) = g gthy
+ | cases_node _ _ h (Proof (prf, _)) = h (Proof_Node.current prf)
+ | cases_node _ g _ (Skipped_Proof (_, (gthy, _))) = g gthy;
+
+fun cases_proper_node g h = cases_node (fn () => raise UNDEF) g h;
+
+val get_theory = cases_node (K NONE) (SOME o Context.theory_of) (SOME o Proof.theory_of);
(* datatype state *)
-type node_presentation = node * Proof.context; (*node with presentation context*)
-fun node_presentation0 node = (node, cases_node Context.proof_of Proof.context_of node);
+type node_presentation = node * Proof.context;
-datatype state = State of node_presentation option * node_presentation option; (*current, previous*)
+fun init_presentation () =
+ Proof_Context.init_global (Theory.get_pure_bootstrap ());
+
+fun node_presentation node =
+ (node, cases_node init_presentation Context.proof_of Proof.context_of node);
+
-fun theory_toplevel thy = State (SOME (node_presentation0 (Theory (Context.Theory thy))), NONE);
+datatype state =
+ State of node_presentation * theory option;
+ (*current node with presentation context, previous theory*)
-val toplevel = State (NONE, NONE);
+fun node_of (State ((node, _), _)) = node;
+fun previous_theory_of (State (_, prev_thy)) = prev_thy;
-fun is_toplevel (State (NONE, _)) = true
- | is_toplevel _ = false;
+fun init_toplevel () = State (node_presentation Toplevel, NONE);
+fun theory_toplevel thy = State (node_presentation (Theory (Context.Theory thy)), NONE);
-fun level (State (NONE, _)) = 0
- | level (State (SOME (Theory _, _), _)) = 0
- | level (State (SOME (Proof (prf, _), _), _)) = Proof.level (Proof_Node.current prf)
- | level (State (SOME (Skipped_Proof (d, _), _), _)) = d + 1; (*different notion of proof depth!*)
+
+fun level state =
+ (case node_of state of
+ Toplevel => 0
+ | Theory _ => 0
+ | Proof (prf, _) => Proof.level (Proof_Node.current prf)
+ | Skipped_Proof (d, _) => d + 1); (*different notion of proof depth!*)
-fun str_of_state (State (NONE, SOME (Theory (Context.Theory thy), _))) =
- "at top level, result theory " ^ quote (Context.theory_name thy)
- | str_of_state (State (NONE, _)) = "at top level"
- | str_of_state (State (SOME (Theory (Context.Theory _), _), _)) = "in theory mode"
- | str_of_state (State (SOME (Theory (Context.Proof _), _), _)) = "in local theory mode"
- | str_of_state (State (SOME (Proof _, _), _)) = "in proof mode"
- | str_of_state (State (SOME (Skipped_Proof _, _), _)) = "in skipped proof mode";
+fun str_of_state state =
+ (case node_of state of
+ Toplevel =>
+ (case previous_theory_of state of
+ NONE => "at top level"
+ | SOME thy => "at top level, result theory " ^ quote (Context.theory_name thy))
+ | Theory (Context.Theory _) => "in theory mode"
+ | Theory (Context.Proof _) => "in local theory mode"
+ | Proof _ => "in proof mode"
+ | Skipped_Proof _ => "in skipped proof mode");
(* current node *)
-fun node_of (State (NONE, _)) = raise UNDEF
- | node_of (State (SOME (node, _), _)) = node;
+fun is_toplevel state = (case node_of state of Toplevel => true | _ => false);
-fun is_theory state = not (is_toplevel state) andalso is_some (theory_node (node_of state));
-fun is_proof state = not (is_toplevel state) andalso is_some (proof_node (node_of state));
-fun is_skipped_proof state = not (is_toplevel state) andalso skipped_proof_node (node_of state);
+fun is_theory state =
+ not (is_toplevel state) andalso is_some (theory_node (node_of state));
-fun node_case f g state = cases_node f g (node_of state);
+fun is_proof state =
+ not (is_toplevel state) andalso is_some (proof_node (node_of state));
-fun previous_theory_of (State (_, NONE)) = NONE
- | previous_theory_of (State (_, SOME (prev, _))) =
- SOME (cases_node Context.theory_of Proof.theory_of prev);
+fun is_skipped_proof state =
+ not (is_toplevel state) andalso skipped_proof_node (node_of state);
-val context_of = node_case Context.proof_of Proof.context_of;
-val generic_theory_of = node_case I (Context.Proof o Proof.context_of);
-val theory_of = node_case Context.theory_of Proof.theory_of;
-val proof_of = node_case (fn _ => error "No proof state") I;
+fun proper_node_of state = if is_toplevel state then raise UNDEF else node_of state;
+fun proper_node_case f g state = cases_proper_node f g (proper_node_of state);
+
+val context_of = proper_node_case Context.proof_of Proof.context_of;
+val generic_theory_of = proper_node_case I (Context.Proof o Proof.context_of);
+val theory_of = proper_node_case Context.theory_of Proof.theory_of;
+val proof_of = proper_node_case (fn _ => error "No proof state") I;
fun proof_position_of state =
- (case node_of state of
+ (case proper_node_of state of
Proof (prf, _) => Proof_Node.position prf
| _ => ~1);
-fun is_end_theory (State (NONE, SOME (Theory (Context.Theory _), _))) = true
+fun is_end_theory (State ((Toplevel, _), SOME _)) = true
| is_end_theory _ = false;
-fun end_theory _ (State (NONE, SOME (Theory (Context.Theory thy), _))) = thy
+fun end_theory _ (State ((Toplevel, _), SOME thy)) = thy
| end_theory pos _ = error ("Malformed theory" ^ Position.here pos);
@@ -185,13 +208,7 @@
fun init _ = NONE;
);
-fun presentation_context0 state =
- (case state of
- State (SOME (_, ctxt), _) => ctxt
- | State (NONE, _) =>
- (case try Theory.get_pure () of
- SOME thy => Proof_Context.init_global thy
- | NONE => raise UNDEF));
+fun presentation_context0 (State ((_, pr_ctxt), _)) = pr_ctxt;
fun presentation_context (state as State (current, _)) =
presentation_context0 state
@@ -199,31 +216,31 @@
fun presentation_state ctxt =
(case Presentation_State.get ctxt of
- NONE => State (SOME (node_presentation0 (Theory (Context.Proof ctxt))), NONE)
+ NONE => State (node_presentation (Theory (Context.Proof ctxt)), NONE)
| SOME state => state);
(* print state *)
fun pretty_context state =
- (case try node_of state of
- NONE => []
- | SOME node =>
- let
- val gthy =
- (case node of
- Theory gthy => gthy
- | Proof (_, (_, gthy)) => gthy
- | Skipped_Proof (_, (_, gthy)) => gthy);
- val lthy = Context.cases Named_Target.theory_init I gthy;
- in Local_Theory.pretty lthy end);
+ if is_toplevel state then []
+ else
+ let
+ val gthy =
+ (case node_of state of
+ Toplevel => raise Match
+ | Theory gthy => gthy
+ | Proof (_, (_, gthy)) => gthy
+ | Skipped_Proof (_, (_, gthy)) => gthy);
+ val lthy = Context.cases Named_Target.theory_init I gthy;
+ in Local_Theory.pretty lthy end;
fun pretty_state state =
- (case try node_of state of
- NONE => []
- | SOME (Theory _) => []
- | SOME (Proof (prf, _)) => Proof.pretty_state (Proof_Node.current prf)
- | SOME (Skipped_Proof (d, _)) => [Pretty.str ("skipped proof: depth " ^ string_of_int d)]);
+ (case node_of state of
+ Toplevel => []
+ | Theory _ => []
+ | Proof (prf, _) => Proof.pretty_state (Proof_Node.current prf)
+ | Skipped_Proof (d, _) => [Pretty.str ("skipped proof: depth " ^ string_of_int d)]);
val string_of_state = pretty_state #> Pretty.chunks #> Pretty.string_of;
@@ -235,35 +252,6 @@
(** toplevel transitions **)
-(* node transactions -- maintaining stable checkpoints *)
-
-exception FAILURE of state * exn;
-
-fun apply_transaction f g node =
- let
- val node_pr = node_presentation0 node;
- val context = cases_node I (Context.Proof o Proof.context_of) node;
- fun state_error e node_pr' = (State (SOME node_pr', SOME node_pr), e);
-
- val (result, err) =
- node
- |> Runtime.controlled_execution (SOME context) f
- |> state_error NONE
- handle exn => state_error (SOME exn) node_pr;
- in
- (case err of
- NONE => tap g result
- | SOME exn => raise FAILURE (result, exn))
- end;
-
-val exit_transaction =
- apply_transaction
- ((fn Theory (Context.Theory thy) => Theory (Context.Theory (Theory.end_theory thy))
- | node => node) #> node_presentation0)
- (K ())
- #> (fn State (node_pr', _) => State (NONE, node_pr'));
-
-
(* primitive transitions *)
datatype trans =
@@ -278,16 +266,43 @@
local
-fun apply_tr _ (Init f) (State (NONE, _)) =
- let val node = Theory (Context.Theory (Runtime.controlled_execution NONE f ()))
- in State (SOME (node_presentation0 node), NONE) end
- | apply_tr _ Exit (State (SOME (node as Theory (Context.Theory _), _), _)) =
- exit_transaction node
- | apply_tr int (Keep f) state =
- Runtime.controlled_execution (try generic_theory_of state) (fn x => tap (f int) x) state
- | apply_tr int (Transaction (f, g)) (State (SOME (node, _), _)) =
- apply_transaction (fn x => f int x) g node
- | apply_tr _ _ _ = raise UNDEF;
+exception FAILURE of state * exn;
+
+fun apply f g node =
+ let
+ val node_pr = node_presentation node;
+ val context = cases_proper_node I (Context.Proof o Proof.context_of) node;
+ fun state_error e node_pr' = (State (node_pr', get_theory node), e);
+
+ val (result, err) =
+ node
+ |> Runtime.controlled_execution (SOME context) f
+ |> state_error NONE
+ handle exn => state_error (SOME exn) node_pr;
+ in
+ (case err of
+ NONE => tap g result
+ | SOME exn => raise FAILURE (result, exn))
+ end;
+
+fun apply_tr int trans state =
+ (case (trans, node_of state) of
+ (Init f, Toplevel) =>
+ Runtime.controlled_execution NONE (fn () =>
+ State (node_presentation (Theory (Context.Theory (f ()))), NONE)) ()
+ | (Exit, node as Theory (Context.Theory thy)) =>
+ let
+ val State ((node', pr_ctxt), _) =
+ node |> apply
+ (fn _ => node_presentation (Theory (Context.Theory (Theory.end_theory thy))))
+ (K ());
+ in State ((Toplevel, pr_ctxt), get_theory node') end
+ | (Keep f, node) =>
+ Runtime.controlled_execution (try generic_theory_of state)
+ (fn () => (f int state; State (node_presentation node, previous_theory_of state))) ()
+ | (Transaction _, Toplevel) => raise UNDEF
+ | (Transaction (f, g), node) => apply (fn x => f int x) g node
+ | _ => raise UNDEF);
fun apply_union _ [] state = raise FAILURE (state, UNDEF)
| apply_union int (tr :: trs) state =
@@ -297,9 +312,18 @@
| exn as FAILURE _ => raise exn
| exn => raise FAILURE (state, exn);
+fun apply_markers markers (state as State ((node, pr_ctxt), prev_thy)) =
+ let
+ val state' =
+ Runtime.controlled_execution (try generic_theory_of state)
+ (fn () => State ((node, fold Document_Marker.evaluate markers pr_ctxt), prev_thy)) ();
+ in (state', NONE) end
+ handle exn => (state, SOME exn);
+
in
-fun apply_trans int trs state = (apply_union int trs state, NONE)
+fun apply_trans int trans markers state =
+ (apply_union int trans state |> apply_markers markers)
handle FAILURE (alt_state, exn) => (alt_state, SOME exn) | exn => (state, SOME exn);
end;
@@ -308,18 +332,19 @@
(* datatype transition *)
datatype transition = Transition of
- {name: string, (*command name*)
- pos: Position.T, (*source position*)
- timing: Time.time, (*prescient timing information*)
- trans: trans list}; (*primitive transitions (union)*)
+ {name: string, (*command name*)
+ pos: Position.T, (*source position*)
+ markers: Input.source list, (*semantic document markers*)
+ timing: Time.time, (*prescient timing information*)
+ trans: trans list}; (*primitive transitions (union)*)
-fun make_transition (name, pos, timing, trans) =
- Transition {name = name, pos = pos, timing = timing, trans = trans};
+fun make_transition (name, pos, markers, timing, trans) =
+ Transition {name = name, pos = pos, markers = markers, timing = timing, trans = trans};
-fun map_transition f (Transition {name, pos, timing, trans}) =
- make_transition (f (name, pos, timing, trans));
+fun map_transition f (Transition {name, pos, markers, timing, trans}) =
+ make_transition (f (name, pos, markers, timing, trans));
-val empty = make_transition ("", Position.none, Time.zeroTime, []);
+val empty = make_transition ("", Position.none, [], Time.zeroTime, []);
(* diagnostics *)
@@ -338,20 +363,23 @@
(* modify transitions *)
-fun name name = map_transition (fn (_, pos, timing, trans) =>
- (name, pos, timing, trans));
+fun name name = map_transition (fn (_, pos, markers, timing, trans) =>
+ (name, pos, markers, timing, trans));
-fun position pos = map_transition (fn (name, _, timing, trans) =>
- (name, pos, timing, trans));
+fun position pos = map_transition (fn (name, _, markers, timing, trans) =>
+ (name, pos, markers, timing, trans));
+
+fun markers markers = map_transition (fn (name, pos, _, timing, trans) =>
+ (name, pos, markers, timing, trans));
-fun timing timing = map_transition (fn (name, pos, _, trans) =>
- (name, pos, timing, trans));
+fun timing timing = map_transition (fn (name, pos, markers, _, trans) =>
+ (name, pos, markers, timing, trans));
-fun add_trans tr = map_transition (fn (name, pos, timing, trans) =>
- (name, pos, timing, tr :: trans));
+fun add_trans tr = map_transition (fn (name, pos, markers, timing, trans) =>
+ (name, pos, markers, timing, tr :: trans));
-val reset_trans = map_transition (fn (name, pos, timing, _) =>
- (name, pos, timing, []));
+val reset_trans = map_transition (fn (name, pos, markers, timing, _) =>
+ (name, pos, markers, timing, []));
(* basic transitions *)
@@ -368,7 +396,7 @@
fun present_transaction f g = add_trans (Transaction (f, g));
fun transaction f = present_transaction f (K ());
-fun transaction0 f = present_transaction (node_presentation0 oo f) (K ());
+fun transaction0 f = present_transaction (node_presentation oo f) (K ());
fun keep f = add_trans (Keep (fn _ => f));
@@ -388,7 +416,7 @@
(* theory transitions *)
fun generic_theory f = transaction (fn _ =>
- (fn Theory gthy => node_presentation0 (Theory (f gthy))
+ (fn Theory gthy => node_presentation (Theory (f gthy))
| _ => raise UNDEF));
fun theory' f = transaction (fn int =>
@@ -397,7 +425,7 @@
|> Sign.new_group
|> f int
|> Sign.reset_group;
- in node_presentation0 (Theory (Context.Theory thy')) end
+ in node_presentation (Theory (Context.Theory thy')) end
| _ => raise UNDEF));
fun theory f = theory' (K f);
@@ -475,15 +503,15 @@
in (Theory gthy', ctxt') end
else raise UNDEF
end
- | Skipped_Proof (0, (gthy, _)) => node_presentation0 (Theory gthy)
+ | Skipped_Proof (0, (gthy, _)) => node_presentation (Theory gthy)
| _ => raise UNDEF));
local
-fun begin_proof init = transaction0 (fn int =>
+fun begin_proof init_proof = transaction0 (fn int =>
(fn Theory gthy =>
let
- val (finish, prf) = init int gthy;
+ val (finish, prf) = init_proof int gthy;
val document = Options.default_string "document";
val skip = (document = "" orelse document = "false") andalso Goal.skip_proofs_enabled ();
val schematic_goal = try Proof.schematic_goal prf;
@@ -588,9 +616,9 @@
local
-fun app int (tr as Transition {trans, ...}) =
+fun app int (tr as Transition {markers, trans, ...}) =
setmp_thread_position tr
- (Timing.protocol (name_of tr) (pos_of tr) (apply_trans int trans)
+ (Timing.protocol (name_of tr) (pos_of tr) (apply_trans int trans markers)
##> Option.map (fn UNDEF => ERROR (type_error tr) | exn => exn));
in
@@ -690,18 +718,26 @@
then Future.proofs_enabled 1
else Future.proofs_enabled 2 orelse Future.proofs_enabled_timing estimate));
+val empty_markers = markers [];
+val empty_trans = reset_trans #> keep (K ());
+
+in
+
+fun fork_presentation tr = (tr |> empty_markers, tr |> empty_trans);
+
fun atom_result keywords tr st =
let
val st' =
if Future.proofs_enabled 1 andalso Keyword.is_diag keywords (name_of tr) then
- (Execution.fork
- {name = "Toplevel.diag", pos = pos_of tr, pri = ~1}
- (fn () => command tr st); st)
+ let
+ val (tr1, tr2) = fork_presentation tr;
+ val _ =
+ Execution.fork {name = "Toplevel.diag", pos = pos_of tr, pri = ~1}
+ (fn () => command tr1 st);
+ in command tr2 st end
else command tr st;
in (Result (tr, st'), st') end;
-in
-
fun element_result keywords (Thy_Element.Element (tr, NONE)) st = atom_result keywords tr st
| element_result keywords (elem as Thy_Element.Element (head_tr, SOME element_rest)) st =
let
@@ -717,6 +753,8 @@
in (Result_List (head_result :: proof_results), st'') end
else
let
+ val (end_tr1, end_tr2) = fork_presentation end_tr;
+
val finish = Context.Theory o Proof_Context.theory_of;
val future_proof =
@@ -725,12 +763,12 @@
{name = "Toplevel.future_proof", pos = pos_of head_tr, pri = ~1}
(fn () =>
let
- val State (SOME (Proof (prf, (_, orig_gthy)), _), prev) = st';
+ val State ((Proof (prf, (_, orig_gthy)), _), prev_thy) = st';
val node' = Proof (Proof_Node.apply (K state) prf, (finish, orig_gthy));
- val (result, result_state) =
- State (SOME (node_presentation0 node'), prev)
- |> fold_map (element_result keywords) body_elems ||> command end_tr;
- in (Result_List result, presentation_context0 result_state) end))
+ val (results, result_state) =
+ State (node_presentation node', prev_thy)
+ |> fold_map (element_result keywords) body_elems ||> command end_tr1;
+ in (Result_List results, presentation_context0 result_state) end))
#> (fn (res, state') => state' |> put_result (Result_Future res));
val forked_proof =
@@ -739,12 +777,12 @@
end_proof (fn _ => future_proof #>
(fn state => state |> Proof.global_done_proof |> Result.put (get_result state)));
- val st'' = st'
- |> command (head_tr |> reset_trans |> forked_proof);
- val end_result = Result (end_tr, st'');
+ val st'' = st' |> command (head_tr |> reset_trans |> forked_proof);
+ val end_st = st'' |> command end_tr2;
+ val end_result = Result (end_tr, end_st);
val result =
Result_List [head_result, Result.get (presentation_context0 st''), end_result];
- in (result, st'') end
+ in (result, end_st) end
end;
end;
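
Toplevel.fork_presentation splits a transition into a copy without document markers (suitable for forked evaluation) and a copy without primitive transitions that still evaluates the markers synchronously. A sketch of the pattern, mirroring its use for diagnostic commands in Command.run below (int/tr/st are placeholder arguments):

    (*fork the diagnostic evaluation, keep marker/presentation effects in the foreground*)
    fun run_diag int tr st =
      let
        val (tr1, tr2) = Toplevel.fork_presentation tr;
        val _ =
          Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1}
            (fn () => Toplevel.command_exception int tr1 st);
      in Toplevel.command_errors int tr2 st end;
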
--- a/src/Pure/ML/ml_lex.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/ML/ml_lex.ML Sun Mar 10 21:33:05 2019 +0100
@@ -312,7 +312,7 @@
val scan_sml_antiq = scan_sml >> Antiquote.Text;
val scan_ml_antiq =
- Comment.scan >> (fn (kind, ss) => Antiquote.Text (token (Comment (SOME kind)) ss)) ||
+ Comment.scan_inner >> (fn (kind, ss) => Antiquote.Text (token (Comment (SOME kind)) ss)) ||
Antiquote.scan_control >> Antiquote.Control ||
Antiquote.scan_antiq >> Antiquote.Antiq ||
scan_rat_antiq >> Antiquote.Antiq ||
--- a/src/Pure/PIDE/command.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/PIDE/command.ML Sun Mar 10 21:33:05 2019 +0100
@@ -155,12 +155,7 @@
Position.here_list verbatim);
in
if exists #1 token_reports then Toplevel.malformed pos "Malformed command syntax"
- else
- (case Outer_Syntax.parse_tokens thy (resolve_files keywords master_dir blobs_info span) of
- [tr] => Toplevel.modify_init init tr
- | [] => Toplevel.ignored (#1 (Token.range_of span))
- | _ => Toplevel.malformed (#1 core_range) "Exactly one command expected")
- handle ERROR msg => Toplevel.malformed (#1 core_range) msg
+ else Outer_Syntax.parse_span thy init (resolve_files keywords master_dir blobs_info span)
end;
end;
@@ -173,7 +168,10 @@
fun init_eval_state opt_thy =
{failed = false,
command = Toplevel.empty,
- state = (case opt_thy of NONE => Toplevel.toplevel | SOME thy => Toplevel.theory_toplevel thy)};
+ state =
+ (case opt_thy of
+ NONE => Toplevel.init_toplevel ()
+ | SOME thy => Toplevel.theory_toplevel thy)};
datatype eval =
Eval of
@@ -214,8 +212,12 @@
fun run keywords int tr st =
if Future.proofs_enabled 1 andalso Keyword.is_diag keywords (Toplevel.name_of tr) then
- (Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1}
- (fn () => Toplevel.command_exception int tr st); ([], SOME st))
+ let
+ val (tr1, tr2) = Toplevel.fork_presentation tr;
+ val _ =
+ Execution.fork {name = "Toplevel.diag", pos = Toplevel.pos_of tr, pri = ~1}
+ (fn () => Toplevel.command_exception int tr1 st);
+ in Toplevel.command_errors int tr2 st end
else Toplevel.command_errors int tr st;
fun check_token_comments ctxt tok =
@@ -260,10 +262,7 @@
val errs2 =
(case result of
NONE => []
- | SOME st' =>
- (case try Toplevel.presentation_context st' of
- NONE => []
- | SOME ctxt' => check_span_comments ctxt' span tr));
+ | SOME st' => check_span_comments (Toplevel.presentation_context st') span tr);
val errs = errs1 @ errs2;
val _ = List.app (Future.error_message (Toplevel.pos_of tr)) errs;
in
--- a/src/Pure/PIDE/document.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/PIDE/document.ML Sun Mar 10 21:33:05 2019 +0100
@@ -575,7 +575,7 @@
val imports = #imports header;
fun maybe_eval_result eval = Command.eval_result_state eval
- handle Fail _ => Toplevel.toplevel;
+ handle Fail _ => Toplevel.init_toplevel ();
fun maybe_end_theory pos st = SOME (Toplevel.end_theory pos st)
handle ERROR msg => (Output.error_message msg; NONE);
@@ -586,7 +586,7 @@
NONE =>
maybe_end_theory pos
(case get_result (snd (the (AList.lookup (op =) deps import))) of
- NONE => Toplevel.toplevel
+ NONE => Toplevel.init_toplevel ()
| SOME (_, eval) => maybe_eval_result eval)
| some => some)
|> Option.map (fn thy => (thy, (pos, Theory.get_markup thy))));
--- a/src/Pure/PIDE/markup.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/PIDE/markup.ML Sun Mar 10 21:33:05 2019 +0100
@@ -16,6 +16,11 @@
val serialN: string
val serial_properties: int -> Properties.T
val instanceN: string
+ val meta_titleN: string val meta_title: T
+ val meta_creatorN: string val meta_creator: T
+ val meta_contributorN: string val meta_contributor: T
+ val meta_dateN: string val meta_date: T
+ val meta_descriptionN: string val meta_description: T
val languageN: string
val symbolsN: string
val delimitedN: string
@@ -32,6 +37,7 @@
val language_ML: bool -> T
val language_SML: bool -> T
val language_document: bool -> T
+ val language_document_marker: T
val language_antiquotation: T
val language_text: bool -> T
val language_verbatim: bool -> T
@@ -282,6 +288,15 @@
val instanceN = "instance";
+(* meta data -- see http://dublincore.org/documents/dces *)
+
+val (meta_titleN, meta_title) = markup_elem "meta_title";
+val (meta_creatorN, meta_creator) = markup_elem "meta_creator";
+val (meta_contributorN, meta_contributor) = markup_elem "meta_contributor";
+val (meta_dateN, meta_date) = markup_elem "meta_date";
+val (meta_descriptionN, meta_description) = markup_elem "meta_description";
+
+
(* embedded languages *)
val languageN = "language";
@@ -314,6 +329,8 @@
val language_ML = language' {name = "ML", symbols = false, antiquotes = true};
val language_SML = language' {name = "SML", symbols = false, antiquotes = false};
val language_document = language' {name = "document", symbols = false, antiquotes = true};
+val language_document_marker =
+ language {name = "document_marker", symbols = true, antiquotes = true, delimited = true};
val language_antiquotation =
language {name = "antiquotation", symbols = true, antiquotes = false, delimited = true};
val language_text = language' {name = "text", symbols = true, antiquotes = false};
--- a/src/Pure/PIDE/markup.scala Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/PIDE/markup.scala Sun Mar 10 21:33:05 2019 +0100
@@ -89,6 +89,15 @@
}
+ /* meta data */
+
+ val META_TITLE = "meta_title"
+ val META_CREATOR = "meta_creator"
+ val META_CONTRIBUTOR = "meta_contributor"
+ val META_DATE = "meta_date"
+ val META_DESCRIPTION = "meta_description"
+
+
/* formal entities */
val BINDING = "binding"
--- a/src/Pure/ROOT.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/ROOT.ML Sun Mar 10 21:33:05 2019 +0100
@@ -208,6 +208,7 @@
ML_file "Isar/parse.ML";
ML_file "Thy/document_source.ML";
ML_file "Thy/thy_header.ML";
+ML_file "Thy/document_marker.ML";
(*proof context*)
ML_file "Isar/object_logic.ML";
--- a/src/Pure/Syntax/lexicon.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Syntax/lexicon.ML Sun Mar 10 21:33:05 2019 +0100
@@ -301,7 +301,7 @@
val scan =
Symbol_Pos.scan_cartouche err_prefix >> token Cartouche ||
- Comment.scan >> (fn (kind, ss) => token (Comment kind) ss) ||
+ Comment.scan_inner >> (fn (kind, ss) => token (Comment kind) ss) ||
Scan.max token_leq scan_lit scan_val ||
scan_string >> token String ||
scan_str >> token Str ||
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Thy/document_marker.ML Sun Mar 10 21:33:05 2019 +0100
@@ -0,0 +1,84 @@
+(* Title: Pure/Thy/document_marker.ML
+ Author: Makarius
+
+Document markers: declarations on the presentation context.
+*)
+
+signature DOCUMENT_MARKER =
+sig
+ type marker = Proof.context -> Proof.context
+ val setup: binding -> 'a context_parser -> ('a -> marker) -> theory -> theory
+ val setup0: binding -> 'a parser -> ('a -> marker) -> theory -> theory
+ val evaluate: Input.source -> marker
+ val legacy_tag: string -> Input.source
+end;
+
+structure Document_Marker: DOCUMENT_MARKER =
+struct
+
+(* theory data *)
+
+type marker = Proof.context -> Proof.context;
+
+structure Markers = Theory_Data
+(
+ type T = (Token.src -> Proof.context -> Proof.context) Name_Space.table;
+ val empty : T = Name_Space.empty_table "document_marker";
+ val extend = I;
+ val merge = Name_Space.merge_tables;
+);
+
+val get_markers = Markers.get o Proof_Context.theory_of;
+
+fun check ctxt = Name_Space.check (Context.Proof ctxt) (get_markers ctxt);
+
+fun setup name scan body thy =
+ let
+ fun marker src ctxt =
+ let val (x, ctxt') = Token.syntax scan src ctxt
+ in body x ctxt' end;
+ in thy |> Markers.map (Name_Space.define (Context.Theory thy) true (name, marker) #> #2) end;
+
+fun setup0 name scan = setup name (Scan.lift scan);
+
+
+(* evaluate inner syntax *)
+
+val parse_marker = Parse.token Parse.liberal_name ::: Parse.!!! Parse.args;
+
+fun evaluate input ctxt =
+ let
+ val body =
+ Input.source_explode input
+ |> drop_prefix (fn (s, _) => s = Symbol.marker)
+ |> Symbol_Pos.cartouche_content;
+ val markers =
+ (case Token.read_body (Thy_Header.get_keywords' ctxt) (Parse.list parse_marker) body of
+ SOME res => res
+ | NONE => error ("Bad input" ^ Position.here (Input.pos_of input)));
+ in fold (fn src => #2 (check ctxt (Token.name_of_src src)) src) markers ctxt end;
+
+
+(* concrete markers *)
+
+fun meta_data markup arg ctxt =
+ (Context_Position.report_text ctxt (Position.thread_data ()) markup arg; ctxt);
+
+val _ =
+ Theory.setup
+ (setup0 (Binding.make ("tag", \<^here>)) Parse.name Document_Source.update_tags #>
+ setup0 (Binding.make ("title", \<^here>)) Parse.embedded (meta_data Markup.meta_title) #>
+ setup0 (Binding.make ("creator", \<^here>)) Parse.embedded (meta_data Markup.meta_creator) #>
+ setup0 (Binding.make ("contributor", \<^here>)) Parse.embedded (meta_data Markup.meta_contributor) #>
+ setup0 (Binding.make ("date", \<^here>)) Parse.embedded (meta_data Markup.meta_date) #>
+ setup0 (Binding.make ("description", \<^here>)) (Parse.input Parse.embedded)
+ (fn source => fn ctxt =>
+ let
+ val (arg, pos) = Input.source_content source;
+ val _ = Context_Position.report ctxt pos Markup.words;
+ in meta_data Markup.meta_description arg ctxt end));
+
+fun legacy_tag name =
+ Input.string (cartouche ("tag " ^ Symbol_Pos.quote_string_qq name));
+
+end;
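
Markers are written as \<^marker>\<open>...\<close> comments within the outer syntax of a command and are evaluated on the presentation context; several markers may be given as a comma-separated list. A brief usage sketch in theory source (the lemma is illustrative only):

    lemma True \<^marker>\<open>title \<open>A trivial fact\<close>, tag unimportant\<close>
      by simp

Further markers can be registered via setup/setup0; a hypothetical example (the marker name "note" and its behaviour are not part of this changeset):

    (*hypothetical marker that merely traces its argument*)
    val _ =
      Theory.setup
        (Document_Marker.setup0 (Binding.make ("note", \<^here>)) Parse.embedded
          (fn arg => fn ctxt => (tracing ("note: " ^ arg); ctxt)));
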
--- a/src/Pure/Thy/document_source.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Thy/document_source.ML Sun Mar 10 21:33:05 2019 +0100
@@ -14,8 +14,10 @@
val improper: Token.T list parser
val improper_end: Token.T list parser
val blank_end: Token.T list parser
- val tag: string parser
+ val get_tags: Proof.context -> string list
+ val update_tags: string -> Proof.context -> Proof.context
val tags: string list parser
+ val annotation: unit parser
end;
structure Document_Source: DOCUMENT_SOURCE =
@@ -41,11 +43,27 @@
val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));
-(* tags *)
+(* syntactic tags (old-style) *)
+
+structure Tags = Proof_Data
+(
+ type T = string list;
+ fun init _ = [];
+);
+
+val get_tags = Tags.get;
+val update_tags = Tags.map o update (op =);
val tag_name = Parse.group (fn () => "document tag name") (Parse.short_ident || Parse.string);
val tag = (improper -- Parse.$$$ "%" -- improper) |-- Parse.!!! (tag_name --| blank_end);
val tags = Scan.repeat tag;
+
+(* semantic markers (operation on presentation context) *)
+
+val marker = improper |-- Parse.document_marker --| blank_end;
+
+val annotation = Scan.repeat (tag >> K () || marker >> K ()) >> K ();
+
end;
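
Document tags now live in the presentation context (Proof_Data) and are updated by the "tag" marker; old-style %tag annotations are mapped to such markers via Document_Marker.legacy_tag in Outer_Syntax.parse_command. For document preparation the following two forms are therefore treated alike (illustrative snippets):

    lemma %unimportant True by simp
    lemma True \<^marker>\<open>tag unimportant\<close> by simp
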
--- a/src/Pure/Thy/thy_header.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Thy/thy_header.ML Sun Mar 10 21:33:05 2019 +0100
@@ -175,10 +175,11 @@
Parse.command_name textN ||
Parse.command_name txtN ||
Parse.command_name text_rawN) --
- Document_Source.tags -- Parse.!!! Parse.document_source;
+ (Document_Source.annotation |-- Parse.!!! Parse.document_source);
val parse_header =
- (Scan.repeat heading -- Parse.command_name theoryN -- Document_Source.tags) |-- Parse.!!! args;
+ (Scan.repeat heading -- Parse.command_name theoryN --| Document_Source.annotation)
+ |-- Parse.!!! args;
fun read_tokens pos toks =
filter Token.is_proper toks
--- a/src/Pure/Thy/thy_header.scala Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Thy/thy_header.scala Sun Mar 10 21:33:05 2019 +0100
@@ -172,9 +172,9 @@
command(TEXT) |
command(TXT) |
command(TEXT_RAW)) ~
- tags ~! document_source
+ annotation ~! document_source
- (rep(heading) ~ command(THEORY) ~ tags) ~! args ^^ { case _ ~ x => x }
+ (rep(heading) ~ command(THEORY) ~ annotation) ~! args ^^ { case _ ~ x => x }
}
}
--- a/src/Pure/Thy/thy_info.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Thy/thy_info.ML Sun Mar 10 21:33:05 2019 +0100
@@ -295,7 +295,7 @@
in (results, (st', pos')) end;
val (results, (end_state, end_pos)) =
- fold_map element_result elements (Toplevel.toplevel, Position.none);
+ fold_map element_result elements (Toplevel.init_toplevel (), Position.none);
val thy = Toplevel.end_theory end_pos end_state;
in (results, thy) end;
@@ -451,11 +451,9 @@
fun script_thy pos txt thy =
let
- val trs =
- Outer_Syntax.parse thy pos txt
- |> map (Toplevel.modify_init (K thy));
+ val trs = Outer_Syntax.parse_text thy (K thy) pos txt;
val end_pos = if null trs then pos else Toplevel.pos_of (List.last trs);
- val end_state = fold (Toplevel.command_exception true) trs Toplevel.toplevel;
+ val end_state = fold (Toplevel.command_exception true) trs (Toplevel.init_toplevel ());
in Toplevel.end_theory end_pos end_state end;
--- a/src/Pure/Thy/thy_output.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Thy/thy_output.ML Sun Mar 10 21:33:05 2019 +0100
@@ -61,8 +61,8 @@
Symbol_Pos.cartouche_content syms
|> output_symbols
|> Latex.enclose_body "%\n\\isamarkupcancel{" "}"
- | Comment.Latex =>
- [Latex.symbols (Symbol_Pos.cartouche_content syms)])
+ | Comment.Latex => [Latex.symbols (Symbol_Pos.cartouche_content syms)]
+ | Comment.Marker => [])
and output_comment_document ctxt (comment, syms) =
(case comment of
SOME kind => output_comment ctxt (kind, syms)
@@ -139,6 +139,7 @@
in
(case Token.kind_of tok of
Token.Comment NONE => []
+ | Token.Comment (SOME Comment.Marker) => []
| Token.Command => output false "\\isacommand{" "}"
| Token.Keyword =>
if Symbol.is_ascii_identifier (Token.content_of tok)
@@ -202,7 +203,7 @@
(* command spans *)
-type command = string * Position.T * string list; (*name, position, tags*)
+type command = string * Position.T; (*name, position*)
type source = (token * (string * int)) list; (*token, markup flag, meta-comment depth*)
datatype span = Span of command * (source * source * source * source) * bool;
@@ -276,18 +277,17 @@
in {tag' = tag', active_tag' = active_tag'} end
end;
-fun present_span thy command_tag span state state'
+fun present_span command_tag span state state'
(tag_stack, active_tag, newline, latex, present_cont) =
let
- val ctxt' =
- Toplevel.presentation_context state'
- handle Toplevel.UNDEF => Proof_Context.get_global thy Context.PureN;
+ val ctxt' = Toplevel.presentation_context state';
val present = fold (fn (tok, (flag, 0)) =>
fold cons (present_token ctxt' tok)
#> cons (Latex.string flag)
| _ => I);
- val Span ((cmd_name, cmd_pos, cmd_tags), srcs, span_newline) = span;
+ val Span ((cmd_name, cmd_pos), srcs, span_newline) = span;
+ val cmd_tags = Document_Source.get_tags ctxt';
val (tag, tags) = tag_stack;
val {tag', active_tag'} =
@@ -371,19 +371,19 @@
Document_Source.improper |--
Parse.position (Scan.one (fn tok =>
Token.is_command tok andalso pred keywords (Token.content_of tok))) --
- Scan.repeat Document_Source.tag --
- Parse.!!!! ((Document_Source.improper -- locale -- Document_Source.improper) |--
- Parse.document_source --| Document_Source.improper_end)
- >> (fn (((tok, pos'), tags), source) =>
- let val name = Token.content_of tok
- in (SOME (name, pos', tags), (mk (name, source), (flag, d))) end));
+ (Document_Source.annotation |--
+ Parse.!!!! ((Document_Source.improper -- locale -- Document_Source.improper) |--
+ Parse.document_source --| Document_Source.improper_end))
+ >> (fn ((tok, pos'), source) =>
+ let val name = Token.content_of tok
+ in (SOME (name, pos'), (mk (name, source), (flag, d))) end));
val command = Scan.peek (fn d =>
Scan.optional (Scan.one Token.is_command_modifier ::: Document_Source.improper) [] --
- Scan.one Token.is_command -- Document_Source.tags
- >> (fn ((cmd_mod, cmd), tags) =>
+ Scan.one Token.is_command --| Document_Source.annotation
+ >> (fn (cmd_mod, cmd) =>
map (fn tok => (NONE, (Basic_Token tok, ("", d)))) cmd_mod @
- [(SOME (Token.content_of cmd, Token.pos_of cmd, tags),
+ [(SOME (Token.content_of cmd, Token.pos_of cmd),
(Basic_Token cmd, (markup_false, d)))]));
val cmt = Scan.peek (fn d =>
@@ -442,14 +442,14 @@
val command_tag = make_command_tag options keywords;
fun present_command tr span st st' =
- Toplevel.setmp_thread_position tr (present_span thy command_tag span st st');
+ Toplevel.setmp_thread_position tr (present_span command_tag span st st');
fun present _ [] = I
| present st ((span, (tr, st')) :: rest) = present_command tr span st st' #> present st' rest;
in
if length command_results = length spans then
((NONE, []), NONE, true, [], (I, I))
- |> present Toplevel.toplevel (spans ~~ command_results)
+ |> present (Toplevel.init_toplevel ()) (spans ~~ command_results)
|> present_trailer
|> rev
else error "Messed-up outer syntax for presentation"
--- a/src/Pure/Tools/debugger.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Tools/debugger.ML Sun Mar 10 21:33:05 2019 +0100
@@ -279,8 +279,7 @@
if Command.eval_finished eval then
let
val st = Command.eval_result_state eval;
- val ctxt = Toplevel.presentation_context st
- handle Toplevel.UNDEF => err ();
+ val ctxt = Toplevel.presentation_context st;
in
(case ML_Env.get_breakpoint (Context.Proof ctxt) breakpoint of
SOME (b, _) => b := breakpoint_state
--- a/src/Pure/Tools/rail.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/Tools/rail.ML Sun Mar 10 21:33:05 2019 +0100
@@ -118,7 +118,7 @@
val scan_token =
scan_space >> token Space ||
- Comment.scan >> (fn (kind, ss) => token (Comment kind) ss)||
+ Comment.scan_inner >> (fn (kind, ss) => token (Comment kind) ss) ||
Antiquote.scan_antiq >> antiq_token ||
scan_keyword >> (token Keyword o single) ||
Lexicon.scan_id >> token Ident ||
--- a/src/Pure/theory.ML Sun Mar 10 00:22:38 2019 +0000
+++ b/src/Pure/theory.ML Sun Mar 10 21:33:05 2019 +0100
@@ -13,6 +13,7 @@
val local_setup: (Proof.context -> Proof.context) -> unit
val install_pure: theory -> unit
val get_pure: unit -> theory
+ val get_pure_bootstrap: unit -> theory
val get_markup: theory -> Markup.T
val check: {long: bool} -> Proof.context -> string * Position.T -> theory
val axiom_table: theory -> term Name_Space.table
@@ -59,6 +60,11 @@
SOME thy => thy
| NONE => raise Fail "Theory Pure not present");
+fun get_pure_bootstrap () =
+ (case Single_Assignment.peek pure of
+ SOME thy => thy
+ | NONE => Context.the_global_context ());
+
(** datatype thy **)