--- a/src/Pure/IsaMakefile Fri Jan 02 16:21:10 2009 +0100
+++ b/src/Pure/IsaMakefile Fri Jan 02 16:21:47 2009 +0100
@@ -73,8 +73,8 @@
Syntax/printer.ML Syntax/simple_syntax.ML Syntax/syn_ext.ML \
Syntax/syn_trans.ML Syntax/syntax.ML Syntax/type_ext.ML Thy/html.ML \
Thy/latex.ML Thy/present.ML Thy/term_style.ML Thy/thm_deps.ML \
- Thy/thy_edit.ML Thy/thy_header.ML Thy/thy_info.ML Thy/thy_load.ML \
- Thy/thy_output.ML Tools/ROOT.ML Tools/invoke.ML \
+ Thy/thy_header.ML Thy/thy_info.ML Thy/thy_load.ML Thy/thy_output.ML \
+ Thy/thy_syntax.ML Tools/ROOT.ML Tools/invoke.ML \
Tools/isabelle_process.ML Tools/named_thms.ML Tools/xml_syntax.ML \
assumption.ML axclass.ML codegen.ML config.ML conjunction.ML \
consts.ML context.ML context_position.ML conv.ML defs.ML display.ML \
--- a/src/Pure/Isar/ROOT.ML Fri Jan 02 16:21:10 2009 +0100
+++ b/src/Pure/Isar/ROOT.ML Fri Jan 02 16:21:47 2009 +0100
@@ -80,7 +80,7 @@
(*theory syntax*)
use "../Thy/term_style.ML";
use "../Thy/thy_output.ML";
-use "../Thy/thy_edit.ML";
+use "../Thy/thy_syntax.ML";
use "../old_goals.ML";
use "outer_syntax.ML";
use "../Thy/thy_info.ML";
--- a/src/Pure/Isar/outer_syntax.ML Fri Jan 02 16:21:10 2009 +0100
+++ b/src/Pure/Isar/outer_syntax.ML Fri Jan 02 16:21:47 2009 +0100
@@ -239,9 +239,9 @@
fun prepare_span commands span =
let
- val range_pos = Position.encode_range (ThyEdit.span_range span);
- val toks = ThyEdit.span_content span;
- val _ = List.app ThyEdit.report_token toks;
+ val range_pos = Position.encode_range (ThySyntax.span_range span);
+ val toks = ThySyntax.span_content span;
+ val _ = List.app ThySyntax.report_token toks;
in
(case Source.exhaust (toplevel_source false NONE (K commands) (Source.of_list toks)) of
[tr] => (tr, true)
@@ -261,7 +261,7 @@
fun prepare_command pos str =
let val (lexs, commands) = get_syntax () in
- (case ThyEdit.parse_spans lexs pos str of
+ (case ThySyntax.parse_spans lexs pos str of
[span] => #1 (prepare_span commands span)
| _ => Toplevel.malformed pos not_singleton)
end;
@@ -275,14 +275,14 @@
val _ = Present.init_theory name;
- val toks = Source.exhausted (ThyEdit.token_source lexs pos (Source.of_list text));
- val spans = Source.exhaust (ThyEdit.span_source toks);
- val _ = List.app ThyEdit.report_span spans;
- val units = Source.exhaust (ThyEdit.unit_source (Source.of_list spans))
+ val toks = Source.exhausted (ThySyntax.token_source lexs pos (Source.of_list text));
+ val spans = Source.exhaust (ThySyntax.span_source toks);
+ val _ = List.app ThySyntax.report_span spans;
+ val units = Source.exhaust (ThySyntax.unit_source (Source.of_list spans))
|> maps (prepare_unit commands);
val _ = Present.theory_source name
- (fn () => HTML.html_mode (implode o map ThyEdit.present_span) spans);
+ (fn () => HTML.html_mode (implode o map ThySyntax.present_span) spans);
val _ = if time then writeln ("\n**** Starting theory " ^ quote name ^ " ****") else ();
val _ = cond_timeit time "" (fn () =>
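For reference, the token -> span -> unit chain that load_thy drives above can also be run directly over an in-memory string. A minimal sketch only, mirroring the calls in this hunk; the input text and the use of Position.none are illustrative, not part of the patch:

(* Sketch: run the renamed ThySyntax pipeline over a small theory fragment. *)
let
  val lexs = OuterKeyword.get_lexicons ();   (* current command/keyword lexicons *)
  val text = "lemma xx: \"x = x\" by (rule refl)";   (* illustrative input only *)
  val toks = Source.exhausted (ThySyntax.token_source lexs Position.none (Source.of_string text));
  val spans = Source.exhaust (ThySyntax.span_source toks);
  val units = Source.exhaust (ThySyntax.unit_source (Source.of_list spans));
in (length spans, length units) end;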
--- a/src/Pure/ProofGeneral/pgip_parser.ML Fri Jan 02 16:21:10 2009 +0100
+++ b/src/Pure/ProofGeneral/pgip_parser.ML Fri Jan 02 16:21:47 2009 +0100
@@ -92,18 +92,18 @@
fun parse_span span =
let
- val kind = ThyEdit.span_kind span;
- val toks = ThyEdit.span_content span;
- val text = implode (map (PrintMode.setmp [] ThyEdit.present_token) toks);
+ val kind = ThySyntax.span_kind span;
+ val toks = ThySyntax.span_content span;
+ val text = implode (map (PrintMode.setmp [] ThySyntax.present_token) toks);
in
(case kind of
- ThyEdit.Command name => parse_command name text
- | ThyEdit.Ignored => [D.Whitespace {text = text}]
- | ThyEdit.Malformed => [D.Unparseable {text = text}])
+ ThySyntax.Command name => parse_command name text
+ | ThySyntax.Ignored => [D.Whitespace {text = text}]
+ | ThySyntax.Malformed => [D.Unparseable {text = text}])
end;
fun pgip_parser pos str =
- maps parse_span (ThyEdit.parse_spans (OuterKeyword.get_lexicons ()) pos str);
+ maps parse_span (ThySyntax.parse_spans (OuterKeyword.get_lexicons ()) pos str);
end;
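The dispatch over ThySyntax.span_kind seen in parse_span is the usual consumer pattern for spans: classify each span and render its token content. A small standalone sketch under that pattern; describe_span and describe_source are hypothetical helpers, not PGIP code:

(* Sketch: classify spans and turn their content into plain strings. *)
fun describe_span span =
  let val text = implode (map ThySyntax.present_token (ThySyntax.span_content span)) in
    (case ThySyntax.span_kind span of
      ThySyntax.Command name => "command " ^ quote name ^ ": " ^ text
    | ThySyntax.Ignored => "ignored: " ^ text
    | ThySyntax.Malformed => "malformed: " ^ text)
  end;

fun describe_source pos str =
  map describe_span (ThySyntax.parse_spans (OuterKeyword.get_lexicons ()) pos str);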
--- a/src/Pure/Thy/thy_edit.ML Fri Jan 02 16:21:10 2009 +0100
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,194 +0,0 @@
-(* Title: Pure/Thy/thy_edit.ML
- ID: $Id$
- Author: Makarius
-
-Basic editor operations on theory sources.
-*)
-
-signature THY_EDIT =
-sig
- val token_source: Scan.lexicon * Scan.lexicon -> Position.T -> (string, 'a) Source.source ->
- (OuterLex.token, (SymbolPos.T, Position.T * (Symbol.symbol, (string, 'a)
- Source.source) Source.source) Source.source) Source.source
- val parse_tokens: Scan.lexicon * Scan.lexicon -> Position.T -> string -> OuterLex.token list
- val present_token: OuterLex.token -> output
- val report_token: OuterLex.token -> unit
- datatype span_kind = Command of string | Ignored | Malformed
- type span
- val span_kind: span -> span_kind
- val span_content: span -> OuterLex.token list
- val span_range: span -> Position.range
- val span_source: (OuterLex.token, 'a) Source.source ->
- (span, (OuterLex.token, 'a) Source.source) Source.source
- val parse_spans: Scan.lexicon * Scan.lexicon -> Position.T -> string -> span list
- val present_span: span -> output
- val report_span: span -> unit
- val unit_source: (span, 'a) Source.source ->
- (span * span list * bool, (span, 'a) Source.source) Source.source
-end;
-
-structure ThyEdit: THY_EDIT =
-struct
-
-structure K = OuterKeyword;
-structure T = OuterLex;
-structure P = OuterParse;
-
-
-(** tokens **)
-
-(* parse *)
-
-fun token_source lexs pos src =
- Symbol.source {do_recover = true} src
- |> T.source {do_recover = SOME false} (K lexs) pos;
-
-fun parse_tokens lexs pos str =
- Source.of_string str
- |> token_source lexs pos
- |> Source.exhaust;
-
-
-(* present *)
-
-local
-
-val token_kind_markup =
- fn T.Command => (Markup.commandN, [])
- | T.Keyword => (Markup.keywordN, [])
- | T.Ident => Markup.ident
- | T.LongIdent => Markup.ident
- | T.SymIdent => Markup.ident
- | T.Var => Markup.ident
- | T.TypeIdent => Markup.ident
- | T.TypeVar => Markup.ident
- | T.Nat => Markup.ident
- | T.String => Markup.string
- | T.AltString => Markup.altstring
- | T.Verbatim => Markup.verbatim
- | T.Space => Markup.none
- | T.Comment => Markup.comment
- | T.InternalValue => Markup.none
- | T.Malformed => Markup.malformed
- | T.Error _ => Markup.malformed
- | T.Sync => Markup.control
- | T.EOF => Markup.control;
-
-in
-
-fun present_token tok =
- Markup.enclose (token_kind_markup (T.kind_of tok)) (Output.output (T.unparse tok));
-
-fun report_token tok =
- Position.report (token_kind_markup (T.kind_of tok)) (T.position_of tok);
-
-end;
-
-
-
-(** spans **)
-
-(* type span *)
-
-datatype span_kind = Command of string | Ignored | Malformed;
-datatype span = Span of span_kind * OuterLex.token list;
-
-fun span_kind (Span (k, _)) = k;
-fun span_content (Span (_, toks)) = toks;
-
-fun span_range span =
- (case span_content span of
- [] => (Position.none, Position.none)
- | toks =>
- let
- val start_pos = T.position_of (hd toks);
- val end_pos = T.end_position_of (List.last toks);
- in (start_pos, end_pos) end);
-
-
-(* parse *)
-
-local
-
-val is_whitespace = T.is_kind T.Space orf T.is_kind T.Comment;
-
-val body = Scan.unless (Scan.many is_whitespace -- Scan.ahead (P.command || P.eof)) P.not_eof;
-
-val span =
- Scan.ahead P.command -- P.not_eof -- Scan.repeat body
- >> (fn ((name, c), bs) => Span (Command name, c :: bs)) ||
- Scan.many1 is_whitespace >> (fn toks => Span (Ignored, toks)) ||
- Scan.repeat1 body >> (fn toks => Span (Malformed, toks));
-
-in
-
-fun span_source src = Source.source T.stopper (Scan.bulk span) NONE src;
-
-end;
-
-fun parse_spans lexs pos str =
- Source.of_string str
- |> token_source lexs pos
- |> span_source
- |> Source.exhaust;
-
-
-(* present *)
-
-local
-
-fun kind_markup (Command name) = Markup.command_span name
- | kind_markup Ignored = Markup.ignored_span
- | kind_markup Malformed = Markup.malformed_span;
-
-in
-
-fun present_span span =
- Markup.enclose (kind_markup (span_kind span)) (implode (map present_token (span_content span)));
-
-fun report_span span =
- Position.report (kind_markup (span_kind span)) (Position.encode_range (span_range span));
-
-end;
-
-
-
-(** units: commands with proof **)
-
-(* scanning spans *)
-
-val eof = Span (Command "", []);
-
-fun is_eof (Span (Command "", _)) = true
- | is_eof _ = false;
-
-val not_eof = not o is_eof;
-
-val stopper = Scan.stopper (K eof) is_eof;
-
-
-(* unit_source *)
-
-local
-
-fun command_with pred = Scan.one (fn (Span (Command name, _)) => pred name | _ => false);
-
-val proof = Scan.pass 1 (Scan.repeat (Scan.depend (fn d =>
- if d <= 0 then Scan.fail
- else
- command_with K.is_qed_global >> pair ~1 ||
- command_with K.is_proof_goal >> pair (d + 1) ||
- (if d = 0 then Scan.fail else command_with K.is_qed >> pair (d - 1)) ||
- Scan.unless (command_with K.is_theory) (Scan.one not_eof) >> pair d)) -- Scan.state);
-
-val unit =
- command_with K.is_theory_goal -- proof >> (fn (a, (bs, d)) => (a, bs, d >= 0)) ||
- Scan.one not_eof >> (fn a => (a, [], true));
-
-in
-
-fun unit_source src = Source.source stopper (Scan.bulk unit) NONE src;
-
-end;
-
-end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Thy/thy_syntax.ML Fri Jan 02 16:21:47 2009 +0100
@@ -0,0 +1,193 @@
+(* Title: Pure/Thy/thy_syntax.ML
+ Author: Makarius
+
+Superficial theory syntax: tokens and spans.
+*)
+
+signature THY_SYNTAX =
+sig
+ val token_source: Scan.lexicon * Scan.lexicon -> Position.T -> (string, 'a) Source.source ->
+ (OuterLex.token, (SymbolPos.T, Position.T * (Symbol.symbol, (string, 'a)
+ Source.source) Source.source) Source.source) Source.source
+ val parse_tokens: Scan.lexicon * Scan.lexicon -> Position.T -> string -> OuterLex.token list
+ val present_token: OuterLex.token -> output
+ val report_token: OuterLex.token -> unit
+ datatype span_kind = Command of string | Ignored | Malformed
+ type span
+ val span_kind: span -> span_kind
+ val span_content: span -> OuterLex.token list
+ val span_range: span -> Position.range
+ val span_source: (OuterLex.token, 'a) Source.source ->
+ (span, (OuterLex.token, 'a) Source.source) Source.source
+ val parse_spans: Scan.lexicon * Scan.lexicon -> Position.T -> string -> span list
+ val present_span: span -> output
+ val report_span: span -> unit
+ val unit_source: (span, 'a) Source.source ->
+ (span * span list * bool, (span, 'a) Source.source) Source.source
+end;
+
+structure ThySyntax: THY_SYNTAX =
+struct
+
+structure K = OuterKeyword;
+structure T = OuterLex;
+structure P = OuterParse;
+
+
+(** tokens **)
+
+(* parse *)
+
+fun token_source lexs pos src =
+ Symbol.source {do_recover = true} src
+ |> T.source {do_recover = SOME false} (K lexs) pos;
+
+fun parse_tokens lexs pos str =
+ Source.of_string str
+ |> token_source lexs pos
+ |> Source.exhaust;
+
+
+(* present *)
+
+local
+
+val token_kind_markup =
+ fn T.Command => (Markup.commandN, [])
+ | T.Keyword => (Markup.keywordN, [])
+ | T.Ident => Markup.ident
+ | T.LongIdent => Markup.ident
+ | T.SymIdent => Markup.ident
+ | T.Var => Markup.ident
+ | T.TypeIdent => Markup.ident
+ | T.TypeVar => Markup.ident
+ | T.Nat => Markup.ident
+ | T.String => Markup.string
+ | T.AltString => Markup.altstring
+ | T.Verbatim => Markup.verbatim
+ | T.Space => Markup.none
+ | T.Comment => Markup.comment
+ | T.InternalValue => Markup.none
+ | T.Malformed => Markup.malformed
+ | T.Error _ => Markup.malformed
+ | T.Sync => Markup.control
+ | T.EOF => Markup.control;
+
+in
+
+fun present_token tok =
+ Markup.enclose (token_kind_markup (T.kind_of tok)) (Output.output (T.unparse tok));
+
+fun report_token tok =
+ Position.report (token_kind_markup (T.kind_of tok)) (T.position_of tok);
+
+end;
+
+
+
+(** spans **)
+
+(* type span *)
+
+datatype span_kind = Command of string | Ignored | Malformed;
+datatype span = Span of span_kind * OuterLex.token list;
+
+fun span_kind (Span (k, _)) = k;
+fun span_content (Span (_, toks)) = toks;
+
+fun span_range span =
+ (case span_content span of
+ [] => (Position.none, Position.none)
+ | toks =>
+ let
+ val start_pos = T.position_of (hd toks);
+ val end_pos = T.end_position_of (List.last toks);
+ in (start_pos, end_pos) end);
+
+
+(* parse *)
+
+local
+
+val is_whitespace = T.is_kind T.Space orf T.is_kind T.Comment;
+
+val body = Scan.unless (Scan.many is_whitespace -- Scan.ahead (P.command || P.eof)) P.not_eof;
+
+val span =
+ Scan.ahead P.command -- P.not_eof -- Scan.repeat body
+ >> (fn ((name, c), bs) => Span (Command name, c :: bs)) ||
+ Scan.many1 is_whitespace >> (fn toks => Span (Ignored, toks)) ||
+ Scan.repeat1 body >> (fn toks => Span (Malformed, toks));
+
+in
+
+fun span_source src = Source.source T.stopper (Scan.bulk span) NONE src;
+
+end;
+
+fun parse_spans lexs pos str =
+ Source.of_string str
+ |> token_source lexs pos
+ |> span_source
+ |> Source.exhaust;
+
+
+(* present *)
+
+local
+
+fun kind_markup (Command name) = Markup.command_span name
+ | kind_markup Ignored = Markup.ignored_span
+ | kind_markup Malformed = Markup.malformed_span;
+
+in
+
+fun present_span span =
+ Markup.enclose (kind_markup (span_kind span)) (implode (map present_token (span_content span)));
+
+fun report_span span =
+ Position.report (kind_markup (span_kind span)) (Position.encode_range (span_range span));
+
+end;
+
+
+
+(** units: commands with proof **)
+
+(* scanning spans *)
+
+val eof = Span (Command "", []);
+
+fun is_eof (Span (Command "", _)) = true
+ | is_eof _ = false;
+
+val not_eof = not o is_eof;
+
+val stopper = Scan.stopper (K eof) is_eof;
+
+
+(* unit_source *)
+
+local
+
+fun command_with pred = Scan.one (fn (Span (Command name, _)) => pred name | _ => false);
+
+val proof = Scan.pass 1 (Scan.repeat (Scan.depend (fn d =>
+ if d <= 0 then Scan.fail
+ else
+ command_with K.is_qed_global >> pair ~1 ||
+ command_with K.is_proof_goal >> pair (d + 1) ||
+ (if d = 0 then Scan.fail else command_with K.is_qed >> pair (d - 1)) ||
+ Scan.unless (command_with K.is_theory) (Scan.one not_eof) >> pair d)) -- Scan.state);
+
+val unit =
+ command_with K.is_theory_goal -- proof >> (fn (a, (bs, d)) => (a, bs, d >= 0)) ||
+ Scan.one not_eof >> (fn a => (a, [], true));
+
+in
+
+fun unit_source src = Source.source stopper (Scan.bulk unit) NONE src;
+
+end;
+
+end;
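The presentation entry points of the renamed module compose the same way the outer_syntax.ML hunk above uses them: parse_spans yields spans directly from a source string, report_span emits position reports, and present_span renders markup (here wrapped in HTML print mode, as in Present.theory_source). A minimal sketch; the theory text is illustrative only:

(* Sketch: parse, report, and present spans of a small theory text. *)
let
  val lexs = OuterKeyword.get_lexicons ();
  val spans = ThySyntax.parse_spans lexs Position.none "theory A imports Main begin end";
  val _ = List.app ThySyntax.report_span spans;   (* emit position reports *)
in HTML.html_mode (implode o map ThySyntax.present_span) spans end;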