# HG changeset patch
# User huffman
# Date 1313986739 25200
# Node ID 7ce460760f99483af6a031382873fd3567307cbb
# Parent  5daa55003649c6be30b12b698698204458216c5c
# Parent  78c43fb3adb05358f7669ebdc22a24735a821b65
merged

diff -r 5daa55003649 -r 7ce460760f99 src/HOL/Predicate.thy
--- a/src/HOL/Predicate.thy	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/HOL/Predicate.thy	Sun Aug 21 21:18:59 2011 -0700
@@ -25,7 +25,6 @@
 
 subsection {* Predicates as (complete) lattices *}
 
-
 text {*
   Handy introduction and elimination rules for @{text "\<le>"}
   on unary and binary predicates
@@ -423,14 +422,6 @@
   "(\<And>w. eval P w \<longleftrightarrow> eval Q w) \<Longrightarrow> P = Q"
   by (cases P, cases Q) (auto simp add: fun_eq_iff)
 
-lemma eval_mem [simp]:
-  "x \<in> eval P \<longleftrightarrow> eval P x"
-  by (simp add: mem_def)
-
-lemma eq_mem [simp]:
-  "x \<in> (op =) y \<longleftrightarrow> x = y"
-  by (auto simp add: mem_def)
-
 instantiation pred :: (type) complete_lattice
 begin
 
@@ -798,11 +789,11 @@
   "map f P = P \<guillemotright>= (single o f)"
 
 lemma eval_map [simp]:
-  "eval (map f P) = image f (eval P)"
+  "eval (map f P) = (\<Squnion>x\<in>{x. eval P x}. (\<lambda>y. f x = y))"
   by (auto simp add: map_def)
 
 enriched_type map: map
-  by (auto intro!: pred_eqI simp add: fun_eq_iff image_compose)
+  by (rule ext, rule pred_eqI, auto)+
 
 
 subsubsection {* Implementation *}
@@ -1040,6 +1031,14 @@
 end;
 *}
 
+lemma eval_mem [simp]:
+  "x \<in> eval P \<longleftrightarrow> eval P x"
+  by (simp add: mem_def)
+
+lemma eq_mem [simp]:
+  "x \<in> (op =) y \<longleftrightarrow> x = y"
+  by (auto simp add: mem_def)
+
 no_notation
   bot ("\<bottom>") and
   top ("\<top>") and
diff -r 5daa55003649 -r 7ce460760f99 src/HOL/Tools/Function/function_common.ML
--- a/src/HOL/Tools/Function/function_common.ML	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/HOL/Tools/Function/function_common.ML	Sun Aug 21 21:18:59 2011 -0700
@@ -333,7 +333,7 @@
 
 local
 
-  val option_parser = Parse.group "option"
+  val option_parser = Parse.group (fn () => "option")
     ((Parse.reserved "sequential" >> K Sequential)
      || ((Parse.reserved "default" |-- Parse.term) >> Default)
      || (Parse.reserved "domintros" >> K DomIntros)
diff -r 5daa55003649 -r 7ce460760f99 src/Pure/Isar/args.ML
--- a/src/Pure/Isar/args.ML	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Pure/Isar/args.ML	Sun Aug 21 21:18:59 2011 -0700
@@ -227,7 +227,7 @@
 fun parse_args is_symid =
   let
     val keyword_symid = token (Parse.keyword_with is_symid);
-    fun atom blk = Parse.group "argument"
+    fun atom blk = Parse.group (fn () => "argument")
       (ident || keyword_symid || string || alt_string || token Parse.float_number ||
         (if blk then token (Parse.$$$ ",") else Scan.fail));
 
diff -r 5daa55003649 -r 7ce460760f99 src/Pure/Isar/outer_syntax.ML
--- a/src/Pure/Isar/outer_syntax.ML	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Pure/Isar/outer_syntax.ML	Sun Aug 21 21:18:59 2011 -0700
@@ -62,7 +62,9 @@
 local
 
 fun terminate false = Scan.succeed ()
-  | terminate true = Parse.group "end of input" (Scan.option Parse.sync -- Parse.semicolon >> K ());
+  | terminate true =
+      Parse.group (fn () => "end of input")
+        (Scan.option Parse.sync -- Parse.semicolon >> K ());
 
 fun body cmd (name, _) =
   (case cmd name of
diff -r 5daa55003649 -r 7ce460760f99 src/Pure/Isar/parse.ML
--- a/src/Pure/Isar/parse.ML	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Pure/Isar/parse.ML	Sun Aug 21 21:18:59 2011 -0700
@@ -8,7 +8,7 @@
 sig
   type 'a parser = Token.T list -> 'a * Token.T list
   type 'a context_parser = Context.generic * Token.T list -> 'a * (Context.generic * Token.T list)
-  val group: string -> (Token.T list -> 'a) -> Token.T list -> 'a
+  val group: (unit -> string) -> (Token.T list -> 'a) -> Token.T list -> 'a
   val !!! : (Token.T list -> 'a) -> Token.T list -> 'a
   val !!!! : (Token.T list -> 'a) -> Token.T list -> 'a
   val triple1: ('a * 'b) * 'c -> 'a * 'b * 'c
@@ -114,17 +114,15 @@
 
 (* group atomic parsers (no cuts!) *)
 
-fun fail_with s = Scan.fail_with
-  (fn [] => (fn () => s ^ " expected (past end-of-file!)")
+fun group s scan = scan || Scan.fail_with
+  (fn [] => (fn () => s () ^ " expected (past end-of-file!)")
     | tok :: _ =>
         (fn () =>
          (case Token.text_of tok of
            (txt, "") =>
-              s ^ " expected,\nbut " ^ txt ^ Token.pos_of tok ^ " was found"
+              s () ^ " expected,\nbut " ^ txt ^ Token.pos_of tok ^ " was found"
          | (txt1, txt2) =>
-              s ^ " expected,\nbut " ^ txt1 ^ Token.pos_of tok ^ " was found:\n" ^ txt2)));
-
-fun group s scan = scan || fail_with s;
+              s () ^ " expected,\nbut " ^ txt1 ^ Token.pos_of tok ^ " was found:\n" ^ txt2)));
 
 
 (* cut *)
@@ -170,7 +168,8 @@
 fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.source_of;
 
 fun kind k =
-  group (Token.str_of_kind k) (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of));
+  group (fn () => Token.str_of_kind k)
+    (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of));
 
 val command = kind Token.Command;
 val keyword = kind Token.Keyword;
@@ -192,10 +191,11 @@
 val keyword_ident_or_symbolic = keyword_with Token.ident_or_symbolic;
 
 fun $$$ x =
-  group (Token.str_of_kind Token.Keyword ^ " " ^ quote x) (keyword_with (fn y => x = y));
+  group (fn () => Token.str_of_kind Token.Keyword ^ " " ^ quote x)
+    (keyword_with (fn y => x = y));
 
 fun reserved x =
-  group ("reserved identifier " ^ quote x)
+  group (fn () => "reserved identifier " ^ quote x)
     (RESET_VALUE (Scan.one (Token.ident_with (fn y => x = y)) >> Token.content_of));
 
 val semicolon = $$$ ";";
@@ -208,7 +208,7 @@
 val int = Scan.optional (minus >> K ~1) 1 -- nat >> op *;
 val real = float_number >> (the o Real.fromString) || int >> Real.fromInt;
 
-val tag_name = group "tag name" (short_ident || string);
+val tag_name = group (fn () => "tag name") (short_ident || string);
 val tags = Scan.repeat ($$$ "%" |-- !!! tag_name);
 
 val opt_unit = Scan.optional ($$$ "(" -- $$$ ")" >> (K ())) ();
@@ -240,11 +240,17 @@
 
 (* names and text *)
 
-val name = group "name declaration" (short_ident || sym_ident || string || number);
+val name = group (fn () => "name declaration") (short_ident || sym_ident || string || number);
+
 val binding = position name >> Binding.make;
-val xname = group "name reference" (short_ident || long_ident || sym_ident || string || number);
-val text = group "text" (short_ident || long_ident || sym_ident || string || number || verbatim);
-val path = group "file name/path specification" name >> Path.explode;
+
+val xname = group (fn () => "name reference")
+  (short_ident || long_ident || sym_ident || string || number);
+
+val text = group (fn () => "text")
+  (short_ident || long_ident || sym_ident || string || number || verbatim);
+
+val path = group (fn () => "file name/path specification") name >> Path.explode;
 
 val liberal_name = keyword_ident_or_symbolic || xname;
 
@@ -254,7 +260,7 @@
 
 (* sorts and arities *)
 
-val sort = group "sort" (inner_syntax xname);
+val sort = group (fn () => "sort") (inner_syntax xname);
 
 val arity = xname -- ($$$ "::" |-- !!! (Scan.optional ($$$ "(" |-- !!!
   (list1 sort --| $$$ ")")) [] -- sort)) >> triple2;
@@ -265,8 +271,9 @@
 
 (* types *)
 
-val typ_group = group "type"
-  (short_ident || long_ident || sym_ident || type_ident || type_var || string || number);
+val typ_group =
+  group (fn () => "type")
+    (short_ident || long_ident || sym_ident || type_ident || type_var || string || number);
 
 val typ = inner_syntax typ_group;
 
@@ -324,24 +331,24 @@
 
 (* embedded source text *)
 
-val ML_source = source_position (group "ML source" text);
-val doc_source = source_position (group "document source" text);
+val ML_source = source_position (group (fn () => "ML source") text);
+val doc_source = source_position (group (fn () => "document source") text);
 
 
 (* terms *)
 
 val tm = short_ident || long_ident || sym_ident || term_var || number || string;
 
-val term_group = group "term" tm;
-val prop_group = group "proposition" tm;
+val term_group = group (fn () => "term") tm;
+val prop_group = group (fn () => "proposition") tm;
 
 val term = inner_syntax term_group;
 val prop = inner_syntax prop_group;
 
-val type_const = inner_syntax (group "type constructor" xname);
-val const = inner_syntax (group "constant" xname);
+val type_const = inner_syntax (group (fn () => "type constructor") xname);
+val const = inner_syntax (group (fn () => "constant") xname);
 
-val literal_fact = inner_syntax (group "literal fact" alt_string);
+val literal_fact = inner_syntax (group (fn () => "literal fact") alt_string);
 
 
 (* patterns *)
diff -r 5daa55003649 -r 7ce460760f99 src/Pure/Isar/parse_spec.ML
--- a/src/Pure/Isar/parse_spec.ML	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Pure/Isar/parse_spec.ML	Sun Aug 21 21:18:59 2011 -0700
@@ -136,7 +136,7 @@
     val expr0 = plus1_unless locale_keyword expr1;
   in expr0 -- Scan.optional (Parse.$$$ "for" |-- Parse.!!! locale_fixes) [] end;
 
-val context_element = Parse.group "context element" loc_element;
+val context_element = Parse.group (fn () => "context element") loc_element;
 
 end;
 
diff -r 5daa55003649 -r 7ce460760f99 src/Pure/Thy/thy_header.ML
--- a/src/Pure/Thy/thy_header.ML	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Pure/Thy/thy_header.ML	Sun Aug 21 21:18:59 2011 -0700
@@ -29,8 +29,8 @@
 
 (* header args *)
 
-val file_name = Parse.group "file name" Parse.name;
-val theory_name = Parse.group "theory name" Parse.name;
+val file_name = Parse.group (fn () => "file name") Parse.name;
+val theory_name = Parse.group (fn () => "theory name") Parse.name;
 
 val file =
   Parse.$$$ "(" |-- Parse.!!! (file_name --| Parse.$$$ ")") >> rpair false ||
diff -r 5daa55003649 -r 7ce460760f99 src/Pure/Thy/thy_syntax.ML
--- a/src/Pure/Thy/thy_syntax.ML	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Pure/Thy/thy_syntax.ML	Sun Aug 21 21:18:59 2011 -0700
@@ -115,15 +115,15 @@
 
 local
 
-val is_whitespace = Token.is_kind Token.Space orf Token.is_kind Token.Comment;
+val whitespace = Scan.many (not o Token.is_proper);
+val whitespace1 = Scan.many1 (not o Token.is_proper);
 
-val body =
-  Scan.unless (Scan.many is_whitespace -- Scan.ahead (Parse.command || Parse.eof)) Parse.not_eof;
+val body = Scan.unless (whitespace -- Scan.ahead (Parse.command || Parse.eof)) Parse.not_eof;
 
 val span =
   Scan.ahead Parse.command -- Parse.not_eof -- Scan.repeat body
     >> (fn ((name, c), bs) => Span (Command name, c :: bs)) ||
-  Scan.many1 is_whitespace >> (fn toks => Span (Ignored, toks)) ||
+  whitespace1 >> (fn toks => Span (Ignored, toks)) ||
   Scan.repeat1 body >> (fn toks => Span (Malformed, toks));
 
 in
diff -r 5daa55003649 -r 7ce460760f99 src/Tools/jEdit/src/document_model.scala
--- a/src/Tools/jEdit/src/document_model.scala	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Tools/jEdit/src/document_model.scala	Sun Aug 21 21:18:59 2011 -0700
@@ -139,13 +139,13 @@
   {
     buffer.addBufferListener(buffer_listener)
     pending_edits.init()
-    buffer.propertiesChanged()
+    Token_Markup.refresh_buffer(buffer)
   }
 
   private def deactivate()
   {
     pending_edits.flush()
     buffer.removeBufferListener(buffer_listener)
-    buffer.propertiesChanged()
+    Token_Markup.refresh_buffer(buffer)
   }
 }
diff -r 5daa55003649 -r 7ce460760f99 src/Tools/jEdit/src/token_markup.scala
--- a/src/Tools/jEdit/src/token_markup.scala	Sun Aug 21 12:22:31 2011 -0700
+++ b/src/Tools/jEdit/src/token_markup.scala	Sun Aug 21 21:18:59 2011 -0700
@@ -14,9 +14,10 @@
 import java.awt.geom.AffineTransform
 
 import org.gjt.sp.util.SyntaxUtilities
-import org.gjt.sp.jedit.Mode
+import org.gjt.sp.jedit.{jEdit, Mode}
 import org.gjt.sp.jedit.syntax.{Token => JEditToken, TokenMarker, TokenHandler,
   ParserRuleSet, ModeProvider, XModeHandler, SyntaxStyle}
+import org.gjt.sp.jedit.buffer.JEditBuffer
 
 import javax.swing.text.Segment
 
@@ -79,10 +80,14 @@
   private def bold_style(style: SyntaxStyle): SyntaxStyle =
     font_style(style, _.deriveFont(Font.BOLD))
 
+  private def hidden_color: Color = new Color(255, 255, 255, 0)
+
   class Style_Extender extends SyntaxUtilities.StyleExtender
   {
-    if (Symbol.font_names.length > 2)
-      error("Too many user symbol fonts (max 2 permitted): " + Symbol.font_names.mkString(", "))
+    val max_user_fonts = 2
+    if (Symbol.font_names.length > max_user_fonts)
+      error("Too many user symbol fonts (max " + max_user_fonts + " permitted): " +
+        Symbol.font_names.mkString(", "))
 
     override def extendStyles(styles: Array[SyntaxStyle]): Array[SyntaxStyle] =
     {
@@ -93,11 +98,13 @@
         new_styles(subscript(i.toByte)) = script_style(style, -1)
         new_styles(superscript(i.toByte)) = script_style(style, 1)
         new_styles(bold(i.toByte)) = bold_style(style)
+        for (idx <- 0 until max_user_fonts)
+          new_styles(user_font(idx, i.toByte)) = style
        for ((family, idx) <- Symbol.font_index)
          new_styles(user_font(idx, i.toByte)) = font_style(style, imitate_font(family, _))
      }
      new_styles(hidden) =
-        new SyntaxStyle(Color.white, null,
+        new SyntaxStyle(hidden_color, null,
          { val font = styles(0).getFont
            transform_font(new Font(font.getFamily, 0, 1),
              AffineTransform.getScaleInstance(1.0, font.getSize.toDouble)) })
@@ -144,7 +151,7 @@
 
   private val isabelle_rules = new ParserRuleSet("isabelle", "MAIN")
 
-  private class Line_Context(val context: Scan.Context)
+  private class Line_Context(val context: Option[Scan.Context])
    extends TokenMarker.LineContext(isabelle_rules, null)
  {
    override def hashCode: Int = context.hashCode
@@ -160,17 +167,17 @@
    override def markTokens(context: TokenMarker.LineContext,
        handler: TokenHandler, line: Segment): TokenMarker.LineContext =
    {
+      val line_ctxt =
+        context match {
+          case c: Line_Context => c.context
+          case _ => Some(Scan.Finished)
+        }
      val context1 =
-        if (Isabelle.session.is_ready) {
+        if (line_ctxt.isDefined && Isabelle.session.is_ready) {
          val syntax = Isabelle.session.current_syntax()
 
-          val ctxt =
-            context match {
-              case c: Line_Context => c.context
-              case _ => Scan.Finished
-            }
-
-          val (tokens, ctxt1) = syntax.scan_context(line, ctxt)
-          val context1 = new Line_Context(ctxt1)
+          val (tokens, ctxt1) = syntax.scan_context(line, line_ctxt.get)
+          val context1 = new Line_Context(Some(ctxt1))
 
          val extended = extended_styles(line)
 
@@ -196,7 +203,7 @@
          context1
        }
        else {
-          val context1 = new Line_Context(Scan.Finished)
+          val context1 = new Line_Context(None)
          handler.handleToken(line, JEditToken.NULL, 0, line.count, context1)
          handler.handleToken(line, JEditToken.END, line.count, 0, context1)
          context1
@@ -210,12 +217,12 @@
 
  /* mode provider */
 
+  private val isabelle_token_marker = new Token_Markup.Marker
+
  class Mode_Provider(orig_provider: ModeProvider) extends ModeProvider
  {
    for (mode <- orig_provider.getModes) addMode(mode)
 
-    val isabelle_token_marker = new Token_Markup.Marker
-
    override def loadMode(mode: Mode, xmh: XModeHandler)
    {
      super.loadMode(mode, xmh)
@@ -223,5 +230,12 @@
      mode.setTokenMarker(isabelle_token_marker)
    }
  }
+
+  def refresh_buffer(buffer: JEditBuffer)
+  {
+    buffer.setTokenMarker(jEdit.getMode("text").getTokenMarker)
+    buffer.setTokenMarker(isabelle_token_marker)
+    buffer.propertiesChanged
+  }
 }