merged
author: wenzelm
date: Mon, 31 May 2010 10:29:04 +0200
changeset 37207 c40c9b05952c
parent 37206 7f2a6f3143ad
parent 37205 1509e49c8d33
child 37208 e8b1c3a0562c
child 37210 1f1f9cbd23ae
child 37213 efcad7594872
--- a/doc-src/antiquote_setup.ML	Sun May 30 21:29:37 2010 +0200
+++ b/doc-src/antiquote_setup.ML	Mon May 31 10:29:04 2010 +0200
@@ -66,7 +66,7 @@
         else if kind = "exception" then txt1 ^ " of " ^ txt2
         else txt1 ^ ": " ^ txt2;
       val txt' = if kind = "" then txt else kind ^ " " ^ txt;
-      val _ = ML_Context.eval_in (SOME ctxt) false Position.none (ml (txt1, txt2));
+      val _ = ML_Context.eval_text_in (SOME ctxt) false Position.none (ml (txt1, txt2));  (* ML_Lex.read (!?) *)
       val kind' = if kind = "" then "ML" else "ML " ^ kind;
     in
       "\\indexdef{}{" ^ kind' ^ "}{" ^ clean_string txt1 ^ "}" ^
--- a/etc/symbols	Sun May 30 21:29:37 2010 +0200
+++ b/etc/symbols	Mon May 31 10:29:04 2010 +0200
@@ -162,10 +162,10 @@
 \<Longleftarrow>        code: 0x0027f8  font: Isabelle 
 \<Rightarrow>           code: 0x0021d2  font: Isabelle  abbrev: =>
 \<Longrightarrow>       code: 0x0027f9  font: Isabelle  abbrev: ==>
-\<leftrightarrow>       code: 0x002194  font: Isabelle  abbrev: <->
-\<longleftrightarrow>   code: 0x0027f7  font: Isabelle  abbrev: <-->
-\<Leftrightarrow>       code: 0x0021d4  font: Isabelle  abbrev: <=>
-\<Longleftrightarrow>   code: 0x0027fa  font: Isabelle  abbrev: <==>
+\<leftrightarrow>       code: 0x002194  font: Isabelle
+\<longleftrightarrow>   code: 0x0027f7  font: Isabelle  abbrev: <->
+\<Leftrightarrow>       code: 0x0021d4  font: Isabelle
+\<Longleftrightarrow>   code: 0x0027fa  font: Isabelle  abbrev: <=>
 \<mapsto>               code: 0x0021a6  font: Isabelle  abbrev: |->
 \<longmapsto>           code: 0x0027fc  font: Isabelle  abbrev: |-->
 \<midarrow>             code: 0x002500  font: Isabelle
--- a/lib/html/isabelle.css	Sun May 30 21:29:37 2010 +0200
+++ b/lib/html/isabelle.css	Mon May 31 10:29:04 2010 +0200
@@ -38,6 +38,7 @@
 .loc, loc  { color: #D2691E; }
 
 .keyword, keyword      { font-weight: bold; }
+.operator, operator    { }
 .command, command      { font-weight: bold; }
 .ident, ident          { }
 .string, string        { color: #008B00; }
--- a/src/HOL/Tools/inductive_codegen.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/HOL/Tools/inductive_codegen.ML	Mon May 31 10:29:04 2010 +0200
@@ -836,7 +836,7 @@
             [str "]"]), Pretty.brk 1,
           str "| NONE => NONE);"]) ^
       "\n\nend;\n";
-    val _ = ML_Context.eval_in (SOME ctxt) false Position.none s;
+    val _ = ML_Context.eval_text_in (SOME ctxt) false Position.none s;
     val values = Config.get ctxt random_values;
     val bound = Config.get ctxt depth_bound;
     val start = Config.get ctxt depth_start;
--- a/src/Pure/General/markup.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/General/markup.ML	Mon May 31 10:29:04 2010 +0200
@@ -63,6 +63,7 @@
   val attributeN: string val attribute: string -> T
   val methodN: string val method: string -> T
   val ML_keywordN: string val ML_keyword: T
+  val ML_delimiterN: string val ML_delimiter: T
   val ML_identN: string val ML_ident: T
   val ML_tvarN: string val ML_tvar: T
   val ML_numeralN: string val ML_numeral: T
@@ -82,8 +83,9 @@
   val doc_antiqN: string val doc_antiq: string -> T
   val keyword_declN: string val keyword_decl: string -> T
   val command_declN: string val command_decl: string -> string -> T
-  val keywordN: string val keyword: string -> T
-  val commandN: string val command: string -> T
+  val keywordN: string val keyword: T
+  val operatorN: string val operator: T
+  val commandN: string val command: T
   val identN: string val ident: T
   val stringN: string val string: T
   val altstringN: string val altstring: T
@@ -238,6 +240,7 @@
 (* ML syntax *)
 
 val (ML_keywordN, ML_keyword) = markup_elem "ML_keyword";
+val (ML_delimiterN, ML_delimiter) = markup_elem "ML_delimiter";
 val (ML_identN, ML_ident) = markup_elem "ML_ident";
 val (ML_tvarN, ML_tvar) = markup_elem "ML_tvar";
 val (ML_numeralN, ML_numeral) = markup_elem "ML_numeral";
@@ -270,8 +273,9 @@
 val command_declN = "command_decl";
 fun command_decl name kind : T = (command_declN, [(nameN, name), (kindN, kind)]);
 
-val (keywordN, keyword) = markup_string "keyword" nameN;
-val (commandN, command) = markup_string "command" nameN;
+val (keywordN, keyword) = markup_elem "keyword";
+val (operatorN, operator) = markup_elem "operator";
+val (commandN, command) = markup_elem "command";
 val (identN, ident) = markup_elem "ident";
 val (stringN, string) = markup_elem "string";
 val (altstringN, altstring) = markup_elem "altstring";
--- a/src/Pure/General/markup.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/General/markup.scala	Mon May 31 10:29:04 2010 +0200
@@ -123,6 +123,7 @@
   /* ML syntax */
 
   val ML_KEYWORD = "ML_keyword"
+  val ML_DELIMITER = "ML_delimiter"
   val ML_IDENT = "ML_ident"
   val ML_TVAR = "ML_tvar"
   val ML_NUMERAL = "ML_numeral"
@@ -144,6 +145,7 @@
   val COMMAND_DECL = "command_decl"
 
   val KEYWORD = "keyword"
+  val OPERATOR = "operator"
   val COMMAND = "command"
   val IDENT = "ident"
   val STRING = "string"
--- a/src/Pure/General/pretty.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/General/pretty.ML	Mon May 31 10:29:04 2010 +0200
@@ -137,8 +137,8 @@
 
 fun markup m prts = markup_block m (0, prts);
 fun mark m prt = markup m [prt];
-fun keyword name = mark (Markup.keyword name) (str name);
-fun command name = mark (Markup.command name) (str name);
+fun keyword name = mark Markup.keyword (str name);
+fun command name = mark Markup.command (str name);
 
 fun markup_chunks m prts = markup m (fbreaks prts);
 val chunks = markup_chunks Markup.none;
--- a/src/Pure/Isar/attrib.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Isar/attrib.ML	Mon May 31 10:29:04 2010 +0200
@@ -153,7 +153,9 @@
   Context.theory_map (ML_Context.expression pos
     "val (name, scan, comment): binding * attribute context_parser * string"
     "Context.map_theory (Attrib.setup name scan comment)"
-    ("(" ^ ML_Syntax.make_binding name ^ ", " ^ txt ^ ", " ^ ML_Syntax.print_string cmt ^ ")"));
+    (ML_Lex.read Position.none ("(" ^ ML_Syntax.make_binding name ^ ", ") @
+      ML_Lex.read pos txt @
+      ML_Lex.read Position.none (", " ^ ML_Syntax.print_string cmt ^ ")")));
 
 
 (* add/del syntax *)
--- a/src/Pure/Isar/isar_cmd.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Isar/isar_cmd.ML	Mon May 31 10:29:04 2010 +0200
@@ -94,11 +94,13 @@
 (* generic setup *)
 
 fun global_setup (txt, pos) =
-  ML_Context.expression pos "val setup: theory -> theory" "Context.map_theory setup" txt
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos "val setup: theory -> theory" "Context.map_theory setup"
   |> Context.theory_map;
 
 fun local_setup (txt, pos) =
-  ML_Context.expression pos "val setup: local_theory -> local_theory" "Context.map_proof setup" txt
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos "val setup: local_theory -> local_theory" "Context.map_proof setup"
   |> Context.proof_map;
 
 
@@ -115,35 +117,40 @@
 in
 
 fun parse_ast_translation (a, (txt, pos)) =
-  txt |> ML_Context.expression pos
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos
     ("val parse_ast_translation: (string * (" ^ advancedT a ^
       "Syntax.ast list -> Syntax.ast)) list")
     ("Context.map_theory (Sign.add_" ^ advancedN a ^ "trfuns (parse_ast_translation, [], [], []))")
   |> Context.theory_map;
 
 fun parse_translation (a, (txt, pos)) =
-  txt |> ML_Context.expression pos
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos
     ("val parse_translation: (string * (" ^ advancedT a ^
       "term list -> term)) list")
     ("Context.map_theory (Sign.add_" ^ advancedN a ^ "trfuns ([], parse_translation, [], []))")
   |> Context.theory_map;
 
 fun print_translation (a, (txt, pos)) =
-  txt |> ML_Context.expression pos
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos
     ("val print_translation: (string * (" ^ advancedT a ^
       "term list -> term)) list")
     ("Context.map_theory (Sign.add_" ^ advancedN a ^ "trfuns ([], [], print_translation, []))")
   |> Context.theory_map;
 
 fun print_ast_translation (a, (txt, pos)) =
-  txt |> ML_Context.expression pos
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos
     ("val print_ast_translation: (string * (" ^ advancedT a ^
       "Syntax.ast list -> Syntax.ast)) list")
     ("Context.map_theory (Sign.add_" ^ advancedN a ^ "trfuns ([], [], [], print_ast_translation))")
   |> Context.theory_map;
 
 fun typed_print_translation (a, (txt, pos)) =
-  txt |> ML_Context.expression pos
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos
     ("val typed_print_translation: (string * (" ^ advancedT a ^
       "bool -> typ -> term list -> term)) list")
     ("Context.map_theory (Sign.add_" ^ advancedN a ^ "trfunsT typed_print_translation)")
@@ -156,15 +163,16 @@
 
 fun oracle (name, pos) (body_txt, body_pos) =
   let
-    val body = Symbol_Pos.content (Symbol_Pos.explode (body_txt, body_pos));
-    val txt =
-      "local\n\
-      \  val binding = " ^ ML_Syntax.make_binding (name, pos) ^ ";\n\
-      \  val body = " ^ body ^ ";\n\
-      \in\n\
-      \  val " ^ name ^ " = snd (Context.>>> (Context.map_theory_result (Thm.add_oracle (binding, body))));\n\
-      \end;\n";
-  in Context.theory_map (ML_Context.exec (fn () => ML_Context.eval false body_pos txt)) end;
+    val body = ML_Lex.read body_pos body_txt;
+    val ants =
+      ML_Lex.read Position.none
+       ("local\n\
+        \  val binding = " ^ ML_Syntax.make_binding (name, pos) ^ ";\n\
+        \  val body = ") @ body @ ML_Lex.read Position.none (";\n\
+        \in\n\
+        \  val " ^ name ^ " = snd (Context.>>> (Context.map_theory_result (Thm.add_oracle (binding, body))));\n\
+        \end;\n");
+  in Context.theory_map (ML_Context.exec (fn () => ML_Context.eval false body_pos ants)) end;
 
 
 (* old-style axioms *)
@@ -180,7 +188,8 @@
 (* declarations *)
 
 fun declaration pervasive (txt, pos) =
-  txt |> ML_Context.expression pos
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos
     "val declaration: Morphism.declaration"
     ("Context.map_proof (Local_Theory.declaration " ^ Bool.toString pervasive ^ " declaration)")
   |> Context.proof_map;
@@ -188,12 +197,13 @@
 
 (* simprocs *)
 
-fun simproc_setup name lhss (proc, pos) identifier =
-  ML_Context.expression pos
+fun simproc_setup name lhss (txt, pos) identifier =
+  ML_Lex.read pos txt
+  |> ML_Context.expression pos
     "val proc: Morphism.morphism -> Simplifier.simpset -> cterm -> thm option"
-  ("Context.map_proof (Simplifier.def_simproc {name = " ^ ML_Syntax.print_string name ^ ", \
-    \lhss = " ^ ML_Syntax.print_strings lhss ^ ", proc = proc, \
-    \identifier = Library.maps ML_Context.thms " ^ ML_Syntax.print_strings identifier ^ "})") proc
+    ("Context.map_proof (Simplifier.def_simproc {name = " ^ ML_Syntax.print_string name ^ ", \
+      \lhss = " ^ ML_Syntax.print_strings lhss ^ ", proc = proc, \
+      \identifier = Library.maps ML_Context.thms " ^ ML_Syntax.print_strings identifier ^ "})")
   |> Context.proof_map;
 
 
@@ -290,7 +300,7 @@
 (* diagnostic ML evaluation *)
 
 fun ml_diag verbose (txt, pos) = Toplevel.keep (fn state =>
-  (ML_Context.eval_in
+  (ML_Context.eval_text_in
     (Option.map Context.proof_of (try Toplevel.generic_theory_of state)) verbose pos txt));
 
 
--- a/src/Pure/Isar/isar_syn.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Isar/isar_syn.ML	Mon May 31 10:29:04 2010 +0200
@@ -321,13 +321,13 @@
     (Keyword.tag_ml Keyword.thy_decl)
     (Parse.ML_source >> (fn (txt, pos) =>
       Toplevel.generic_theory
-        (ML_Context.exec (fn () => ML_Context.eval true pos txt) #> propagate_env)));
+        (ML_Context.exec (fn () => ML_Context.eval_text true pos txt) #> propagate_env)));
 
 val _ =
   Outer_Syntax.command "ML_prf" "ML text within proof" (Keyword.tag_proof Keyword.prf_decl)
     (Parse.ML_source >> (fn (txt, pos) =>
       Toplevel.proof (Proof.map_context (Context.proof_map
-        (ML_Context.exec (fn () => ML_Context.eval true pos txt))) #> propagate_env_prf)));
+        (ML_Context.exec (fn () => ML_Context.eval_text true pos txt))) #> propagate_env_prf)));
 
 val _ =
   Outer_Syntax.command "ML_val" "diagnostic ML text" (Keyword.tag_ml Keyword.diag)
--- a/src/Pure/Isar/method.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Isar/method.ML	Mon May 31 10:29:04 2010 +0200
@@ -286,7 +286,7 @@
     val ctxt' = ctxt |> Context.proof_map
       (ML_Context.expression pos
         "fun tactic (facts: thm list) : tactic"
-        "Context.map_proof (Method.set_tactic tactic)" txt);
+        "Context.map_proof (Method.set_tactic tactic)" (ML_Lex.read pos txt));
   in Context.setmp_thread_data (SOME (Context.Proof ctxt)) (TacticData.get ctxt') end;
 
 fun tactic txt ctxt = METHOD (ml_tactic txt ctxt);
@@ -376,7 +376,9 @@
   Context.theory_map (ML_Context.expression pos
     "val (name, scan, comment): binding * (Proof.context -> Proof.method) context_parser * string"
     "Context.map_theory (Method.setup name scan comment)"
-    ("(" ^ ML_Syntax.make_binding name ^ ", " ^ txt ^ ", " ^ ML_Syntax.print_string cmt ^ ")"));
+    (ML_Lex.read Position.none ("(" ^ ML_Syntax.make_binding name ^ ", ") @
+      ML_Lex.read pos txt @
+      ML_Lex.read Position.none (", " ^ ML_Syntax.print_string cmt ^ ")")));
 
 
 
@@ -463,9 +465,9 @@
   setup (Binding.name "rotate_tac") (Args.goal_spec -- Scan.lift (Scan.optional Parse.int 1) >>
     (fn (quant, i) => K (SIMPLE_METHOD'' quant (Tactic.rotate_tac i))))
       "rotate assumptions of goal" #>
-  setup (Binding.name "tactic") (Scan.lift (Parse.position Args.name) >> tactic)
+  setup (Binding.name "tactic") (Scan.lift Args.name_source_position >> tactic)
     "ML tactic as proof method" #>
-  setup (Binding.name "raw_tactic") (Scan.lift (Parse.position Args.name) >> raw_tactic)
+  setup (Binding.name "raw_tactic") (Scan.lift Args.name_source_position >> raw_tactic)
     "ML tactic as raw proof method"));
 
 
--- a/src/Pure/Isar/theory_target.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Isar/theory_target.ML	Mon May 31 10:29:04 2010 +0200
@@ -50,7 +50,9 @@
 fun pretty_thy ctxt target is_class =
   let
     val thy = ProofContext.theory_of ctxt;
-    val target_name = (if is_class then "class " else "locale ") ^ Locale.extern thy target;
+    val target_name =
+      [Pretty.command (if is_class then "class" else "locale"),
+        Pretty.str (" " ^ Locale.extern thy target)];
     val fixes =
       map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) (#1 (ProofContext.inferred_fixes ctxt));
     val assumes =
@@ -60,13 +62,13 @@
       (if null assumes then [] else [Element.Assumes assumes]);
   in
     if target = "" then []
-    else if null elems then [Pretty.str target_name]
-    else [Pretty.big_list (target_name ^ " =")
-      (map (Pretty.chunks o Element.pretty_ctxt ctxt) elems)]
+    else if null elems then [Pretty.block target_name]
+    else [Pretty.block (Pretty.fbreaks (Pretty.block (target_name @ [Pretty.str " ="]) ::
+      map (Pretty.chunks o Element.pretty_ctxt ctxt) elems))]
   end;
 
 fun pretty (Target {target, is_class, instantiation, overloading, ...}) ctxt =
-  Pretty.block [Pretty.str "theory", Pretty.brk 1,
+  Pretty.block [Pretty.command "theory", Pretty.brk 1,
       Pretty.str (Context.theory_name (ProofContext.theory_of ctxt))] ::
     (if not (null overloading) then [Overloading.pretty ctxt]
      else if not (null (#1 instantiation)) then [Class_Target.pretty_instantiation ctxt]
--- a/src/Pure/ML/ml_context.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/ML/ml_context.ML	Mon May 31 10:29:04 2010 +0200
@@ -27,12 +27,16 @@
   val trace: bool Unsynchronized.ref
   val eval_antiquotes: ML_Lex.token Antiquote.antiquote list * Position.T ->
     Context.generic option -> (ML_Lex.token list * ML_Lex.token list) * Context.generic option
-  val eval: bool -> Position.T -> Symbol_Pos.text -> unit
+  val eval: bool -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit
+  val eval_text: bool -> Position.T -> Symbol_Pos.text -> unit
   val eval_file: Path.T -> unit
-  val eval_in: Proof.context option -> bool -> Position.T -> Symbol_Pos.text -> unit
-  val evaluate: Proof.context -> bool ->
-    string * (unit -> 'a) option Unsynchronized.ref -> string -> 'a
-  val expression: Position.T -> string -> string -> string -> Context.generic -> Context.generic
+  val eval_in: Proof.context option -> bool -> Position.T ->
+    ML_Lex.token Antiquote.antiquote list -> unit
+  val eval_text_in: Proof.context option -> bool -> Position.T -> Symbol_Pos.text -> unit
+  val expression: Position.T -> string -> string -> ML_Lex.token Antiquote.antiquote list ->
+    Context.generic -> Context.generic
+  val evaluate:
+    Proof.context -> bool -> string * (unit -> 'a) option Unsynchronized.ref -> string -> 'a
 end
 
 structure ML_Context: ML_CONTEXT =
@@ -159,11 +163,9 @@
 
 val trace = Unsynchronized.ref false;
 
-fun eval verbose pos txt =
+fun eval verbose pos ants =
   let
     (*prepare source text*)
-    val _ = Position.report Markup.ML_source pos;
-    val ants = ML_Lex.read_antiq (Symbol_Pos.explode (txt, pos), pos);
     val ((env, body), env_ctxt) = eval_antiquotes (ants, pos) (Context.thread_data ());
     val _ =
       if ! trace then tracing (cat_lines [ML_Lex.flatten env, ML_Lex.flatten body])
@@ -183,24 +185,32 @@
 
 (* derived versions *)
 
-fun eval_file path = eval true (Path.position path) (File.read path);
+fun eval_text verbose pos txt = eval verbose pos (ML_Lex.read pos txt);
+fun eval_file path = eval_text true (Path.position path) (File.read path);
+
+fun eval_in ctxt verbose pos ants =
+  Context.setmp_thread_data (Option.map Context.Proof ctxt) (fn () => eval verbose pos ants) ();
 
-fun eval_in ctxt verbose pos txt =
-  Context.setmp_thread_data (Option.map Context.Proof ctxt) (fn () => eval verbose pos txt) ();
+fun eval_text_in ctxt verbose pos txt =
+  Context.setmp_thread_data (Option.map Context.Proof ctxt) (fn () => eval_text verbose pos txt) ();
+
+fun expression pos bind body ants =
+  exec (fn () => eval false pos
+    (ML_Lex.read Position.none ("Context.set_thread_data (SOME (let " ^ bind ^ " = ") @ ants @
+      ML_Lex.read Position.none (" in " ^ body ^ " end (ML_Context.the_generic_context ())));")));
+
 
 (* FIXME not thread-safe -- reference can be accessed directly *)
 fun evaluate ctxt verbose (ref_name, r) txt = NAMED_CRITICAL "ML" (fn () =>
   let
+    val ants =
+      ML_Lex.read Position.none ("val _ = (" ^ ref_name ^ " := SOME (fn () => " ^ txt ^ "))");
     val _ = r := NONE;
-    val _ = Context.setmp_thread_data (SOME (Context.Proof ctxt)) (fn () =>
-      eval verbose Position.none ("val _ = (" ^ ref_name ^ " := SOME (fn () => " ^ txt ^ "))")) ();
+    val _ =
+      Context.setmp_thread_data (SOME (Context.Proof ctxt))
+        (fn () => eval verbose Position.none ants) ();
   in (case ! r of NONE => error ("Bad evaluation for " ^ ref_name) | SOME e => e) end) ();
 
-fun expression pos bind body txt =
-  exec (fn () => eval false pos
-    ("Context.set_thread_data (SOME (let " ^ bind ^ " = " ^ txt ^ " in " ^ body ^
-      " end (ML_Context.the_generic_context ())));"));
-
 end;
 
 structure Basic_ML_Context: BASIC_ML_CONTEXT = ML_Context;
--- a/src/Pure/ML/ml_lex.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/ML/ml_lex.ML	Mon May 31 10:29:04 2010 +0200
@@ -26,7 +26,7 @@
     (token, (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source)
       Source.source) Source.source
   val tokenize: string -> token list
-  val read_antiq: Symbol_Pos.T list * Position.T -> token Antiquote.antiquote list
+  val read: Position.T -> Symbol_Pos.text -> token Antiquote.antiquote list
 end;
 
 structure ML_Lex: ML_LEX =
@@ -88,6 +88,8 @@
 
 (* markup *)
 
+local
+
 val token_kind_markup =
  fn Keyword   => Markup.ML_keyword
   | Ident     => Markup.ML_ident
@@ -103,8 +105,16 @@
   | Error _   => Markup.ML_malformed
   | EOF       => Markup.none;
 
-fun report_token (Token ((pos, _), (kind, _))) =
-  Position.report (token_kind_markup kind) pos;
+fun token_markup kind x =
+  if kind = Keyword andalso exists_string (not o Symbol.is_ascii_letter) x
+  then Markup.ML_delimiter
+  else token_kind_markup kind;
+
+in
+
+fun report_token (Token ((pos, _), (kind, x))) = Position.report (token_markup kind x) pos;
+
+end;
 
 
 
@@ -253,16 +263,21 @@
   source #>
   Source.exhaust;
 
-fun read_antiq (syms, pos) =
-  (Source.of_list syms
-    |> Source.source Symbol_Pos.stopper (Scan.bulk (!!! "bad input" scan_antiq))
-      (SOME (false, fn msg => recover msg >> map Antiquote.Text))
-    |> Source.exhaust
-    |> tap (List.app (Antiquote.report report_token))
-    |> tap Antiquote.check_nesting
-    |> tap (List.app (fn Antiquote.Text tok => ignore (check_content_of tok) | _ => ())))
-  handle ERROR msg =>
-    cat_error msg ("The error(s) above occurred in ML source" ^ Position.str_of pos);
+fun read pos txt =
+  let
+    val _ = Position.report Markup.ML_source pos;
+    val syms = Symbol_Pos.explode (txt, pos);
+  in
+    (Source.of_list syms
+      |> Source.source Symbol_Pos.stopper (Scan.bulk (!!! "bad input" scan_antiq))
+        (SOME (false, fn msg => recover msg >> map Antiquote.Text))
+      |> Source.exhaust
+      |> tap (List.app (Antiquote.report report_token))
+      |> tap Antiquote.check_nesting
+      |> tap (List.app (fn Antiquote.Text tok => ignore (check_content_of tok) | _ => ())))
+    handle ERROR msg =>
+      cat_error msg ("The error(s) above occurred in ML source" ^ Position.str_of pos)
+  end;
 
 end;
 
--- a/src/Pure/PIDE/command.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/PIDE/command.scala	Mon May 31 10:29:04 2010 +0200
@@ -26,7 +26,9 @@
     val UNDEFINED = Value("UNDEFINED")
   }
 
-  case class HighlightInfo(highlight: String) { override def toString = highlight }
+  case class HighlightInfo(kind: String, sub_kind: Option[String]) {
+    override def toString = kind
+  }
   case class TypeInfo(ty: String)
   case class RefInfo(file: Option[String], line: Option[Int],
     command_id: Option[String], offset: Option[Int])
--- a/src/Pure/PIDE/state.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/PIDE/state.scala	Mon May 31 10:29:04 2010 +0200
@@ -41,7 +41,7 @@
   lazy val highlight: Markup_Text =
   {
     markup_root.filter(_.info match {
-      case Command.HighlightInfo(_) => true
+      case Command.HighlightInfo(_, _) => true
       case _ => false
     })
   }
@@ -56,7 +56,7 @@
     types.find(t => t.start <= pos && pos < t.stop) match {
       case Some(t) =>
         t.info match {
-          case Command.TypeInfo(ty) => Some(command.source(t.start, t.stop) + ": " + ty)
+          case Command.TypeInfo(ty) => Some(command.source(t.start, t.stop) + " : " + ty)
           case _ => None
         }
       case None => None
@@ -107,7 +107,8 @@
                   }
                   else {
                     state.add_markup(
-                      command.markup_node(begin - 1, end - 1, Command.HighlightInfo(kind)))
+                      command.markup_node(begin - 1, end - 1,
+                        Command.HighlightInfo(kind, Markup.get_string(Markup.KIND, atts))))
                   }
                 case _ => state
               }
--- a/src/Pure/System/isabelle_process.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/System/isabelle_process.ML	Mon May 31 10:29:04 2010 +0200
@@ -93,7 +93,6 @@
   setup_channels out |> init_message;
   Keyword.report ();
   Output.status (Markup.markup Markup.ready "");
-  Goal.parallel_proofs := 3;
   Isar.toplevel_loop {init = true, welcome = false, sync = true, secure = true});
 
 end;
--- a/src/Pure/Thy/html.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Thy/html.scala	Mon May 31 10:29:04 2010 +0200
@@ -11,6 +11,24 @@
 
 object HTML
 {
+  // encode text
+
+  def encode(text: String): String =
+  {
+    val s = new StringBuilder
+    for (c <- text.iterator) c match {
+      case '<' => s ++= "&lt;"
+      case '>' => s ++= "&gt;"
+      case '&' => s ++= "&amp;"
+      case '"' => s ++= "&quot;"
+      case '\'' => s ++= "&#39;"
+      case '\n' => s ++= "<br/>"
+      case _ => s += c
+    }
+    s.toString
+  }
+
+
   // common elements and attributes
 
   val BODY = "body"
--- a/src/Pure/Thy/thy_output.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Thy/thy_output.ML	Mon May 31 10:29:04 2010 +0200
@@ -587,7 +587,7 @@
 
 fun ml_text name ml = antiquotation name (Scan.lift Args.name_source_position)
   (fn {context, ...} => fn (txt, pos) =>
-   (ML_Context.eval_in (SOME context) false pos (ml txt);
+   (ML_Context.eval_in (SOME context) false pos (ml pos txt);
     Symbol_Pos.content (Symbol_Pos.explode (txt, pos))
     |> (if ! quotes then quote else I)
     |> (if ! display then enclose "\\begin{verbatim}\n" "\n\\end{verbatim}"
@@ -596,13 +596,21 @@
       #> map (space_implode "\\verb,|," o map (enclose "\\verb|" "|") o space_explode "|")
       #> space_implode "\\isasep\\isanewline%\n")));
 
+fun ml_enclose bg en pos txt =
+  ML_Lex.read Position.none bg @ ML_Lex.read pos txt @ ML_Lex.read Position.none en;
+
 in
 
-val _ = ml_text "ML" (fn txt => "fn _ => (" ^ txt ^ ");");
-val _ = ml_text "ML_type" (fn txt => "val _ = NONE : (" ^ txt ^ ") option;");
-val _ = ml_text "ML_struct" (fn txt => "functor XXX() = struct structure XX = " ^ txt ^ " end;");
-val _ = ml_text "ML_functor" (fn txt => "ML_Env.check_functor " ^ ML_Syntax.print_string txt);
-val _ = ml_text "ML_text" (K "");
+val _ = ml_text "ML" (ml_enclose "fn _ => (" ");");
+val _ = ml_text "ML_type" (ml_enclose "val _ = NONE : (" ") option;");
+val _ = ml_text "ML_struct" (ml_enclose "functor XXX() = struct structure XX = " " end;");
+
+val _ = ml_text "ML_functor"   (* FIXME formal treatment of functor name (!?) *)
+  (fn pos => fn txt =>
+    ML_Lex.read Position.none ("ML_Env.check_functor " ^
+      ML_Syntax.print_string (Symbol_Pos.content (Symbol_Pos.explode (txt, pos)))));
+
+val _ = ml_text "ML_text" (K (K []));
 
 end;
 
--- a/src/Pure/Thy/thy_syntax.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/Thy/thy_syntax.ML	Mon May 31 10:29:04 2010 +0200
@@ -47,8 +47,8 @@
 local
 
 val token_kind_markup =
- fn Token.Command       => (Markup.commandN, [])
-  | Token.Keyword       => (Markup.keywordN, [])
+ fn Token.Command       => Markup.command
+  | Token.Keyword       => Markup.keyword
   | Token.Ident         => Markup.ident
   | Token.LongIdent     => Markup.ident
   | Token.SymIdent      => Markup.ident
@@ -67,13 +67,26 @@
   | Token.Sync          => Markup.control
   | Token.EOF           => Markup.control;
 
+fun token_markup tok =
+  if Token.keyword_with (not o Syntax.is_identifier) tok then Markup.operator
+  else
+    let
+      val kind = Token.kind_of tok;
+      val props =
+        if kind = Token.Command then
+          (case Keyword.command_keyword (Token.content_of tok) of
+            SOME k => Markup.properties [(Markup.kindN, Keyword.kind_of k)]
+          | NONE => I)
+        else I;
+    in props (token_kind_markup kind) end;
+
 in
 
 fun present_token tok =
-  Markup.enclose (token_kind_markup (Token.kind_of tok)) (Output.output (Token.unparse tok));
+  Markup.enclose (token_markup tok) (Output.output (Token.unparse tok));
 
 fun report_token tok =
-  Position.report (token_kind_markup (Token.kind_of tok)) (Token.position_of tok);
+  Position.report (token_markup tok) (Token.position_of tok);
 
 end;
 
--- a/src/Pure/codegen.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Pure/codegen.ML	Mon May 31 10:29:04 2010 +0200
@@ -894,7 +894,7 @@
                   [str "]"])]]),
           str ");"]) ^
       "\n\nend;\n";
-    val _ = ML_Context.eval_in (SOME ctxt) false Position.none s;
+    val _ = ML_Context.eval_text_in (SOME ctxt) false Position.none s;
     val dummy_report = ([], false)
   in (fn size => (! test_fn size, dummy_report)) end;
 
@@ -926,7 +926,7 @@
               str ");"]) ^
           "\n\nend;\n";
         val _ = NAMED_CRITICAL "codegen" (fn () =>   (* FIXME ??? *)
-          ML_Context.eval_in (SOME ctxt) false Position.none s);
+          ML_Context.eval_text_in (SOME ctxt) false Position.none s);
       in !eval_result end;
   in e () end;
 
@@ -1001,7 +1001,7 @@
       val (code, gr) = setmp_CRITICAL mode mode'' (generate_code thy modules module) xs;
       val thy' = thy |> Context.theory_map (ML_Context.exec (fn () =>
         (case opt_fname of
-          NONE => ML_Context.eval false Position.none (cat_lines (map snd code))
+          NONE => ML_Context.eval_text false Position.none (cat_lines (map snd code))
         | SOME fname =>
             if lib then app (fn (name, s) => File.write
                 (Path.append (Path.explode fname) (Path.basic (name ^ ".ML"))) s)
--- a/src/Tools/Code/code_eval.ML	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/Code/code_eval.ML	Mon May 31 10:29:04 2010 +0200
@@ -165,7 +165,8 @@
       in
         thy
         |> Code_Target.add_reserved target module_name
-        |> Context.theory_map (ML_Context.exec (fn () => ML_Context.eval true Position.none code_body))
+        |> Context.theory_map
+          (ML_Context.exec (fn () => ML_Context.eval_text true Position.none code_body))
         |> fold (add_eval_tyco o apsnd pr) tyco_map
         |> fold (add_eval_constr o apsnd pr) constr_map
         |> fold (add_eval_const o apsnd pr) const_map
--- a/src/Tools/jEdit/dist-template/etc/isabelle-jedit.css	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/dist-template/etc/isabelle-jedit.css	Mon May 31 10:29:04 2010 +0200
@@ -10,3 +10,7 @@
 
 .hilite { background-color: #FFFACD; }
 
+.keyword { font-weight: bold; color: #009966; }
+.operator { font-weight: bold; }
+.command { font-weight: bold; color: #006699; }
+
--- a/src/Tools/jEdit/dist-template/properties/jedit.props	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/dist-template/properties/jedit.props	Mon May 31 10:29:04 2010 +0200
@@ -174,11 +174,17 @@
 encoding.opt-out.x-windows-950=true
 encoding.opt-out.x-windows-iso2022jp=true
 encodingDetectors=BOM XML-PI buffer-local-property
+end.shortcut=
 fallbackEncodings=UTF-8 ISO-8859-15 US-ASCII
 firstTime=false
+home.shortcut=
+insert-newline-indent.shortcut=
+insert-newline.shortcut=ENTER
 isabelle-output.dock-position=bottom
 isabelle-protocol.dock-position=bottom
 isabelle.activate.shortcut=CS+ENTER
+line-end.shortcut=END
+line-home.shortcut=HOME
 mode.isabelle.sidekick.showStatusWindow.label=true
 print.font=IsabelleText
 sidekick-tree.dock-position=right
--- a/src/Tools/jEdit/plugin/Isabelle.props	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/plugin/Isabelle.props	Mon May 31 10:29:04 2010 +0200
@@ -27,6 +27,8 @@
 options.isabelle.logic.title=Logic
 options.isabelle.relative-font-size.title=Relative Font Size
 options.isabelle.relative-font-size=100
+options.isabelle.tooltip-font-size.title=Tooltip Font Size
+options.isabelle.tooltip-font-size=10
 options.isabelle.startup-timeout=10000
 
 #menu actions
--- a/src/Tools/jEdit/src/jedit/document_view.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/document_view.scala	Mon May 31 10:29:04 2010 +0200
@@ -131,7 +131,10 @@
       val offset = model.from_current(document, text_area.xyToOffset(x, y))
       document.command_at(offset) match {
         case Some((command, command_start)) =>
-          document.current_state(command).type_at(offset - command_start) getOrElse null
+          document.current_state(command).type_at(offset - command_start) match {
+            case Some(text) => Isabelle.tooltip(text)
+            case None => null
+          }
         case None => null
       }
     }
--- a/src/Tools/jEdit/src/jedit/html_panel.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/html_panel.scala	Mon May 31 10:29:04 2010 +0200
@@ -107,7 +107,7 @@
 
   private def template(font_family: String, font_size: Int): String =
     template_head +
-    "body { font-family: " + font_family + "; font-size: " + raw_px(font_size) + "px }" +
+    "body { font-family: " + font_family + "; font-size: " + raw_px(font_size) + "px; }" +
     template_tail
 
 
--- a/src/Tools/jEdit/src/jedit/isabelle_options.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/isabelle_options.scala	Mon May 31 10:29:04 2010 +0200
@@ -16,6 +16,7 @@
 {
   private val logic_name = new JComboBox()
   private val relative_font_size = new JSpinner()
+  private val tooltip_font_size = new JSpinner()
 
   private class List_Item(val name: String, val descr: String) {
     def this(name: String) = this(name, name)
@@ -37,9 +38,14 @@
     })
 
     addComponent(Isabelle.Property("relative-font-size.title"), {
-      relative_font_size.setValue(Isabelle.Int_Property("relative-font-size"))
+      relative_font_size.setValue(Isabelle.Int_Property("relative-font-size", 100))
       relative_font_size
     })
+
+    addComponent(Isabelle.Property("tooltip-font-size.title"), {
+      tooltip_font_size.setValue(Isabelle.Int_Property("tooltip-font-size", 10))
+      tooltip_font_size
+    })
   }
 
   override def _save()
@@ -49,5 +55,8 @@
 
     Isabelle.Int_Property("relative-font-size") =
       relative_font_size.getValue().asInstanceOf[Int]
+
+    Isabelle.Int_Property("tooltip-font-size") =
+      tooltip_font_size.getValue().asInstanceOf[Int]
   }
 }
--- a/src/Tools/jEdit/src/jedit/isabelle_token_marker.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/isabelle_token_marker.scala	Mon May 31 10:29:04 2010 +0200
@@ -8,13 +8,11 @@
 package isabelle.jedit
 
 
-import isabelle.Markup
+import isabelle._
 
 import org.gjt.sp.jedit.buffer.JEditBuffer
-import org.gjt.sp.jedit.syntax.{Token, TokenMarker, TokenHandler, SyntaxStyle, ParserRuleSet}
+import org.gjt.sp.jedit.syntax.{Token, TokenMarker, TokenHandler, ParserRuleSet}
 
-import java.awt.Color
-import java.awt.Font
 import javax.swing.text.Segment;
 
 
@@ -30,60 +28,74 @@
   /* mapping to jEdit token types */
   // TODO: as properties or CSS style sheet
 
-  private val choose_byte: Map[String, Byte] =
+  private val command_style: Map[String, Byte] =
+  {
+    import Token._
+    Map[String, Byte](
+      Keyword.THY_END -> KEYWORD2,
+      Keyword.THY_SCRIPT -> LABEL,
+      Keyword.PRF_SCRIPT -> LABEL,
+      Keyword.PRF_ASM -> KEYWORD3,
+      Keyword.PRF_ASM_GOAL -> KEYWORD3
+    ).withDefaultValue(KEYWORD1)
+  }
+
+  private val token_style: Map[String, Byte] =
   {
     import Token._
     Map[String, Byte](
       // logical entities
-      Markup.TCLASS -> LABEL,
-      Markup.TYCON -> LABEL,
-      Markup.FIXED_DECL -> LABEL,
-      Markup.FIXED -> LABEL,
-      Markup.CONST_DECL -> LABEL,
-      Markup.CONST -> LABEL,
-      Markup.FACT_DECL -> LABEL,
-      Markup.FACT -> LABEL,
+      Markup.TCLASS -> NULL,
+      Markup.TYCON -> NULL,
+      Markup.FIXED_DECL -> FUNCTION,
+      Markup.FIXED -> NULL,
+      Markup.CONST_DECL -> FUNCTION,
+      Markup.CONST -> NULL,
+      Markup.FACT_DECL -> FUNCTION,
+      Markup.FACT -> NULL,
       Markup.DYNAMIC_FACT -> LABEL,
-      Markup.LOCAL_FACT_DECL -> LABEL,
-      Markup.LOCAL_FACT -> LABEL,
+      Markup.LOCAL_FACT_DECL -> FUNCTION,
+      Markup.LOCAL_FACT -> NULL,
       // inner syntax
-      Markup.TFREE -> LITERAL2,
-      Markup.FREE -> LITERAL2,
-      Markup.TVAR -> LITERAL3,
-      Markup.SKOLEM -> LITERAL3,
-      Markup.BOUND -> LITERAL3,
-      Markup.VAR -> LITERAL3,
+      Markup.TFREE -> NULL,
+      Markup.FREE -> NULL,
+      Markup.TVAR -> NULL,
+      Markup.SKOLEM -> NULL,
+      Markup.BOUND -> NULL,
+      Markup.VAR -> NULL,
       Markup.NUM -> DIGIT,
       Markup.FLOAT -> DIGIT,
       Markup.XNUM -> DIGIT,
       Markup.XSTR -> LITERAL4,
-      Markup.LITERAL -> LITERAL4,
+      Markup.LITERAL -> OPERATOR,
       Markup.INNER_COMMENT -> COMMENT1,
-      Markup.SORT -> FUNCTION,
-      Markup.TYP -> FUNCTION,
-      Markup.TERM -> FUNCTION,
-      Markup.PROP -> FUNCTION,
-      Markup.ATTRIBUTE -> FUNCTION,
-      Markup.METHOD -> FUNCTION,
+      Markup.SORT -> NULL,
+      Markup.TYP -> NULL,
+      Markup.TERM -> NULL,
+      Markup.PROP -> NULL,
+      Markup.ATTRIBUTE -> NULL,
+      Markup.METHOD -> NULL,
       // ML syntax
-      Markup.ML_KEYWORD -> KEYWORD2,
+      Markup.ML_KEYWORD -> KEYWORD1,
+      Markup.ML_DELIMITER -> OPERATOR,
       Markup.ML_IDENT -> NULL,
-      Markup.ML_TVAR -> LITERAL3,
+      Markup.ML_TVAR -> NULL,
       Markup.ML_NUMERAL -> DIGIT,
       Markup.ML_CHAR -> LITERAL1,
       Markup.ML_STRING -> LITERAL1,
       Markup.ML_COMMENT -> COMMENT1,
       Markup.ML_MALFORMED -> INVALID,
       // embedded source text
-      Markup.ML_SOURCE -> COMMENT4,
-      Markup.DOC_SOURCE -> COMMENT4,
+      Markup.ML_SOURCE -> COMMENT3,
+      Markup.DOC_SOURCE -> COMMENT3,
       Markup.ANTIQ -> COMMENT4,
       Markup.ML_ANTIQ -> COMMENT4,
       Markup.DOC_ANTIQ -> COMMENT4,
       // outer syntax
+      Markup.KEYWORD -> KEYWORD2,
+      Markup.OPERATOR -> OPERATOR,
+      Markup.COMMAND -> KEYWORD1,
       Markup.IDENT -> NULL,
-      Markup.COMMAND -> OPERATOR,
-      Markup.KEYWORD -> KEYWORD4,
       Markup.VERBATIM -> COMMENT3,
       Markup.COMMENT -> COMMENT1,
       Markup.CONTROL -> COMMENT3,
@@ -92,9 +104,6 @@
       Markup.ALTSTRING -> LITERAL1
     ).withDefaultValue(NULL)
   }
-
-  def choose_color(kind: String, styles: Array[SyntaxStyle]): Color =
-    styles(choose_byte(kind)).getForegroundColor
 }
 
 
@@ -121,20 +130,27 @@
       val abs_stop = to(command_start + markup.stop)
       if (abs_stop > start)
       if (abs_start < stop)
-      val byte = Isabelle_Token_Marker.choose_byte(markup.info.toString)
       val token_start = (abs_start - start) max 0
       val token_length =
         (abs_stop - abs_start) -
         ((start - abs_start) max 0) -
         ((abs_stop - stop) max 0)
-      }
-      {
-        if (start + token_start > next_x)
-          handler.handleToken(line_segment, 1,
-            next_x - start, start + token_start - next_x, context)
-        handler.handleToken(line_segment, byte, token_start, token_length, context)
-        next_x = start + token_start + token_length
-      }
+    }
+    {
+      val byte =
+        markup.info match {
+          case Command.HighlightInfo(Markup.COMMAND, Some(kind)) =>
+            Isabelle_Token_Marker.command_style(kind)
+          case Command.HighlightInfo(kind, _) =>
+            Isabelle_Token_Marker.token_style(kind)
+          case _ => Token.NULL
+        }
+      if (start + token_start > next_x)
+        handler.handleToken(line_segment, 1,
+          next_x - start, start + token_start - next_x, context)
+      handler.handleToken(line_segment, byte, token_start, token_length, context)
+      next_x = start + token_start + token_length
+    }
     if (next_x < stop)
       handler.handleToken(line_segment, 1, next_x - start, stop - next_x, context)
 
--- a/src/Tools/jEdit/src/jedit/plugin.scala	Sun May 30 21:29:37 2010 +0200
+++ b/src/Tools/jEdit/src/jedit/plugin.scala	Mon May 31 10:29:04 2010 +0200
@@ -81,6 +81,14 @@
       Int_Property("relative-font-size", 100)).toFloat / 100
 
 
+  /* tooltip markup */
+
+  def tooltip(text: String): String =
+    "<html><pre style=\"font-family: " + font_family() + "; font-size: " +
+        Int_Property("tooltip-font-size", 10).toString + "px; \">" +  // FIXME proper scaling (!?)
+      HTML.encode(text) + "</pre></html>"
+
+
   /* settings */
 
   def default_logic(): String =