clarified init_assignable: make doubly sure that initial values are reset;
more systematic reports for Args.syntax: indicate Args.$$$ quasi-keywords and suppress confusing completion of single symbols like ":", "|", "?";
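The intent of the new protocol element is easiest to see end-to-end: the prover marks a quasi-keyword position with no_completion, the Scala side decodes this into an empty Completion.Names value, and the jEdit popup treats that as an explicit "nothing to complete here" instead of falling back to syntax completion. Below is a minimal, self-contained Scala sketch of that flow; Range, Names, Result, decode and popup are simplified stand-ins for the real Text.Range / Completion classes, assumed here only for illustration and not part of the change itself.

object NoCompletionSketch {
  // simplified stand-in for Text.Range
  final case class Range(start: Int, stop: Int)

  // mirrors Completion.Names: an explicit "no completion" is total == 0 and no names
  final case class Names(range: Range, total: Int, names: List[(String, String)]) {
    def no_completion: Boolean = total == 0 && names.isEmpty
  }

  // mirrors Completion.Result with its new empty constructor
  final case class Result(range: Range, original: String, unique: Boolean, items: List[String])
  object Result {
    def empty(range: Range): Result = Result(range, "", false, Nil)
  }

  // decode side: a "completion" element carries candidate names,
  // a "no_completion" element yields Names(range, 0, Nil)
  def decode(markup_name: String, range: Range): Option[Names] =
    markup_name match {
      case "completion" => Some(Names(range, 2, List(("\\forall", "∀"), ("\\exists", "∃"))))
      case "no_completion" => Some(Names(range, 0, Nil))
      case _ => None
    }

  // editor side: an explicit no_completion is answered with an empty result
  // (popup suppressed) rather than an attempt at further completion
  def popup(names: Names, original: String): Option[Result] =
    if (names.no_completion) Some(Result.empty(names.range))
    else Some(Result(names.range, original, names.total == 1, names.names.map(_._2)))

  def main(args: Array[String]): Unit = {
    val range = Range(10, 11)
    println(decode("no_completion", range).flatMap(popup(_, ":")))   // empty result, no popup
    println(decode("completion", range).flatMap(popup(_, "\\")))     // ordinary candidates
  }
}

In this sketch a caret on a single-symbol quasi-keyword such as ":" produces an empty result and therefore no popup, while ordinary completion positions still produce their candidate items as before.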
--- a/src/Pure/General/completion.scala Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/General/completion.scala Wed Mar 05 13:11:08 2014 +0100
@@ -27,6 +27,11 @@
immediate: Boolean)
{ override def toString: String = description }
+ object Result
+ {
+ def empty(range: Text.Range): Result = Result(range, "", false, Nil)
+ }
+
sealed case class Result(
range: Text.Range,
original: String,
@@ -136,6 +141,8 @@
Some(Names(info.range, total, names))
}
catch { case _: XML.Error => None }
+ case XML.Elem(Markup(Markup.NO_COMPLETION, _), _) =>
+ Some(Names(info.range, 0, Nil))
case _ => None
}
}
@@ -143,6 +150,8 @@
sealed case class Names(range: Text.Range, total: Int, names: List[(String, String)])
{
+ def no_completion: Boolean = total == 0 && names.isEmpty
+
def complete(
history: Completion.History,
decode: Boolean,
--- a/src/Pure/Isar/args.ML Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/Isar/args.ML Wed Mar 05 13:11:08 2014 +0100
@@ -13,7 +13,7 @@
val pretty_src: Proof.context -> src -> Pretty.T
val map_name: (string -> string) -> src -> src
val transform_values: morphism -> src -> src
- val assignable: src -> src
+ val init_assignable: src -> src
val closure: src -> src
val context: Proof.context context_parser
val theory: theory context_parser
@@ -85,7 +85,8 @@
val prt_thm = Pretty.backquote o Display.pretty_thm ctxt;
fun prt arg =
(case Token.get_value arg of
- SOME (Token.Text s) => Pretty.str (quote s)
+ SOME (v as Token.Literal s) => Pretty.marks_str (Token.value_markup false v, s)
+ | SOME (Token.Text s) => Pretty.str (quote s)
| SOME (Token.Typ T) => Syntax.pretty_typ ctxt T
| SOME (Token.Term t) => Syntax.pretty_term ctxt t
| SOME (Token.Fact ths) => Pretty.enclose "(" ")" (Pretty.breaks (map prt_thm ths))
@@ -100,14 +101,13 @@
(* values *)
fun transform_values phi = map_args (Token.map_value
- (fn Token.Text s => Token.Text s
- | Token.Typ T => Token.Typ (Morphism.typ phi T)
+ (fn Token.Typ T => Token.Typ (Morphism.typ phi T)
| Token.Term t => Token.Term (Morphism.term phi t)
| Token.Fact ths => Token.Fact (Morphism.fact phi ths)
| Token.Attribute att => Token.Attribute (Morphism.transform phi att)
- | tok as Token.Files _ => tok));
+ | tok => tok));
-val assignable = map_args Token.assignable;
+val init_assignable = map_args Token.init_assignable;
val closure = map_args Token.closure;
@@ -132,8 +132,10 @@
val alt_string = token Parse.alt_string;
val symbolic = token Parse.keyword_ident_or_symbolic;
-fun $$$ x = (ident >> Token.content_of || Parse.keyword)
- :|-- (fn y => if x = y then Scan.succeed x else Scan.fail);
+fun $$$ x =
+ (ident || token Parse.keyword) :|-- (fn tok =>
+ let val y = Token.content_of tok
+ in if x = y then (Token.assign (SOME (Token.Literal x)) tok; Scan.succeed x) else Scan.fail end);
val named = ident || string;
@@ -287,13 +289,25 @@
(** syntax wrapper **)
-fun syntax kind scan (Src ((s, args), pos)) st =
- (case Scan.error (Scan.finite' Token.stopper (Scan.option scan)) (st, args) of
- (SOME x, (st', [])) => (x, st')
- | (_, (_, args')) =>
- error ("Bad arguments for " ^ kind ^ " " ^ quote s ^ Position.here pos ^
- (if null args' then "" else ":\n " ^ space_implode " " (map Token.print args'))));
+fun syntax kind scan (Src ((s, args0), pos)) context =
+ let
+ val args1 = map Token.init_assignable args0;
+ fun reported_text () =
+ if Context_Position.is_visible_generic context then
+ maps (Token.reports_of_value o Token.closure) args1
+ |> map (fn (p, m) => Position.reported_text p m "")
+ |> implode
+ else "";
+ in
+ (case Scan.error (Scan.finite' Token.stopper (Scan.option scan)) (context, args1) of
+ (SOME x, (context', [])) => (Output.report (reported_text ()); (x, context'))
+ | (_, (_, args2)) =>
+ error ("Bad arguments for " ^ kind ^ " " ^ quote s ^ Position.here pos ^
+ (if null args2 then "" else ":\n " ^ space_implode " " (map Token.print args2)) ^
+ Markup.markup Markup.report (reported_text ())))
+ end;
-fun context_syntax kind scan src = apsnd Context.the_proof o syntax kind scan src o Context.Proof;
+fun context_syntax kind scan src =
+ apsnd Context.the_proof o syntax kind scan src o Context.Proof;
end;
--- a/src/Pure/Isar/attrib.ML Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/Isar/attrib.ML Wed Mar 05 13:11:08 2014 +0100
@@ -262,7 +262,7 @@
fun apply_att src (context, th) =
let
- val src1 = Args.assignable src;
+ val src1 = Args.init_assignable src;
val result = attribute_generic context src1 (context, th);
val src2 = Args.closure src1;
in (src2, result) end;
--- a/src/Pure/Isar/element.ML Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/Isar/element.ML Wed Mar 05 13:11:08 2014 +0100
@@ -512,7 +512,7 @@
fun activate_i elem ctxt =
let
val elem' =
- (case map_ctxt_attrib Args.assignable elem of
+ (case map_ctxt_attrib Args.init_assignable elem of
Defines defs =>
Defines (defs |> map (fn ((a, atts), (t, ps)) =>
((Thm.def_binding_optional (Binding.name (#1 (#1 (Local_Defs.cert_def ctxt t)))) a, atts),
--- a/src/Pure/Isar/token.ML Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/Isar/token.ML Wed Mar 05 13:11:08 2014 +0100
@@ -12,7 +12,7 @@
Error of string | Sync | EOF
type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T}
datatype value =
- Text of string | Typ of typ | Term of term | Fact of thm list |
+ Literal of string | Text of string | Typ of typ | Term of term | Fact of thm list |
Attribute of morphism -> attribute | Files of file Exn.result list
type T
val str_of_kind: kind -> string
@@ -43,6 +43,7 @@
val source_position_of: T -> Symbol_Pos.source
val content_of: T -> string
val markup: T -> Markup.T * string
+ val value_markup: bool -> value -> Markup.T list
val unparse: T -> string
val print: T -> string
val text_of: T -> string * string
@@ -50,12 +51,13 @@
val put_files: file Exn.result list -> T -> T
val get_value: T -> value option
val map_value: (value -> value) -> T -> T
+ val reports_of_value: T -> Position.report list
val mk_text: string -> T
val mk_typ: typ -> T
val mk_term: term -> T
val mk_fact: thm list -> T
val mk_attribute: (morphism -> attribute) -> T
- val assignable: T -> T
+ val init_assignable: T -> T
val assign: value option -> T -> unit
val closure: T -> T
val ident_or_symbolic: string -> bool
@@ -83,6 +85,7 @@
type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T};
datatype value =
+ Literal of string |
Text of string |
Typ of typ |
Term of term |
@@ -248,13 +251,24 @@
| Sync => (Markup.control, "")
| EOF => (Markup.control, "");
+fun keyword_markup x =
+ if Symbol.is_ascii_identifier x
+ then Markup.keyword2
+ else Markup.operator;
+
in
fun markup tok =
- if keyword_with (not o Symbol.is_ascii_identifier) tok
- then (Markup.operator, "")
+ if is_kind Keyword tok
+ then (keyword_markup (content_of tok), "")
else token_kind_markup (kind_of tok);
+fun value_markup known_keyword (Literal x) =
+ (if known_keyword then [] else [keyword_markup x]) @
+ (if Symbol.is_ascii_identifier x orelse length (Symbol.explode x) > 1 then []
+ else [Markup.no_completion])
+ | value_markup _ _ = [];
+
end;
@@ -309,6 +323,18 @@
fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v)))
| map_value _ tok = tok;
+fun reports_of_value tok =
+ (case get_value tok of
+ NONE => []
+ | SOME v =>
+ let
+ val pos = pos_of tok;
+ val known_keyword = is_kind Keyword tok;
+ in
+ if Position.is_reported pos then map (pair pos) (value_markup known_keyword v)
+ else []
+ end);
+
(* make values *)
@@ -323,9 +349,10 @@
(* static binding *)
-(*1st stage: make empty slots assignable*)
-fun assignable (Token (x, y, Slot)) = Token (x, y, Assignable (Unsynchronized.ref NONE))
- | assignable tok = tok;
+(*1st stage: initialize assignable slots*)
+fun init_assignable (Token (x, y, Slot)) = Token (x, y, Assignable (Unsynchronized.ref NONE))
+ | init_assignable (tok as Token (_, _, Assignable r)) = (r := NONE; tok)
+ | init_assignable tok = tok;
(*2nd stage: assign values as side-effect of scanning*)
fun assign v (Token (_, _, Assignable r)) = r := v
--- a/src/Pure/ML/ml_thms.ML Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/ML/ml_thms.ML Wed Mar 05 13:11:08 2014 +0100
@@ -90,17 +90,13 @@
val _ = Theory.setup
(ML_Context.add_antiq @{binding lemma}
(Scan.depend (fn context =>
- Args.mode "open" -- Parse.enum1_positions "and" (Scan.repeat1 goal) --
- Parse.position by -- Method.parse -- Scan.option Method.parse
- >> (fn ((((is_open, (raw_propss, and_positions)), (_, by_pos)), m1), m2) =>
+ Args.mode "open" -- Parse.enum1 "and" (Scan.repeat1 goal) --
+ (by |-- Method.parse -- Scan.option Method.parse)
+ >> (fn ((is_open, raw_propss), (m1, m2)) =>
let
val ctxt = Context.proof_of context;
- val reports =
- (by_pos, Markup.keyword1) ::
- map (rpair Markup.keyword2) and_positions @
- maps Method.reports_of (m1 :: the_list m2);
- val _ = Context_Position.reports ctxt reports;
+ val _ = Context_Position.reports ctxt (maps Method.reports_of (m1 :: the_list m2));
val propss = burrow (map (rpair []) o Syntax.read_props ctxt) raw_propss;
val prep_result = Goal.norm_result ctxt #> not is_open ? Thm.close_derivation;
--- a/src/Pure/PIDE/markup.ML Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/PIDE/markup.ML Wed Mar 05 13:11:08 2014 +0100
@@ -42,6 +42,7 @@
val defN: string
val refN: string
val completionN: string val completion: T
+ val no_completionN: string val no_completion: T
val lineN: string
val offsetN: string
val end_offsetN: string
@@ -304,6 +305,7 @@
(* completion *)
val (completionN, completion) = markup_elem "completion";
+val (no_completionN, no_completion) = markup_elem "no_completion";
(* position *)
--- a/src/Pure/PIDE/markup.scala Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Pure/PIDE/markup.scala Wed Mar 05 13:11:08 2014 +0100
@@ -70,6 +70,7 @@
/* completion */
val COMPLETION = "completion"
+ val NO_COMPLETION = "no_completion"
/* position */
--- a/src/Tools/jEdit/src/completion_popup.scala Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Tools/jEdit/src/completion_popup.scala Wed Mar 05 13:11:08 2014 +0100
@@ -114,7 +114,9 @@
before_caret_range(rendering).try_restrict(line_range) match {
case Some(range) if !range.is_singularity =>
rendering.completion_names(range) match {
- case Some(names) => Some(names.range)
+ case Some(names) =>
+ if (names.no_completion) None
+ else Some(names.range)
case None =>
syntax_completion(false, Some(rendering)) match {
case Some(result) => Some(result.range)
@@ -232,12 +234,15 @@
case Some(doc_view) =>
val rendering = doc_view.get_rendering()
rendering.completion_names(before_caret_range(rendering)) match {
+ case Some(names) =>
+ if (names.no_completion)
+ Some(Completion.Result.empty(names.range))
+ else
+ JEdit_Lib.try_get_text(buffer, names.range) match {
+ case Some(original) => names.complete(history, decode, original)
+ case None => None
+ }
case None => None
- case Some(names) =>
- JEdit_Lib.try_get_text(buffer, names.range) match {
- case Some(original) => names.complete(history, decode, original)
- case None => None
- }
}
case None => None
}
--- a/src/Tools/jEdit/src/rendering.scala Wed Mar 05 09:59:48 2014 +0100
+++ b/src/Tools/jEdit/src/rendering.scala Wed Mar 05 13:11:08 2014 +0100
@@ -130,7 +130,7 @@
/* markup elements */
private val completion_names_elements =
- Document.Elements(Markup.COMPLETION)
+ Document.Elements(Markup.COMPLETION, Markup.NO_COMPLETION)
private val completion_language_elements =
Document.Elements(Markup.STRING, Markup.ALTSTRING, Markup.VERBATIM,