# HG changeset patch
# User wenzelm
# Date 1415223589 -3600
# Node ID edcd9339bda1f8d80a168f183532881b84ff02c9
# Parent  1ebf0a1f12a4816a22bdc73c1918f49afbb56fa3
# Parent  f323497583d108b27f6764446e5924cf359afe9b
merged

diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Doc/antiquote_setup.ML
--- a/src/Doc/antiquote_setup.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Doc/antiquote_setup.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -195,7 +195,7 @@
     is_some (Keyword.command_keyword name) andalso
       let
         val markup =
-          Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none name
+          Outer_Syntax.scan (Keyword.get_keywords ()) Position.none name
           |> maps (Outer_Syntax.command_reports (#2 (Outer_Syntax.get_syntax ())))
           |> map (snd o fst);
         val _ = Context_Position.reports ctxt (map (pair pos) markup);
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/HOL/Decision_Procs/MIR.thy
--- a/src/HOL/Decision_Procs/MIR.thy	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/HOL/Decision_Procs/MIR.thy	Wed Nov 05 22:39:49 2014 +0100
@@ -5667,19 +5667,19 @@
 *} "decision procedure for MIR arithmetic"
 
-lemma "ALL (x::real). (⌊x⌋ = ⌈x⌉ = (x = real ⌊x⌋))"
+lemma "∀x::real. (⌊x⌋ = ⌈x⌉ = (x = real ⌊x⌋))"
   by mir
 
-lemma "ALL (x::real). real (2::int)*x - (real (1::int)) < real ⌊x⌋ + real ⌈x⌉ ∧ real ⌊x⌋ + real ⌈x⌉ ≤ real (2::int)*x + (real (1::int))"
+lemma "∀x::real. real (2::int)*x - (real (1::int)) < real ⌊x⌋ + real ⌈x⌉ ∧ real ⌊x⌋ + real ⌈x⌉ ≤ real (2::int)*x + (real (1::int))"
   by mir
 
-lemma "ALL (x::real). 2*⌊x⌋ ≤ ⌊2*x⌋ ∧ ⌊2*x⌋ ≤ 2*⌊x+1⌋"
+lemma "∀x::real. 2*⌊x⌋ ≤ ⌊2*x⌋ ∧ ⌊2*x⌋ ≤ 2*⌊x+1⌋"
   by mir
 
-lemma "ALL (x::real). ∃y ≤ x. (⌊x⌋ = ⌈y⌉)"
+lemma "∀x::real. ∃y ≤ x. (⌊x⌋ = ⌈y⌉)"
   by mir
 
-lemma "ALL (x::real) (y::real). ⌊x⌋ = ⌊y⌋ ⟹ 0 ≤ abs (y - x) ∧ abs (y - x) ≤ 1"
+lemma "∀(x::real) (y::real). ⌊x⌋ = ⌊y⌋ ⟹ 0 ≤ abs (y - x) ∧ abs (y - x) ≤ 1"
   by mir
 
 end
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -535,7 +535,7 @@
 fun do_method named_thms ctxt =
   let
     val ref_of_str =
-      suffix ";" #> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none #> Parse.xthm
+      suffix ";" #> Outer_Syntax.scan (Keyword.get_keywords ()) Position.none #> Parse.xthm
       #> fst
     val thms = named_thms |> maps snd
     val facts = named_thms |> map (ref_of_str o fst o fst)
@@ -561,7 +561,7 @@
       let
         val (type_encs, lam_trans) =
           !meth
-          |> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.start
+          |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start
          |> filter Token.is_proper |> tl
          |> Metis_Tactic.parse_metis_options |> fst
          |>> the_default [ATP_Proof_Reconstruct.partial_typesN]
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/HOL/Tools/Lifting/lifting_setup.ML
--- a/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -221,7 +221,7 @@
     val thy = Proof_Context.theory_of lthy
     val dummy_thm = Thm.transfer thy Drule.dummy_thm
 
-    val pointer = Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none bundle_name
+    val pointer = Outer_Syntax.scan (Keyword.get_keywords ()) Position.none bundle_name
     val restore_lifting_att =
       ([dummy_thm], [Token.src ("Lifting.lifting_restore_internal", Position.none) pointer])
   in
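
The recurring edit in this changeset replaces the pair of lexicons obtained from Keyword.get_lexicons with the self-contained Keyword.keywords value from Keyword.get_keywords. A minimal sketch of the new calling convention for scanning a string into outer tokens, assuming an Isabelle/ML toplevel where these Pure modules are loaded (the command string is illustrative):

    (* scan a source string with the current global keyword table *)
    val toks =
      Outer_Syntax.scan (Keyword.get_keywords ()) Position.none "lemma \"x = x\"";
    (* drop whitespace and comments, as most callers in this changeset do *)
    val proper = filter Token.is_proper toks;
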
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/HOL/Tools/Sledgehammer/sledgehammer_util.ML
--- a/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -80,7 +80,7 @@
 val subgoal_count = Try.subgoal_count
 
 fun reserved_isar_keyword_table () =
-  Keyword.dest () |-> union (op =) |> map (rpair ()) |> Symtab.make
+  Symtab.make_set (Scan.dest_lexicon (Scan.merge_lexicons (Keyword.get_lexicons ())));
 
 exception TOO_MANY of unit
 
@@ -126,12 +126,12 @@
 fun thms_of_name ctxt name =
   let
-    val lex = Keyword.get_lexicons
     val get = maps (Proof_Context.get_fact ctxt o fst)
+    val keywords = Keyword.get_keywords ()
   in
     Source.of_string name
     |> Symbol.source
-    |> Token.source lex Position.start
+    |> Token.source keywords Position.start
    |> Token.source_proper
    |> Source.source Token.stopper (Parse.xthms1 >> get)
    |> Source.exhaust
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/HOL/Tools/try0.ML
--- a/src/HOL/Tools/try0.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/HOL/Tools/try0.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -43,7 +43,7 @@
 
 fun parse_method s =
   enclose "(" ")" s
-  |> Outer_Syntax.scan (Keyword.get_lexicons ()) Position.start
+  |> Outer_Syntax.scan (Keyword.get_keywords ()) Position.start
   |> filter Token.is_proper
   |> Scan.read Token.stopper Method.parse
   |> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source");
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/attrib.ML
--- a/src/Pure/Isar/attrib.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/attrib.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -289,9 +289,9 @@
 fun read_attribs ctxt source =
   let
     val syms = Symbol_Pos.source_explode source;
-    val lex = #1 (Keyword.get_lexicons ());
+    val keywords = Keyword.get_keywords ();
   in
-    (case Token.read lex Parse.attribs syms of
+    (case Token.read_no_commands keywords Parse.attribs syms of
       [raw_srcs] => check_attribs ctxt raw_srcs
     | _ => error ("Malformed attributes" ^ Position.here (#pos source)))
   end;
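
Token.read is renamed to Token.read_no_commands below (see src/Pure/Isar/token.ML): it scans against a keyword table whose command lexicon has been emptied, so command names are treated as plain identifiers rather than commands. A sketch of the attribute-reading idiom under the new interface, assuming Isabelle/ML; the concrete attribute string is illustrative only:

    (* explode a source string into positioned symbols *)
    val syms = Symbol_Pos.explode ("simp add: refl", Position.none);
    (* read attribute sources without recognizing command keywords *)
    val srcs = Token.read_no_commands (Keyword.get_keywords ()) Parse.attribs syms;
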
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/keyword.ML
--- a/src/Pure/Isar/keyword.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/keyword.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -1,7 +1,7 @@
 (*  Title:      Pure/Isar/keyword.ML
     Author:     Makarius
 
-Isar command keyword classification and global keyword tables.
+Isar keyword classification.
 *)
 
 signature KEYWORD =
@@ -41,13 +41,20 @@
   type spec = (string * string list) * string list
   val spec: spec -> T
   val command_spec: (string * spec) * Position.T -> (string * T) * Position.T
+  type keywords
+  val minor_keywords: keywords -> Scan.lexicon
+  val major_keywords: keywords -> Scan.lexicon
+  val empty_keywords: keywords
+  val merge_keywords: keywords * keywords -> keywords
+  val no_command_keywords: keywords -> keywords
+  val add: string * T option -> keywords -> keywords
+  val define: string * T option -> unit
+  val get_keywords: unit -> keywords
   val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
   val is_keyword: string -> bool
   val command_keyword: string -> T option
   val command_files: string -> Path.T -> Path.T list
   val command_tags: string -> string list
-  val dest: unit -> string list * string list
-  val define: string * T option -> unit
   val is_diag: string -> bool
   val is_heading: string -> bool
   val is_theory_begin: string -> bool
@@ -144,32 +151,69 @@
 
 
 
-(** global keyword tables **)
+(** keyword tables **)
+
+(* type keywords *)
 
 datatype keywords = Keywords of
- {lexicons: Scan.lexicon * Scan.lexicon,  (*minor, major*)
-  commands: T Symtab.table};  (*command classification*)
+ {minor: Scan.lexicon,
+  major: Scan.lexicon,
+  commands: T Symtab.table};
+
+fun minor_keywords (Keywords {minor, ...}) = minor;
+fun major_keywords (Keywords {major, ...}) = major;
+fun commands (Keywords {commands, ...}) = commands;
+
+fun make_keywords (minor, major, commands) =
+  Keywords {minor = minor, major = major, commands = commands};
 
-fun make_keywords (lexicons, commands) =
-  Keywords {lexicons = lexicons, commands = commands};
+fun map_keywords f (Keywords {minor, major, commands}) =
+  make_keywords (f (minor, major, commands));
+
+val empty_keywords =
+  make_keywords (Scan.empty_lexicon, Scan.empty_lexicon, Symtab.empty);
 
-local
+fun merge_keywords
+  (Keywords {minor = minor1, major = major1, commands = commands1},
+    Keywords {minor = minor2, major = major2, commands = commands2}) =
+  make_keywords
+   (Scan.merge_lexicons (minor1, minor2),
+    Scan.merge_lexicons (major1, major2),
+    Symtab.merge (K true) (commands1, commands2));
 
-val global_keywords =
-  Unsynchronized.ref (make_keywords ((Scan.empty_lexicon, Scan.empty_lexicon), Symtab.empty));
+val no_command_keywords =
+  map_keywords (fn (minor, _, _) => (minor, Scan.empty_lexicon, Symtab.empty));
 
-in
+
+(* add keywords *)
 
-fun get_keywords () = ! global_keywords;
+fun add (name, opt_kind) = map_keywords (fn (minor, major, commands) =>
+  (case opt_kind of
+    NONE =>
+      let
+        val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
+      in (minor', major, commands) end
+  | SOME kind =>
+      let
+        val major' = Scan.extend_lexicon (Symbol.explode name) major;
+        val commands' = Symtab.update (name, kind) commands;
+      in (minor, major', commands') end));
 
-fun change_keywords f = CRITICAL (fn () =>
-  Unsynchronized.change global_keywords
-    (fn Keywords {lexicons, commands} => make_keywords (f (lexicons, commands))));
+
+(* global state *)
+
+local val global_keywords = Unsynchronized.ref empty_keywords in
+
+fun define decl = CRITICAL (fn () => Unsynchronized.change global_keywords (add decl));
+fun get_keywords () = ! global_keywords;
 
 end;
 
-fun get_lexicons () = get_keywords () |> (fn Keywords {lexicons, ...} => lexicons);
-fun get_commands () = get_keywords () |> (fn Keywords {commands, ...} => commands);
+fun get_lexicons () =
+  let val keywords = get_keywords ()
+  in (minor_keywords keywords, major_keywords keywords) end;
+
+fun get_commands () = commands (get_keywords ());
 
 
 (* lookup *)
@@ -192,23 +236,6 @@
 
 val command_tags = these o Option.map tags_of o command_keyword;
 
-fun dest () = pairself (sort_strings o Scan.dest_lexicon) (get_lexicons ());
-
-
-(* define *)
-
-fun define (name, opt_kind) = change_keywords (fn ((minor, major), commands) =>
-  (case opt_kind of
-    NONE =>
-      let
-        val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
-      in ((minor', major), commands) end
-  | SOME kind =>
-      let
-        val major' = Scan.extend_lexicon (Symbol.explode name) major;
-        val commands' = Symtab.update (name, kind) commands;
-      in ((minor, major'), commands') end));
-
 
 (* command categories *)
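
The new interface makes keyword tables first-class values: empty_keywords and add build a table locally, without touching the global state behind define/get_keywords. A sketch, assuming Isabelle/ML; the chosen keywords are illustrative:

    (* a private table: two minor keywords plus one heading command *)
    val my_keywords =
      Keyword.empty_keywords
      |> fold (Keyword.add o rpair NONE) [":", "and"]
      |> Keyword.add ("chapter", SOME Keyword.heading);
    (* inspect the minor lexicon of the result *)
    val minor = Scan.dest_lexicon (Keyword.minor_keywords my_keywords);
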
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/keyword.scala
--- a/src/Pure/Isar/keyword.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/keyword.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -1,7 +1,7 @@
 /*  Title:      Pure/Isar/keyword.scala
     Author:     Makarius
 
-Isar command keyword classification and keyword tables.
+Isar keyword classification.
 */
 
 package isabelle
@@ -9,9 +9,10 @@
 
 object Keyword
 {
+  /** keyword classification **/
+
   /* kinds */
 
-  val MINOR = "minor"
   val DIAG = "diag"
   val HEADING = "heading"
   val THY_BEGIN = "thy_begin"
@@ -60,5 +61,74 @@
   val proof_goal = Set(PRF_GOAL, PRF_ASM_GOAL, PRF_ASM_GOAL_SCRIPT)
   val qed = Set(QED, QED_SCRIPT, QED_BLOCK)
   val qed_global = Set(QED_GLOBAL)
+
+
+  type Spec = ((String, List[String]), List[String])
+
+
+
+  /** keyword tables **/
+
+  object Keywords
+  {
+    def empty: Keywords = new Keywords()
+  }
+
+  class Keywords private(
+    val minor: Scan.Lexicon = Scan.Lexicon.empty,
+    val major: Scan.Lexicon = Scan.Lexicon.empty,
+    commands: Map[String, (String, List[String])] = Map.empty)
+  {
+    /* content */
+
+    def is_empty: Boolean = minor.isEmpty && major.isEmpty
+
+    override def toString: String =
+    {
+      val keywords1 = minor.iterator.map(quote(_)).toList
+      val keywords2 =
+        for ((name, (kind, files)) <- commands.toList.sortBy(_._1)) yield {
+          quote(name) + " :: " + quote(kind) +
+          (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
+        }
+      (keywords1 ::: keywords2).mkString("keywords\n  ", " and\n  ", "")
+    }
+
+
+    /* add keywords */
+
+    def + (name: String): Keywords = new Keywords(minor + name, major, commands)
+    def + (name: String, kind: String): Keywords = this + (name, (kind, Nil))
+    def + (name: String, kind: (String, List[String])): Keywords =
+    {
+      val major1 = major + name
+      val commands1 = commands + (name -> kind)
+      new Keywords(minor, major1, commands1)
+    }
+
+
+    /* command kind */
+
+    def command_kind(name: String): Option[String] = commands.get(name).map(_._1)
+
+    def is_command_kind(token: Token, pred: String => Boolean): Boolean =
+      token.is_command &&
+        (command_kind(token.source) match { case Some(k) => pred(k) case None => false })
+
+
+    /* load commands */
+
+    def load_command(name: String): Option[List[String]] =
+      commands.get(name) match {
+        case Some((THY_LOAD, exts)) => Some(exts)
+        case _ => None
+      }
+
+    private lazy val load_commands: List[(String, List[String])] =
+      (for ((name, (THY_LOAD, files)) <- commands.iterator) yield (name, files)).toList
+
+    def load_commands_in(text: String): Boolean =
+      load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
+  }
 }
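
The Scala Keywords class above mirrors the ML interface one-to-one. On the ML side, tables can also be combined and restricted; a sketch, assuming Isabelle/ML:

    (* merge the global table with an (here trivially empty) local one *)
    val ks = Keyword.merge_keywords (Keyword.get_keywords (), Keyword.empty_keywords);
    (* keep only the minor keywords, discarding all command classifications *)
    val minor_only = Keyword.no_command_keywords ks;
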
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/outer_syntax.ML
--- a/src/Pure/Isar/outer_syntax.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -12,7 +12,7 @@
   type syntax
   val batch_mode: bool Unsynchronized.ref
   val is_markup: syntax -> Thy_Output.markup -> string -> bool
-  val get_syntax: unit -> (Scan.lexicon * Scan.lexicon) * syntax
+  val get_syntax: unit -> Keyword.keywords * syntax
   val check_syntax: unit -> unit
   type command_spec = (string * Keyword.T) * Position.T
   val command: command_spec -> string ->
@@ -29,9 +29,8 @@
     (local_theory -> Proof.state) parser -> unit
   val help_syntax: string list -> unit
   val print_syntax: unit -> unit
-  val scan: Scan.lexicon * Scan.lexicon -> Position.T -> string -> Token.T list
-  val parse: (Scan.lexicon * Scan.lexicon) * syntax ->
-    Position.T -> string -> Toplevel.transition list
+  val scan: Keyword.keywords -> Position.T -> string -> Token.T list
+  val parse: Keyword.keywords * syntax -> Position.T -> string -> Toplevel.transition list
   val parse_spans: Token.T list -> Command_Span.span list
   val side_comments: Token.T list -> Token.T list
   val command_reports: syntax -> Token.T -> Position.report_text list
@@ -137,13 +136,14 @@
 
 in
 
-fun get_syntax () = CRITICAL (fn () => (Keyword.get_lexicons (), ! global_syntax));
+fun get_syntax () = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax));
 
 fun check_syntax () =
   let
-    val ((_, major), syntax) = CRITICAL (fn () => (Keyword.dest (), ! global_syntax));
+    val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), ! global_syntax));
+    val major = Keyword.major_keywords keywords;
   in
-    (case subtract (op =) (map #1 (dest_commands syntax)) major of
+    (case subtract (op =) (map #1 (dest_commands syntax)) (Scan.dest_lexicon major) of
      [] => ()
    | missing => error ("Missing outer syntax command(s) " ^ commas_quote missing))
  end;
@@ -178,10 +178,11 @@
 
 fun print_syntax () =
   let
-    val ((keywords, _), syntax) = CRITICAL (fn () => (Keyword.dest (), #2 (get_syntax ())));
+    val (keywords, syntax) = CRITICAL (fn () => (Keyword.get_keywords (), #2 (get_syntax ())));
+    val minor = Scan.dest_lexicon (Keyword.minor_keywords keywords);
     val commands = dest_commands syntax;
   in
-    [Pretty.strs ("syntax keywords:" :: map quote keywords),
+    [Pretty.strs ("keywords:" :: map quote minor),
      Pretty.big_list "commands:" (map pretty_command commands)]
    |> Pretty.writeln_chunks
  end;
@@ -195,7 +196,7 @@
 local
 
 fun parse_command syntax =
-  Parse.position Parse.command :|-- (fn (name, pos) =>
+  Parse.position Parse.command_ :|-- (fn (name, pos) =>
    let
      val tr0 = Toplevel.empty |> Toplevel.name name |> Toplevel.position pos;
    in
@@ -222,16 +223,16 @@
 
 (* off-line scanning/parsing *)
 
-fun scan lexs pos =
+fun scan keywords pos =
   Source.of_string #>
   Symbol.source #>
-  Token.source (K lexs) pos #>
+  Token.source keywords pos #>
   Source.exhaust;
 
-fun parse (lexs, syntax) pos str =
+fun parse (keywords, syntax) pos str =
   Source.of_string str
   |> Symbol.source
-  |> Token.source (K lexs) pos
+  |> Token.source keywords pos
   |> commands_source syntax
   |> Source.exhaust;
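
Since get_syntax now returns Keyword.keywords * syntax, its result matches the argument type of parse directly. A sketch of off-line parsing of a command string into toplevel transitions, assuming an Isabelle/ML toplevel with Pure's outer syntax initialized; the command string is illustrative:

    (* scan and parse a self-contained command sequence *)
    val transitions =
      Outer_Syntax.parse (Outer_Syntax.get_syntax ()) Position.none
        "lemma \"True\" by simp";
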
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/outer_syntax.scala
--- a/src/Pure/Isar/outer_syntax.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -74,95 +74,58 @@
 }
 
 final class Outer_Syntax private(
-  keywords: Map[String, (String, List[String])] = Map.empty,
-  lexicon: Scan.Lexicon = Scan.Lexicon.empty,
+  val keywords: Keyword.Keywords = Keyword.Keywords.empty,
   val completion: Completion = Completion.empty,
   val language_context: Completion.Language_Context = Completion.Language_Context.outer,
   val has_tokens: Boolean = true) extends Prover.Syntax
 {
   /** syntax content **/
 
-  override def toString: String =
-    (for ((name, (kind, files)) <- keywords) yield {
-      if (kind == Keyword.MINOR) quote(name)
-      else
-        quote(name) + " :: " + quote(kind) +
-        (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
-    }).toList.sorted.mkString("keywords\n  ", " and\n  ", "")
+  override def toString: String = keywords.toString
 
-  /* keyword kind */
-
-  def keyword_kind_files(name: String): Option[(String, List[String])] = keywords.get(name)
-  def keyword_kind(name: String): Option[String] = keyword_kind_files(name).map(_._1)
+  /* add keywords */
 
-  def is_command(name: String): Boolean =
-    keyword_kind(name) match {
-      case Some(kind) => kind != Keyword.MINOR
-      case None => false
+  def + (name: String): Outer_Syntax = this + (name, None, None)
+  def + (name: String, kind: String): Outer_Syntax = this + (name, Some((kind, Nil)), None)
+  def + (name: String, opt_kind: Option[(String, List[String])], replace: Option[String])
+    : Outer_Syntax =
+  {
+    val keywords1 =
+      opt_kind match {
+        case None => keywords + name
+        case Some(kind) => keywords + (name, kind)
+      }
+    val completion1 =
+      if (replace == Some("")) completion
+      else completion + (name, replace getOrElse name)
+    new Outer_Syntax(keywords1, completion1, language_context, true)
+  }
+
+  def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
+    (this /: keywords) {
+      case (syntax, (name, opt_spec, replace)) =>
+        val opt_kind = opt_spec.map(_._1)
+        syntax +
+          (Symbol.decode(name), opt_kind, replace) +
+          (Symbol.encode(name), opt_kind, replace)
     }
 
-  def command_kind(token: Token, pred: String => Boolean): Boolean =
-    token.is_command && is_command(token.source) &&
-      pred(keyword_kind(token.source).get)
-
 
   /* load commands */
 
-  def load_command(name: String): Option[List[String]] =
-    keywords.get(name) match {
-      case Some((Keyword.THY_LOAD, exts)) => Some(exts)
-      case _ => None
-    }
-
-  val load_commands: List[(String, List[String])] =
-    (for ((name, (Keyword.THY_LOAD, files)) <- keywords.iterator) yield (name, files)).toList
-
-  def load_commands_in(text: String): Boolean =
-    load_commands.exists({ case (cmd, _) => text.containsSlice(cmd) })
-
-
-  /* add keywords */
-
-  def + (name: String, kind: (String, List[String]), replace: Option[String]): Outer_Syntax =
-  {
-    val keywords1 = keywords + (name -> kind)
-    val lexicon1 = lexicon + name
-    val completion1 =
-      if (replace == Some("")) completion
-      else completion + (name, replace getOrElse name)
-    new Outer_Syntax(keywords1, lexicon1, completion1, language_context, true)
-  }
-
-  def + (name: String, kind: (String, List[String])): Outer_Syntax =
-    this + (name, kind, Some(name))
-  def + (name: String, kind: String): Outer_Syntax =
-    this + (name, (kind, Nil), Some(name))
-  def + (name: String, replace: Option[String]): Outer_Syntax =
-    this + (name, (Keyword.MINOR, Nil), replace)
-  def + (name: String): Outer_Syntax = this + (name, None)
-
-  def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
-    (this /: keywords) {
-      case (syntax, (name, Some((kind, _)), replace)) =>
-        syntax +
-          (Symbol.decode(name), kind, replace) +
-          (Symbol.encode(name), kind, replace)
-      case (syntax, (name, None, replace)) =>
-        syntax +
-          (Symbol.decode(name), replace) +
-          (Symbol.encode(name), replace)
-    }
+  def load_command(name: String): Option[List[String]] = keywords.load_command(name)
+  def load_commands_in(text: String): Boolean = keywords.load_commands_in(text)
 
 
   /* language context */
 
   def set_language_context(context: Completion.Language_Context): Outer_Syntax =
-    new Outer_Syntax(keywords, lexicon, completion, context, has_tokens)
+    new Outer_Syntax(keywords, completion, context, has_tokens)
 
   def no_tokens: Outer_Syntax =
   {
-    require(keywords.isEmpty && lexicon.isEmpty)
+    require(keywords.is_empty)
     new Outer_Syntax(
       completion = completion,
      language_context = language_context,
@@ -182,7 +145,7 @@
 
     val command1 = tokens.exists(_.is_command)
 
     val depth1 =
-      if (tokens.exists(tok => command_kind(tok, Keyword.theory))) 0
+      if (tokens.exists(tok => keywords.is_command_kind(tok, Keyword.theory))) 0
       else if (command1) struct.after_span_depth
       else struct.span_depth
 
@@ -190,11 +153,16 @@
       ((struct.span_depth, struct.after_span_depth) /: tokens) {
         case ((x, y), tok) =>
           if (tok.is_command) {
-            if (command_kind(tok, Keyword.theory_goal)) (2, 1)
-            else if (command_kind(tok, Keyword.theory)) (1, 0)
-            else if (command_kind(tok, Keyword.proof_goal) || tok.is_begin_block) (y + 2, y + 1)
-            else if (command_kind(tok, Keyword.qed) || tok.is_end_block) (y + 1, y - 1)
-            else if (command_kind(tok, Keyword.qed_global)) (1, 0)
+            if (keywords.is_command_kind(tok, Keyword.theory_goal))
+              (2, 1)
+            else if (keywords.is_command_kind(tok, Keyword.theory))
+              (1, 0)
+            else if (keywords.is_command_kind(tok, Keyword.proof_goal) || tok.is_begin_block)
+              (y + 2, y + 1)
+            else if (keywords.is_command_kind(tok, Keyword.qed) || tok.is_end_block)
+              (y + 1, y - 1)
+            else if (keywords.is_command_kind(tok, Keyword.qed_global))
+              (1, 0)
            else (x, y)
          }
          else (x, y)
@@ -209,8 +177,7 @@
   def scan(input: CharSequence): List[Token] =
   {
     val in: Reader[Char] = new CharSequenceReader(input)
-    Token.Parsers.parseAll(
-      Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), in) match {
+    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(keywords)), in) match {
      case Token.Parsers.Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
    }
@@ -222,7 +189,7 @@
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
-      Token.Parsers.parse(Token.Parsers.token_line(lexicon, is_command, ctxt), in) match {
+      Token.Parsers.parse(Token.Parsers.token_line(keywords, ctxt), in) match {
        case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case Token.Parsers.NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
@@ -283,7 +250,7 @@
       case "subsection" => Some(2)
       case "subsubsection" => Some(3)
       case _ =>
-        keyword_kind(command.name) match {
+        keywords.command_kind(command.name) match {
          case Some(kind) if Keyword.theory(kind) => Some(4)
          case _ => None
        }
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/parse.ML
--- a/src/Pure/Isar/parse.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/parse.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -17,7 +17,7 @@
   val position: 'a parser -> ('a * Position.T) parser
   val source_position: 'a parser -> Symbol_Pos.source parser
   val inner_syntax: 'a parser -> string parser
-  val command: string parser
+  val command_: string parser
   val keyword: string parser
   val short_ident: string parser
   val long_ident: string parser
@@ -33,7 +33,7 @@
   val verbatim: string parser
   val cartouche: string parser
   val eof: string parser
-  val command_name: string -> string parser
+  val command: string -> string parser
   val keyword_with: (string -> bool) -> string parser
   val keyword_markup: bool * Markup.T -> string -> string parser
   val keyword_improper: string -> string parser
@@ -180,7 +180,7 @@
   group (fn () => Token.str_of_kind k)
     (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of));
 
-val command = kind Token.Command;
+val command_ = kind Token.Command;
 val keyword = kind Token.Keyword;
 val short_ident = kind Token.Ident;
 val long_ident = kind Token.LongIdent;
@@ -196,7 +196,7 @@
 val cartouche = kind Token.Cartouche;
 val eof = kind Token.EOF;
 
-fun command_name x =
+fun command x =
   group (fn () => Token.str_of_kind Token.Command ^ " " ^ quote x)
     (RESET_VALUE (Scan.one (fn tok => Token.is_command tok andalso Token.content_of tok = x)))
   >> Token.content_of;
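
The rename gives the string-specific parser the shorter name: Parse.command_ accepts any command token, while Parse.command x accepts exactly the command named x. A sketch, assuming Isabelle/ML:

    (* any command token, paired with its position *)
    val any_command = Parse.position Parse.command_;
    (* only the command "lemma" *)
    val lemma_only = Parse.command "lemma";
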
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/parse.scala
--- a/src/Pure/Isar/parse.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/parse.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -51,7 +51,7 @@
       token("command " + quote(name), tok => tok.is_command && tok.source == name) ^^
         { case (_, pos) => pos.position }
 
-    def keyword(name: String): Parser[String] =
+    def $$$(name: String): Parser[String] =
       atom("keyword " + quote(name), tok => tok.is_keyword && tok.source == name)
 
     def string: Parser[String] = atom("string", _.is_string)
@@ -73,7 +73,7 @@
         tok.kind == Token.Kind.IDENT ||
         tok.kind == Token.Kind.STRING)
 
-    def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name)
+    def tags: Parser[List[String]] = rep($$$("%") ~> tag_name)
 
 
     /* wrappers */
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/token.ML
--- a/src/Pure/Isar/token.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/token.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -79,16 +79,16 @@
   val pretty_src: Proof.context -> src -> Pretty.T
   val ident_or_symbolic: string -> bool
   val source_proper: (T, 'a) Source.source -> (T, (T, 'a) Source.source) Source.source
-  val source: (unit -> Scan.lexicon * Scan.lexicon) ->
+  val source: Keyword.keywords ->
     Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
      (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
-  val source_strict: (unit -> Scan.lexicon * Scan.lexicon) ->
+  val source_strict: Keyword.keywords ->
     Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
      (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
   type 'a parser = T list -> 'a * T list
   type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list)
-  val read: Scan.lexicon -> 'a parser -> Symbol_Pos.T list -> 'a list
-  val read_antiq: Scan.lexicon -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a
+  val read_no_commands: Keyword.keywords -> 'a parser -> Symbol_Pos.T list -> 'a list
+  val read_antiq: Keyword.keywords -> 'a parser -> Symbol_Pos.T list * Position.T -> 'a
   val syntax_generic: 'a context_parser -> src -> Context.generic -> 'a * Context.generic
   val syntax: 'a context_parser -> src -> Proof.context -> 'a * Proof.context
 end;
@@ -530,7 +530,7 @@
 fun token_range k (pos1, (ss, pos2)) =
   Token (Symbol_Pos.implode_range pos1 pos2 ss, (k, Symbol_Pos.content ss), Slot);
 
-fun scan (lex1, lex2) = !!! "bad input"
+fun scan keywords = !!! "bad input"
   (Symbol_Pos.scan_string_qq err_prefix >> token_range String ||
    Symbol_Pos.scan_string_bq err_prefix >> token_range AltString ||
    scan_verbatim >> token_range Verbatim ||
@@ -539,8 +539,8 @@
     scan_space >> token Space ||
     (Scan.max token_leq
       (Scan.max token_leq
-        (Scan.literal lex2 >> pair Command)
-        (Scan.literal lex1 >> pair Keyword))
+        (Scan.literal (Keyword.major_keywords keywords) >> pair Command)
+        (Scan.literal (Keyword.minor_keywords keywords) >> pair Keyword))
      (Lexicon.scan_longid >> pair LongIdent ||
       Lexicon.scan_id >> pair Ident ||
       Lexicon.scan_var >> pair Var ||
@@ -561,14 +561,14 @@
 
 in
 
-fun source' strict get_lex =
+fun source' strict keywords =
   let
-    val scan_strict = Scan.bulk (fn xs => scan (get_lex ()) xs);
+    val scan_strict = Scan.bulk (scan keywords);
     val scan = if strict then scan_strict else Scan.recover scan_strict recover;
   in Source.source Symbol_Pos.stopper scan end;
 
-fun source get_lex pos src = Symbol_Pos.source pos src |> source' false get_lex;
-fun source_strict get_lex pos src = Symbol_Pos.source pos src |> source' true get_lex;
+fun source keywords pos src = Symbol_Pos.source pos src |> source' false keywords;
+fun source_strict keywords pos src = Symbol_Pos.source pos src |> source' true keywords;
 
 end;
 
@@ -582,19 +582,19 @@
 
 (* read source *)
 
-fun read lex scan syms =
+fun read_no_commands keywords scan syms =
   Source.of_list syms
-  |> source' true (K (lex, Scan.empty_lexicon))
+  |> source' true (Keyword.no_command_keywords keywords)
   |> source_proper
   |> Source.source stopper (Scan.error (Scan.bulk scan))
  |> Source.exhaust;
 
-fun read_antiq lex scan (syms, pos) =
+fun read_antiq keywords scan (syms, pos) =
   let
     fun err msg =
      cat_error msg ("Malformed antiquotation" ^ Position.here pos ^ ":\n" ^
        "@{" ^ Symbol_Pos.content syms ^ "}");
-    val res = read lex scan syms handle ERROR msg => err msg;
+    val res = read_no_commands keywords scan syms handle ERROR msg => err msg;
   in (case res of [x] => x | _ => err "") end;
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Isar/token.scala
--- a/src/Pure/Isar/token.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Isar/token.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -51,8 +51,7 @@
     string | (alt_string | (verb | (cart | cmt)))
   }
 
-  private def other_token(lexicon: Scan.Lexicon, is_command: String => Boolean)
-    : Parser[Token] =
+  private def other_token(keywords: Keyword.Keywords): Parser[Token] =
   {
     val letdigs1 = many1(Symbol.is_letdig)
     val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
@@ -80,9 +79,9 @@
       (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
       (x => Token(Token.Kind.SYM_IDENT, x))
 
-    val command_keyword =
-      literal(lexicon) ^^
-        (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
+    val keyword =
+      literal(keywords.minor) ^^ (x => Token(Token.Kind.KEYWORD, x)) |||
+      literal(keywords.major) ^^ (x => Token(Token.Kind.COMMAND, x))
 
     val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
 
@@ -96,13 +95,13 @@
     space | (recover_delimited |
       (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
-        command_keyword) | bad))
+        keyword) | bad))
   }
 
-  def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] =
-    delimited_token | other_token(lexicon, is_command)
+  def token(keywords: Keyword.Keywords): Parser[Token] =
+    delimited_token | other_token(keywords)
 
-  def token_line(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Line_Context)
+  def token_line(keywords: Keyword.Keywords, ctxt: Scan.Line_Context)
     : Parser[(Token, Scan.Line_Context)] =
   {
     val string =
@@ -112,7 +111,7 @@
     val verb = verbatim_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
     val cart = cartouche_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
     val cmt = comment_line(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
-    val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) }
+    val other = other_token(keywords) ^^ { case x => (x, Scan.Finished) }
 
     string | (alt_string | (verb | (cart | (cmt | other))))
   }
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/ML/ml_context.ML
--- a/src/Pure/ML/ml_context.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/ML/ml_context.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -116,14 +116,14 @@
       then ((begin_env0, map (fn Antiquote.Text tok => tok) ants), opt_ctxt)
       else
         let
-          val lex = #1 (Keyword.get_lexicons ());
+          val keywords = Keyword.get_keywords ();
           fun no_decl _ = ([], []);
 
           fun expand (Antiquote.Text tok) ctxt = (K ([], [tok]), ctxt)
            | expand (Antiquote.Antiq (ss, {range, ...})) ctxt =
                let
                  val (decl, ctxt') =
-                   apply_antiquotation (Token.read_antiq lex antiq (ss, #1 range)) ctxt;
+                   apply_antiquotation (Token.read_antiq keywords antiq (ss, #1 range)) ctxt;
                  val decl' = decl #> pairself (ML_Lex.tokenize #> map (ML_Lex.set_range range));
                in (decl', ctxt') end;
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/PIDE/document.ML
--- a/src/Pure/PIDE/document.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/PIDE/document.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -318,7 +318,7 @@
     val span =
       Lazy.lazy (fn () =>
         Position.setmp_thread_data (Position.id_only id)
-          (fn () => Outer_Syntax.scan (Keyword.get_lexicons ()) (Position.id id) text) ());
+          (fn () => Outer_Syntax.scan (Keyword.get_keywords ()) (Position.id id) text) ());
     val _ =
       Position.setmp_thread_data (Position.id_only id)
        (fn () => Output.status (Markup.markup_only Markup.accepted)) ();
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/PIDE/resources.ML
--- a/src/Pure/PIDE/resources.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/PIDE/resources.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -147,9 +147,9 @@
   let
     val {name = (name, _), ...} = header;
     val _ = Thy_Header.define_keywords header;
+    val keywords = Keyword.get_keywords ();
 
-    val lexs = Keyword.get_lexicons ();
-    val toks = Outer_Syntax.scan lexs text_pos text;
+    val toks = Outer_Syntax.scan keywords text_pos text;
     val spans = Outer_Syntax.parse_spans toks;
     val elements = Thy_Syntax.parse_elements spans;
 
@@ -165,13 +165,13 @@
     fun present () =
       let
         val res = filter_out (Toplevel.is_ignored o #1) (maps Toplevel.join_results results);
-        val ((minor, _), syntax) = Outer_Syntax.get_syntax ();
+        val (keywords, syntax) = Outer_Syntax.get_syntax ();
       in
        if exists (Toplevel.is_skipped_proof o #2) res then
          warning ("Cannot present theory with skipped proofs: " ^ quote name)
        else
          let val tex_source =
-            Thy_Output.present_thy minor Keyword.command_tags
+            Thy_Output.present_thy keywords Keyword.command_tags
              (Outer_Syntax.is_markup syntax) res toks
            |> Buffer.content;
          in if document then Present.theory_output name tex_source else () end
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/ROOT.ML
--- a/src/Pure/ROOT.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/ROOT.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -230,8 +230,8 @@
 use "Isar/local_defs.ML";
 
 (*outer syntax*)
+use "Isar/keyword.ML";
 use "Isar/token.ML";
-use "Isar/keyword.ML";
 use "Isar/parse.ML";
 use "Isar/args.ML";
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/System/options.scala
--- a/src/Pure/System/options.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/System/options.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -93,15 +93,15 @@
   {
     command(SECTION) ~! text ^^
       { case _ ~ a => (options: Options) => options.set_section(a) } |
-    opt(command(PUBLIC)) ~ command(OPTION) ~! (option_name ~ keyword(":") ~ option_type ~
-      keyword("=") ~ option_value ~ (keyword("--") ~! text ^^ { case _ ~ x => x } | success(""))) ^^
+    opt(command(PUBLIC)) ~ command(OPTION) ~! (option_name ~ $$$(":") ~ option_type ~
+      $$$("=") ~ option_value ~ ($$$("--") ~! text ^^ { case _ ~ x => x } | success(""))) ^^
      { case a ~ pos ~ (b ~ _ ~ c ~ _ ~ d ~ e) =>
          (options: Options) => options.declare(a.isDefined, pos, b, c, d, e) }
   }
 
   val prefs_entry: Parser[Options => Options] =
   {
-    option_name ~ (keyword("=") ~! option_value) ^^
+    option_name ~ ($$$("=") ~! option_value) ^^
      { case a ~ (_ ~ b) => (options: Options) => options.add_permissive(a, b) }
   }
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Thy/thy_header.ML
--- a/src/Pure/Thy/thy_header.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Thy/thy_header.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -77,10 +77,17 @@
 val keywordsN = "keywords";
 val beginN = "begin";
 
-val header_lexicons =
-  pairself (Scan.make_lexicon o map Symbol.explode)
-   (["%", "(", ")", ",", "::", "==", "and", beginN, importsN, keywordsN],
-    [headerN, chapterN, sectionN, subsectionN, subsubsectionN, theoryN]);
+val header_keywords =
+  Keyword.empty_keywords
+  |> fold (Keyword.add o rpair NONE)
+    ["%", "(", ")", ",", "::", "==", "and", beginN, importsN, keywordsN]
+  |> fold Keyword.add
+    [(headerN, SOME Keyword.heading),
+     (chapterN, SOME Keyword.heading),
+     (sectionN, SOME Keyword.heading),
+     (subsectionN, SOME Keyword.heading),
+     (subsubsectionN, SOME Keyword.heading),
+     (theoryN, SOME Keyword.thy_begin)];
 
 
 (* header args *)
@@ -124,21 +131,21 @@
 (* read header *)
 
 val heading =
-  (Parse.command_name headerN ||
-    Parse.command_name chapterN ||
-    Parse.command_name sectionN ||
-    Parse.command_name subsectionN ||
-    Parse.command_name subsubsectionN) --
+  (Parse.command headerN ||
+    Parse.command chapterN ||
+    Parse.command sectionN ||
+    Parse.command subsectionN ||
+    Parse.command subsubsectionN) --
   Parse.tags -- Parse.!!! Parse.document_source;
 
 val header =
-  (Scan.repeat heading -- Parse.command_name theoryN -- Parse.tags) |-- Parse.!!! args;
+  (Scan.repeat heading -- Parse.command theoryN -- Parse.tags) |-- Parse.!!! args;
 
 fun token_source pos str =
   str
   |> Source.of_string_limited 8000
   |> Symbol.source
-  |> Token.source_strict (K header_lexicons) pos;
+  |> Token.source_strict header_keywords pos;
 
 fun read_source pos source =
   let val res =
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Thy/thy_header.scala
--- a/src/Pure/Thy/thy_header.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Thy/thy_header.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -27,9 +27,15 @@
   val AND = "and"
   val BEGIN = "begin"
 
-  private val lexicon =
-    Scan.Lexicon("%", "(", ")", ",", "::", "==", AND, BEGIN,
-      HEADER, CHAPTER, SECTION, SUBSECTION, SUBSUBSECTION, IMPORTS, KEYWORDS, THEORY)
+  private val header_keywords =
+    Keyword.Keywords.empty +
+      "%" + "(" + ")" + "," + "::" + "==" + AND + BEGIN + IMPORTS + KEYWORDS +
+      (HEADER, Keyword.HEADING) +
+      (CHAPTER, Keyword.HEADING) +
+      (SECTION, Keyword.HEADING) +
+      (SUBSECTION, Keyword.HEADING) +
+      (SUBSUBSECTION, Keyword.HEADING) +
+      (THEORY, Keyword.THY_BEGIN)
 
 
   /* theory file name */
@@ -51,7 +57,7 @@
     val file_name = atom("file name", _.is_name)
 
     val opt_files =
-      keyword("(") ~! (rep1sep(name, keyword(",")) <~ keyword(")")) ^^ { case _ ~ x => x } |
+      $$$("(") ~! (rep1sep(name, $$$(",")) <~ $$$(")")) ^^ { case _ ~ x => x } |
      success(Nil)
 
     val keyword_spec =
      atom("outer syntax keyword specification", _.is_name) ~ opt_files ~ tags ^^
@@ -59,35 +65,35 @@
     val keyword_decl =
       rep1(string) ~
-      opt(keyword("::") ~! keyword_spec ^^ { case _ ~ x => x }) ~
-      opt(keyword("==") ~! name ^^ { case _ ~ x => x }) ^^
+      opt($$$("::") ~! keyword_spec ^^ { case _ ~ x => x }) ~
+      opt($$$("==") ~! name ^^ { case _ ~ x => x }) ^^
      { case xs ~ y ~ z => xs.map((_, y, z)) }
 
     val keyword_decls =
-      keyword_decl ~ rep(keyword(AND) ~! keyword_decl ^^ { case _ ~ x => x }) ^^
+      keyword_decl ~ rep($$$(AND) ~! keyword_decl ^^ { case _ ~ x => x }) ^^
        { case xs ~ yss => (xs :: yss).flatten }
 
     val file =
-      keyword("(") ~! (file_name ~ keyword(")")) ^^ { case _ ~ (x ~ _) => (x, false) } |
+      $$$("(") ~! (file_name ~ $$$(")")) ^^ { case _ ~ (x ~ _) => (x, false) } |
      file_name ^^ (x => (x, true))
 
     val args =
       theory_name ~
-      (opt(keyword(IMPORTS) ~! (rep1(theory_xname))) ^^
+      (opt($$$(IMPORTS) ~! (rep1(theory_xname))) ^^
        { case None => Nil case Some(_ ~ xs) => xs }) ~
-      (opt(keyword(KEYWORDS) ~! keyword_decls) ^^
+      (opt($$$(KEYWORDS) ~! keyword_decls) ^^
        { case None => Nil case Some(_ ~ xs) => xs }) ~
-      keyword(BEGIN) ^^
+      $$$(BEGIN) ^^
      { case x ~ ys ~ zs ~ _ => Thy_Header(x, ys, zs) }
 
     val heading =
-      (keyword(HEADER) |
-        keyword(CHAPTER) |
-        keyword(SECTION) |
-        keyword(SUBSECTION) |
-        keyword(SUBSUBSECTION)) ~
+      (command(HEADER) |
+        command(CHAPTER) |
+        command(SECTION) |
+        command(SUBSECTION) |
+        command(SUBSUBSECTION)) ~
      tags ~! document_source
 
-    (rep(heading) ~ keyword(THEORY) ~ tags) ~! args ^^ { case _ ~ x => x }
+    (rep(heading) ~ command(THEORY) ~ tags) ~! args ^^ { case _ ~ x => x }
   }
 
 
@@ -95,7 +101,7 @@
 
   def read(reader: Reader[Char]): Thy_Header =
   {
-    val token = Token.Parsers.token(lexicon, _ => false)
+    val token = Token.Parsers.token(header_keywords)
     val toks = new mutable.ListBuffer[Token]
 
     @tailrec def scan_to_begin(in: Reader[Char])
@@ -121,7 +127,7 @@
 
   /* keywords */
 
-  type Keywords = List[(String, Option[((String, List[String]), List[String])], Option[String])]
+  type Keywords = List[(String, Option[Keyword.Spec], Option[String])]
 }
 
 
@@ -138,7 +144,7 @@
     val f = Symbol.decode _
     Thy_Header(f(name), imports.map(f),
       keywords.map({ case (a, b, c) =>
-        (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f)) }), c.map(f)) }))
+        (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f)) }), c.map(f)) }))
   }
 }
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Thy/thy_output.ML
--- a/src/Pure/Thy/thy_output.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Thy/thy_output.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -23,9 +23,9 @@
   val boolean: string -> bool
   val integer: string -> int
   datatype markup = Markup | MarkupEnv | Verbatim
-  val eval_antiq: Scan.lexicon -> Toplevel.state -> Antiquote.antiq -> string
+  val eval_antiq: Keyword.keywords -> Toplevel.state -> Antiquote.antiq -> string
   val check_text: Symbol_Pos.source -> Toplevel.state -> unit
-  val present_thy: Scan.lexicon -> (string -> string list) -> (markup -> string -> bool) ->
+  val present_thy: Keyword.keywords -> (string -> string list) -> (markup -> string -> bool) ->
     (Toplevel.transition * Toplevel.state) list -> Token.T list -> Buffer.T
   val pretty_text: Proof.context -> string -> Pretty.T
   val pretty_term: Proof.context -> term -> Pretty.T
@@ -158,9 +158,9 @@
 
 (* eval_antiq *)
 
-fun eval_antiq lex state ((ss, {range = (pos, _), ...}): Antiquote.antiq) =
+fun eval_antiq keywords state ((ss, {range = (pos, _), ...}): Antiquote.antiq) =
   let
-    val (opts, src) = Token.read_antiq lex antiq (ss, pos);
+    val (opts, src) = Token.read_antiq keywords antiq (ss, pos);
     fun cmd ctxt = wrap ctxt (fn () => command src state ctxt) ();
     val preview_ctxt = fold option opts (Toplevel.presentation_context_of state);
     val print_ctxt = Context_Position.set_visible false preview_ctxt;
@@ -171,13 +171,13 @@
 
 (* check_text *)
 
-fun eval_antiquote lex state (txt, pos) =
+fun eval_antiquote keywords state (txt, pos) =
   let
     fun words (Antiquote.Text ss) = [(#1 (Symbol_Pos.range ss), Markup.words)]
      | words (Antiquote.Antiq _) = [];
 
     fun expand (Antiquote.Text ss) = Symbol_Pos.content ss
-      | expand (Antiquote.Antiq antiq) = eval_antiq lex state antiq;
+      | expand (Antiquote.Antiq antiq) = eval_antiq keywords state antiq;
 
     val ants = Antiquote.read (Symbol_Pos.explode (txt, pos), pos);
     val _ = Position.reports (maps words ants);
@@ -190,7 +190,7 @@
 fun check_text {delimited, text, pos} state =
  (Position.report pos (Markup.language_document delimited);
   if Toplevel.is_skipped_proof state then ()
-  else ignore (eval_antiquote (#1 (Keyword.get_lexicons ())) state (text, pos)));
+  else ignore (eval_antiquote (Keyword.get_keywords ()) state (text, pos)));
 
 
 
@@ -207,8 +207,8 @@
   | MarkupEnvToken of string * (string * Position.T)
   | VerbatimToken of string * Position.T;
 
-fun output_token lex state =
-  let val eval = eval_antiquote lex state in
+fun output_token keywords state =
+  let val eval = eval_antiquote keywords state in
    fn NoToken => ""
     | BasicToken tok => Latex.output_basic tok
     | MarkupToken (cmd, txt) => Latex.output_markup cmd (eval txt)
@@ -265,11 +265,11 @@
 
 in
 
-fun present_span lex default_tags span state state'
+fun present_span keywords default_tags span state state'
    (tag_stack, active_tag, newline, buffer, present_cont) =
   let
     val present = fold (fn (tok, (flag, 0)) =>
-        Buffer.add (output_token lex state' tok)
+        Buffer.add (output_token keywords state' tok)
        #> Buffer.add flag
      | _ => I);
 
@@ -351,7 +351,7 @@
 
 in
 
-fun present_thy lex default_tags is_markup command_results toks =
+fun present_thy keywords default_tags is_markup command_results toks =
   let
     (* tokens *)
 
@@ -423,7 +423,7 @@
     (* present commands *)
 
     fun present_command tr span st st' =
-      Toplevel.setmp_thread_position tr (present_span lex default_tags span st st');
+      Toplevel.setmp_thread_position tr (present_span keywords default_tags span st st');
 
     fun present _ [] = I
      | present st (((tr, st'), span) :: rest) =
          present_command tr span st st' #> present st' rest;
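
Thy_Output.eval_antiq now takes the full keyword table instead of only the minor lexicon; rail.ML below is updated accordingly. A sketch, assuming Isabelle/ML and a Toplevel.state at hand:

    (* evaluate document antiquotations with an explicit keyword table *)
    fun output_antiq state =
      Thy_Output.eval_antiq (Keyword.get_keywords ()) state;
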
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Tools/build.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -228,30 +228,30 @@
     val session_name = atom("session name", _.is_name)
 
     val option =
-      name ~ opt(keyword("=") ~! name ^^ { case _ ~ x => x }) ^^ { case x ~ y => (x, y) }
-    val options = keyword("[") ~> rep1sep(option, keyword(",")) <~ keyword("]")
+      name ~ opt($$$("=") ~! name ^^ { case _ ~ x => x }) ^^ { case x ~ y => (x, y) }
+    val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]")
 
     val theories =
-      (keyword(GLOBAL_THEORIES) | keyword(THEORIES)) ~!
+      ($$$(GLOBAL_THEORIES) | $$$(THEORIES)) ~!
        ((options | success(Nil)) ~ rep(theory_xname)) ^^
        { case x ~ (y ~ z) => (x == GLOBAL_THEORIES, y, z) }
 
     val document_files =
-      keyword(DOCUMENT_FILES) ~!
-        ((keyword("(") ~! (keyword(IN) ~! (path ~ keyword(")"))) ^^
+      $$$(DOCUMENT_FILES) ~!
+        (($$$("(") ~! ($$$(IN) ~! (path ~ $$$(")"))) ^^
          { case _ ~ (_ ~ (x ~ _)) => x } | success("document")) ~
         rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }
 
     command(SESSION) ~!
       (session_name ~
-        ((keyword("(") ~! (rep1(name) <~ keyword(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
-        ((keyword(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
-        (keyword("=") ~!
-          (opt(session_name ~! keyword("+") ^^ { case x ~ _ => x }) ~
-            ((keyword(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
-            ((keyword(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
+        (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
+        (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
+        ($$$("=") ~!
+          (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~
+            (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
+            (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
            rep1(theories) ~
-            ((keyword(FILES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
+            (($$$(FILES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
            (rep(document_files) ^^ (x => x.flatten))))) ^^
        { case pos ~ (a ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i))) =>
            Session_Entry(pos, a, b, c, d, e, f, g, h, i) }
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Tools/find_consts.ML
--- a/src/Pure/Tools/find_consts.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Tools/find_consts.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -140,10 +140,12 @@
 val query = Scan.repeat ((Scan.option Parse.minus >> is_none) -- criterion);
 
+val query_keywords = Keyword.add (":", NONE) Keyword.empty_keywords;
+
 in
 
 fun read_query pos str =
-  Outer_Syntax.scan (Keyword.get_lexicons ()) pos str
+  Outer_Syntax.scan query_keywords pos str
   |> filter Token.is_proper
   |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   |> #1;
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Tools/find_theorems.ML
--- a/src/Pure/Tools/find_theorems.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Tools/find_theorems.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -526,10 +526,12 @@
 val query = Scan.repeat ((Scan.option Parse.minus >> is_none) -- criterion);
 
+val query_keywords = Keyword.add (":", NONE) Keyword.empty_keywords;
+
 in
 
 fun read_query pos str =
-  Outer_Syntax.scan (Keyword.get_lexicons ()) pos str
+  Outer_Syntax.scan query_keywords pos str
   |> filter Token.is_proper
   |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   |> #1;
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Pure/Tools/rail.ML
--- a/src/Pure/Tools/rail.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Pure/Tools/rail.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -316,7 +316,7 @@
 
 fun output_rules state rules =
   let
-    val output_antiq = Thy_Output.eval_antiq (#1 (Keyword.get_lexicons ())) state;
+    val output_antiq = Thy_Output.eval_antiq (Keyword.get_keywords ()) state;
     fun output_text b s =
      Output.output s
      |> b ? enclose "\\isakeyword{" "}"
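
find_consts.ML and find_theorems.ML above no longer scan queries against the global keyword tables; each declares the one minor keyword it actually needs. A sketch of the same pattern, assuming Isabelle/ML; the query string is illustrative:

    (* a one-keyword table suffices for "criterion: value" queries *)
    val query_keywords = Keyword.add (":", NONE) Keyword.empty_keywords;
    val toks =
      Outer_Syntax.scan query_keywords Position.none "name: rev simp: \"_ @ _\"";
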
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Tools/Code/code_target.ML
--- a/src/Tools/Code/code_target.ML	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Tools/Code/code_target.ML	Wed Nov 05 22:39:49 2014 +0100
@@ -695,7 +695,7 @@
   let
     val ctxt = Proof_Context.init_global (Thy_Info.get_theory thyname);
     val parse = Scan.read Token.stopper (Parse.!!! code_exprP) o
-      (filter Token.is_proper o Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none);
+      (filter Token.is_proper o Outer_Syntax.scan (Keyword.get_keywords ()) Position.none);
   in case parse cmd_expr
    of SOME f => (writeln "Now generating code..."; f ctxt)
     | NONE => error ("Bad directive " ^ quote cmd_expr)
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Tools/jEdit/src/rendering.scala
--- a/src/Tools/jEdit/src/rendering.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Tools/jEdit/src/rendering.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -85,7 +85,7 @@
   }
 
   def token_markup(syntax: Outer_Syntax, token: Token): Byte =
-    if (token.is_command) command_style(syntax.keyword_kind(token.content).getOrElse(""))
+    if (token.is_command) command_style(syntax.keywords.command_kind(token.content).getOrElse(""))
    else if (token.is_delimiter) JEditToken.OPERATOR
    else token_style(token.kind)
diff -r 1ebf0a1f12a4 -r edcd9339bda1 src/Tools/jEdit/src/structure_matching.scala
--- a/src/Tools/jEdit/src/structure_matching.scala	Wed Nov 05 19:43:17 2014 +0100
+++ b/src/Tools/jEdit/src/structure_matching.scala	Wed Nov 05 22:39:49 2014 +0100
@@ -44,6 +44,9 @@
       case Some(syntax) =>
         val limit = PIDE.options.value.int("jedit_structure_limit") max 0
 
+        def is_command_kind(token: Token, pred: String => Boolean): Boolean =
+          syntax.keywords.is_command_kind(token, pred)
+
        def iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
          Token_Markup.line_token_iterator(syntax, buffer, line, line + lim).
            filter(_.info.is_proper)
@@ -60,45 +63,45 @@
         iterator(caret_line, 1).find(info => info.range.touches(caret))
         match {
-          case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.theory_goal) =>
+          case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.theory_goal) =>
            find_block(
-              syntax.command_kind(_, Keyword.proof_goal),
-              syntax.command_kind(_, Keyword.qed),
-              syntax.command_kind(_, Keyword.qed_global),
+              is_command_kind(_, Keyword.proof_goal),
+              is_command_kind(_, Keyword.qed),
+              is_command_kind(_, Keyword.qed_global),
              t =>
-                syntax.command_kind(t, Keyword.diag) ||
-                syntax.command_kind(t, Keyword.proof),
+                is_command_kind(t, Keyword.diag) ||
+                is_command_kind(t, Keyword.proof),
              caret_iterator())
 
-          case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.proof_goal) =>
+          case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.proof_goal) =>
            find_block(
-              syntax.command_kind(_, Keyword.proof_goal),
-              syntax.command_kind(_, Keyword.qed),
+              is_command_kind(_, Keyword.proof_goal),
+              is_command_kind(_, Keyword.qed),
              _ => false,
              t =>
-                syntax.command_kind(t, Keyword.diag) ||
-                syntax.command_kind(t, Keyword.proof),
+                is_command_kind(t, Keyword.diag) ||
+                is_command_kind(t, Keyword.proof),
              caret_iterator())
 
-          case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed_global) =>
-            rev_caret_iterator().find(info => syntax.command_kind(info.info, Keyword.theory))
+          case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.qed_global) =>
+            rev_caret_iterator().find(info => is_command_kind(info.info, Keyword.theory))
            match {
              case Some(Text.Info(range2, tok))
-                if syntax.command_kind(tok, Keyword.theory_goal) => Some((range1, range2))
+                if is_command_kind(tok, Keyword.theory_goal) => Some((range1, range2))
              case _ => None
            }
 
-          case Some(Text.Info(range1, tok)) if syntax.command_kind(tok, Keyword.qed) =>
+          case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.qed) =>
            find_block(
-              syntax.command_kind(_, Keyword.qed),
+              is_command_kind(_, Keyword.qed),
              t =>
-                syntax.command_kind(t, Keyword.proof_goal) ||
-                syntax.command_kind(t, Keyword.theory_goal),
+                is_command_kind(t, Keyword.proof_goal) ||
+                is_command_kind(t, Keyword.theory_goal),
              _ => false,
              t =>
-                syntax.command_kind(t, Keyword.diag) ||
-                syntax.command_kind(t, Keyword.proof) ||
-                syntax.command_kind(t, Keyword.theory_goal),
+                is_command_kind(t, Keyword.diag) ||
+                is_command_kind(t, Keyword.proof) ||
+                is_command_kind(t, Keyword.theory_goal),
              rev_caret_iterator())
 
          case Some(Text.Info(range1, tok)) if tok.is_begin =>
@@ -114,7 +117,7 @@
               find(info => info.info.is_command || info.info.is_begin)
            match {
              case Some(Text.Info(range3, tok)) =>
-                if (syntax.command_kind(tok, Keyword.theory_block)) Some((range1, range3))
+                if (is_command_kind(tok, Keyword.theory_block)) Some((range1, range3))
                else Some((range1, range2))
              case None => None
            }