--- a/src/HOL/Fun_Def.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Fun_Def.thy Mon Jul 11 11:16:10 2016 +0200
@@ -6,7 +6,9 @@
theory Fun_Def
imports Basic_BNF_LFPs Partial_Function SAT
-keywords "function" "termination" :: thy_goal and "fun" "fun_cases" :: thy_decl
+keywords
+ "function" "termination" :: thy_goal and
+ "fun" "fun_cases" :: thy_decl
begin
subsection \<open>Definitions with default value\<close>
--- a/src/HOL/HOLCF/Domain.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/HOLCF/Domain.thy Mon Jul 11 11:16:10 2016 +0200
@@ -7,8 +7,8 @@
theory Domain
imports Representable Domain_Aux
keywords
- "domaindef" :: thy_decl and "lazy" "unsafe" and
- "domain_isomorphism" "domain" :: thy_decl
+ "lazy" "unsafe" and
+ "domaindef" "domain_isomorphism" "domain" :: thy_decl
begin
default_sort "domain"
--- a/src/HOL/Library/Groups_Big_Fun.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Library/Groups_Big_Fun.thy Mon Jul 11 11:16:10 2016 +0200
@@ -215,10 +215,8 @@
begin
sublocale Sum_any: comm_monoid_fun plus 0
-defines
- Sum_any = Sum_any.G
-rewrites
- "comm_monoid_set.F plus 0 = setsum"
+ defines Sum_any = Sum_any.G
+ rewrites "comm_monoid_set.F plus 0 = setsum"
proof -
show "comm_monoid_fun plus 0" ..
then interpret Sum_any: comm_monoid_fun plus 0 .
@@ -284,10 +282,8 @@
begin
sublocale Prod_any: comm_monoid_fun times 1
-defines
- Prod_any = Prod_any.G
-rewrites
- "comm_monoid_set.F times 1 = setprod"
+ defines Prod_any = Prod_any.G
+ rewrites "comm_monoid_set.F times 1 = setprod"
proof -
show "comm_monoid_fun times 1" ..
then interpret Prod_any: comm_monoid_fun times 1 .
--- a/src/HOL/Library/Perm.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Library/Perm.thy Mon Jul 11 11:16:10 2016 +0200
@@ -20,7 +20,9 @@
typedef 'a perm = "{f :: 'a \<Rightarrow> 'a. bij f \<and> finite {a. f a \<noteq> a}}"
morphisms "apply" Perm
- by (rule exI [of _ id]) simp
+proof
+ show "id \<in> {f. bij f \<and> finite {a. f a \<noteq> a}}" by simp
+qed
setup_lifting type_definition_perm
--- a/src/HOL/Library/Polynomial.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Library/Polynomial.thy Mon Jul 11 11:16:10 2016 +0200
@@ -55,7 +55,8 @@
subsection \<open>Definition of type \<open>poly\<close>\<close>
typedef (overloaded) 'a poly = "{f :: nat \<Rightarrow> 'a::zero. \<forall>\<^sub>\<infinity> n. f n = 0}"
- morphisms coeff Abs_poly by (auto intro!: ALL_MOST)
+ morphisms coeff Abs_poly
+ by (auto intro!: ALL_MOST)
setup_lifting type_definition_poly
--- a/src/HOL/Library/Refute.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Library/Refute.thy Mon Jul 11 11:16:10 2016 +0200
@@ -9,7 +9,9 @@
theory Refute
imports Main
-keywords "refute" :: diag and "refute_params" :: thy_decl
+keywords
+ "refute" :: diag and
+ "refute_params" :: thy_decl
begin
ML_file "refute.ML"
--- a/src/HOL/Library/Saturated.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Library/Saturated.thy Mon Jul 11 11:16:10 2016 +0200
@@ -215,8 +215,7 @@
end
interpretation Inf_sat: semilattice_neutr_set min "top :: 'a::len sat"
-rewrites
- "semilattice_neutr_set.F min (top :: 'a sat) = Inf"
+ rewrites "semilattice_neutr_set.F min (top :: 'a sat) = Inf"
proof -
show "semilattice_neutr_set min (top :: 'a sat)"
by standard (simp add: min_def)
@@ -225,8 +224,7 @@
qed
interpretation Sup_sat: semilattice_neutr_set max "bot :: 'a::len sat"
-rewrites
- "semilattice_neutr_set.F max (bot :: 'a sat) = Sup"
+ rewrites "semilattice_neutr_set.F max (bot :: 'a sat) = Sup"
proof -
show "semilattice_neutr_set max (bot :: 'a sat)"
by standard (simp add: max_def bot.extremum_unique)
--- a/src/HOL/Library/Simps_Case_Conv.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Library/Simps_Case_Conv.thy Mon Jul 11 11:16:10 2016 +0200
@@ -3,8 +3,10 @@
*)
theory Simps_Case_Conv
- imports Main
- keywords "simps_of_case" :: thy_decl == "" and "case_of_simps" :: thy_decl
+imports Main
+keywords
+ "simps_of_case" :: thy_decl == "" and
+ "case_of_simps" :: thy_decl
begin
ML_file "simps_case_conv.ML"
--- a/src/HOL/Predicate_Compile.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Predicate_Compile.thy Mon Jul 11 11:16:10 2016 +0200
@@ -6,7 +6,9 @@
theory Predicate_Compile
imports Random_Sequence Quickcheck_Exhaustive
-keywords "code_pred" :: thy_goal and "values" :: diag
+keywords
+ "code_pred" :: thy_goal and
+ "values" :: diag
begin
ML_file "Tools/Predicate_Compile/predicate_compile_aux.ML"
--- a/src/HOL/SPARK/SPARK_Setup.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/SPARK/SPARK_Setup.thy Mon Jul 11 11:16:10 2016 +0200
@@ -11,7 +11,8 @@
"spark_open_vcg" :: thy_load ("vcg", "fdl", "rls") and
"spark_open" :: thy_load ("siv", "fdl", "rls") and
"spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
- "spark_vc" :: thy_goal and "spark_status" :: diag
+ "spark_vc" :: thy_goal and
+ "spark_status" :: diag
begin
ML_file "Tools/fdl_lexer.ML"
--- a/src/HOL/Sledgehammer.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Sledgehammer.thy Mon Jul 11 11:16:10 2016 +0200
@@ -8,7 +8,9 @@
theory Sledgehammer
imports Presburger SMT
-keywords "sledgehammer" :: diag and "sledgehammer_params" :: thy_decl
+keywords
+ "sledgehammer" :: diag and
+ "sledgehammer_params" :: thy_decl
begin
lemma size_ne_size_imp_ne: "size x \<noteq> size y \<Longrightarrow> x \<noteq> y"
--- a/src/HOL/Typedef.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/HOL/Typedef.thy Mon Jul 11 11:16:10 2016 +0200
@@ -6,7 +6,9 @@
theory Typedef
imports Set
-keywords "typedef" :: thy_goal and "morphisms"
+keywords
+ "typedef" :: thy_goal and
+ "morphisms" :: quasi_command
begin
locale type_definition =
--- a/src/Pure/GUI/gui.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/GUI/gui.scala Mon Jul 11 11:16:10 2016 +0200
@@ -15,7 +15,6 @@
import javax.swing.{ImageIcon, JOptionPane, UIManager, JLayeredPane, JFrame, JWindow, JDialog,
JButton, JTextField}
-import scala.collection.convert.WrapAsJava
import scala.swing.{ComboBox, TextArea, ScrollPane}
import scala.swing.event.SelectionChanged
--- a/src/Pure/General/untyped.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/General/untyped.scala Mon Jul 11 11:16:10 2016 +0200
@@ -8,7 +8,7 @@
package isabelle
-import java.lang.reflect.Method
+import java.lang.reflect.{Method, Field}
object Untyped
@@ -30,20 +30,25 @@
}
}
+ def field(obj: AnyRef, x: String): Field =
+ {
+ val iterator =
+ for {
+ c <- classes(obj)
+ field <- c.getDeclaredFields.iterator
+ if field.getName == x
+ } yield {
+ field.setAccessible(true)
+ field
+ }
+ if (iterator.hasNext) iterator.next
+ else error("No field " + quote(x) + " for " + obj)
+ }
+
def get[A](obj: AnyRef, x: String): A =
if (obj == null) null.asInstanceOf[A]
- else {
- val iterator =
- for {
- c <- classes(obj)
- field <- c.getDeclaredFields.iterator
- if field.getName == x
- } yield {
- field.setAccessible(true)
- field.get(obj)
- }
- if (iterator.hasNext) iterator.next.asInstanceOf[A]
- else error("No field " + quote(x) + " for " + obj)
- }
+ else field(obj, x).get(obj).asInstanceOf[A]
+
+ def set[A](obj: AnyRef, x: String, y: A): Unit =
+ field(obj, x).set(obj, y)
}
-
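
The refactoring above extracts field lookup from Untyped.get so that the new
Untyped.set can share it. A minimal standalone sketch of the same reflection
pattern follows; the Example class and its "counter" field are invented for
illustration, and the error handling is simplified:

    import java.lang.reflect.Field

    object Reflect_Demo
    {
      class Example
      {
        private var counter: Int = 42
        override def toString: String = "Example(" + counter + ")"
      }

      // walk the superclass chain, as Untyped.classes does, to find a declared field
      def field(obj: AnyRef, x: String): Field =
      {
        val classes =
          Iterator.iterate[Class[_]](obj.getClass)(_.getSuperclass).takeWhile(_ != null)
        val iterator =
          for { c <- classes; fld <- c.getDeclaredFields.iterator if fld.getName == x }
          yield { fld.setAccessible(true); fld }
        if (iterator.hasNext) iterator.next
        else sys.error("No field " + x + " for " + obj)
      }

      def main(args: Array[String])
      {
        val obj = new Example
        println(field(obj, "counter").get(obj))  // 42
        field(obj, "counter").set(obj, 17)       // what Untyped.set now does
        println(obj)                             // Example(17)
      }
    }
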
--- a/src/Pure/Isar/keyword.ML Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Isar/keyword.ML Mon Jul 11 11:16:10 2016 +0200
@@ -32,14 +32,17 @@
val prf_script: string
val prf_script_goal: string
val prf_script_asm_goal: string
+ val quasi_command: string
type spec = (string * string list) * string list
+ val no_spec: spec
+ val quasi_command_spec: spec
type keywords
val minor_keywords: keywords -> Scan.lexicon
val major_keywords: keywords -> Scan.lexicon
val no_command_keywords: keywords -> keywords
val empty_keywords: keywords
val merge_keywords: keywords * keywords -> keywords
- val add_keywords: ((string * Position.T) * spec option) list -> keywords -> keywords
+ val add_keywords: ((string * Position.T) * spec) list -> keywords -> keywords
val is_keyword: keywords -> string -> bool
val is_command: keywords -> string -> bool
val is_literal: keywords -> string -> bool
@@ -104,8 +107,9 @@
val prf_script = "prf_script";
val prf_script_goal = "prf_script_goal";
val prf_script_asm_goal = "prf_script_asm_goal";
+val quasi_command = "quasi_command";
-val kinds =
+val command_kinds =
[diag, document_heading, document_body, document_raw, thy_begin, thy_end, thy_load, thy_decl,
thy_decl_block, thy_goal, qed, qed_script, qed_block, qed_global, prf_goal, prf_block,
next_block, prf_open, prf_close, prf_chain, prf_decl, prf_asm, prf_asm_goal, prf_script,
@@ -116,6 +120,9 @@
type spec = (string * string list) * string list;
+val no_spec: spec = (("", []), []);
+val quasi_command_spec: spec = ((quasi_command, []), []);
+
type entry =
{pos: Position.T,
id: serial,
@@ -124,14 +131,13 @@
tags: string list};
fun check_spec pos ((kind, files), tags) : entry =
- if not (member (op =) kinds kind) then
+ if not (member (op =) command_kinds kind) then
error ("Unknown outer syntax keyword kind " ^ quote kind)
else if not (null files) andalso kind <> thy_load then
error ("Illegal specification of files for " ^ quote kind)
else {pos = pos, id = serial (), kind = kind, files = files, tags = tags};
-
(** keyword tables **)
(* type keywords *)
@@ -168,18 +174,17 @@
Symtab.merge (K true) (commands1, commands2));
val add_keywords =
- fold (fn ((name, pos), opt_spec) => map_keywords (fn (minor, major, commands) =>
- (case opt_spec of
- NONE =>
- let
- val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
- in (minor', major, commands) end
- | SOME spec =>
- let
- val entry = check_spec pos spec;
- val major' = Scan.extend_lexicon (Symbol.explode name) major;
- val commands' = Symtab.update (name, entry) commands;
- in (minor, major', commands') end)));
+ fold (fn ((name, pos), spec as ((kind, _), _)) => map_keywords (fn (minor, major, commands) =>
+ if kind = "" orelse kind = quasi_command then
+ let
+ val minor' = Scan.extend_lexicon (Symbol.explode name) minor;
+ in (minor', major, commands) end
+ else
+ let
+ val entry = check_spec pos spec;
+ val major' = Scan.extend_lexicon (Symbol.explode name) major;
+ val commands' = Symtab.update (name, entry) commands;
+ in (minor, major', commands') end));
(* keyword status *)
--- a/src/Pure/Isar/keyword.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Isar/keyword.scala Mon Jul 11 11:16:10 2016 +0200
@@ -39,6 +39,7 @@
val PRF_SCRIPT = "prf_script"
val PRF_SCRIPT_GOAL = "prf_script_goal"
val PRF_SCRIPT_ASM_GOAL = "prf_script_asm_goal"
+ val QUASI_COMMAND = "quasi_command"
/* command categories */
@@ -80,6 +81,7 @@
val proof_open = proof_goal + PRF_OPEN
val proof_close = qed + PRF_CLOSE
+ val proof_enclose = Set(PRF_BLOCK, NEXT_BLOCK, QED_BLOCK, PRF_CLOSE)
@@ -87,6 +89,9 @@
type Spec = ((String, List[String]), List[String])
+ val no_spec: Spec = (("", Nil), Nil)
+ val quasi_command_spec: Spec = ((QUASI_COMMAND, Nil), Nil)
+
object Keywords
{
def empty: Keywords = new Keywords()
@@ -95,17 +100,22 @@
class Keywords private(
val minor: Scan.Lexicon = Scan.Lexicon.empty,
val major: Scan.Lexicon = Scan.Lexicon.empty,
+ protected val quasi_commands: Set[String] = Set.empty,
protected val commands: Map[String, (String, List[String])] = Map.empty)
{
override def toString: String =
{
- val keywords1 = minor.iterator.map(quote(_)).toList
+ val keywords1 =
+ for (name <- minor.iterator.toList) yield {
+ if (quasi_commands.contains(name)) (name, quote(name) + " :: \"quasi_command\"")
+ else (name, quote(name))
+ }
val keywords2 =
- for ((name, (kind, files)) <- commands.toList.sortBy(_._1)) yield {
- quote(name) + " :: " + quote(kind) +
- (if (files.isEmpty) "" else " (" + commas_quote(files) + ")")
+ for ((name, (kind, files)) <- commands.toList) yield {
+ (name, quote(name) + " :: " + quote(kind) +
+ (if (files.isEmpty) "" else " (" + commas_quote(files) + ")"))
}
- (keywords1 ::: keywords2).mkString("keywords\n ", " and\n ", "")
+ (keywords1 ::: keywords2).sortBy(_._1).map(_._2).mkString("keywords\n ", " and\n ", "")
}
@@ -119,31 +129,32 @@
else {
val minor1 = minor ++ other.minor
val major1 = major ++ other.major
+ val quasi_commands1 = quasi_commands ++ other.quasi_commands
val commands1 =
if (commands eq other.commands) commands
else if (commands.isEmpty) other.commands
else (commands /: other.commands) { case (m, e) => if (m.isDefinedAt(e._1)) m else m + e }
- new Keywords(minor1, major1, commands1)
+ new Keywords(minor1, major1, quasi_commands1, commands1)
}
/* add keywords */
- def + (name: String): Keywords = new Keywords(minor + name, major, commands)
- def + (name: String, kind: String): Keywords = this + (name, (kind, Nil))
- def + (name: String, kind_tags: (String, List[String])): Keywords =
- {
- val major1 = major + name
- val commands1 = commands + (name -> kind_tags)
- new Keywords(minor, major1, commands1)
- }
+ def + (name: String, kind: String = "", tags: List[String] = Nil): Keywords =
+ if (kind == "") new Keywords(minor + name, major, quasi_commands, commands)
+ else if (kind == QUASI_COMMAND)
+ new Keywords(minor + name, major, quasi_commands + name, commands)
+ else {
+ val major1 = major + name
+ val commands1 = commands + (name -> (kind, tags))
+ new Keywords(minor, major1, quasi_commands, commands1)
+ }
def add_keywords(header: Thy_Header.Keywords): Keywords =
(this /: header) {
- case (keywords, (name, None, _)) =>
- keywords + Symbol.decode(name) + Symbol.encode(name)
- case (keywords, (name, Some((kind_tags, _)), _)) =>
- keywords + (Symbol.decode(name), kind_tags) + (Symbol.encode(name), kind_tags)
+ case (keywords, (name, ((kind, tags), _), _)) =>
+ if (kind == "") keywords + Symbol.decode(name) + Symbol.encode(name)
+ else keywords + (Symbol.decode(name), kind, tags) + (Symbol.encode(name), kind, tags)
}
@@ -151,11 +162,12 @@
def command_kind(name: String): Option[String] = commands.get(name).map(_._1)
- def is_command_kind(name: String, pred: String => Boolean): Boolean =
- command_kind(name) match {
- case Some(kind) => pred(kind)
- case None => false
- }
+ def is_command(token: Token, check_kind: String => Boolean): Boolean =
+ token.is_command &&
+ (command_kind(token.source) match { case Some(k) => check_kind(k) case None => false })
+
+ def is_quasi_command(token: Token): Boolean =
+ token.is_keyword && quasi_commands.contains(token.source)
/* load commands */
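
With quasi_commands threaded through the table, a minor keyword can now carry
status that matters for indentation without becoming a command. A hedged
sketch of the extended interface, assuming the isabelle classes above are on
the classpath ("theorem" and "assumes" are just sample entries):

    import isabelle._

    object Keywords_Demo
    {
      def main(args: Array[String])
      {
        // the uniform + (name, kind, tags) replaces the three old overloads
        val keywords =
          Keyword.Keywords.empty +
            ("theorem", Keyword.THY_GOAL) +
            ("assumes", Keyword.QUASI_COMMAND) +
            "and"

        // true: "assumes" is a minor keyword with quasi_command status
        println(keywords.is_quasi_command(Token(Token.Kind.KEYWORD, "assumes")))
        // true: "theorem" is a proper command of kind thy_goal
        println(keywords.is_command(Token(Token.Kind.COMMAND, "theorem"), Keyword.theory_goal))
      }
    }
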
--- a/src/Pure/Isar/outer_syntax.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Isar/outer_syntax.scala Mon Jul 11 11:16:10 2016 +0200
@@ -85,16 +85,10 @@
/* add keywords */
- def + (name: String): Outer_Syntax = this + (name, None, None)
- def + (name: String, kind: String): Outer_Syntax = this + (name, Some((kind, Nil)), None)
- def + (name: String, opt_kind: Option[(String, List[String])], replace: Option[String])
+ def + (name: String, kind: String = "", tags: List[String] = Nil, replace: Option[String] = None)
: Outer_Syntax =
{
- val keywords1 =
- opt_kind match {
- case None => keywords + name
- case Some(kind) => keywords + (name, kind)
- }
+ val keywords1 = keywords + (name, kind, tags)
val completion1 =
if (replace == Some("")) completion
else completion + (name, replace getOrElse name)
@@ -103,11 +97,10 @@
def add_keywords(keywords: Thy_Header.Keywords): Outer_Syntax =
(this /: keywords) {
- case (syntax, (name, opt_spec, replace)) =>
- val opt_kind = opt_spec.map(_._1)
+ case (syntax, (name, ((kind, tags), _), replace)) =>
syntax +
- (Symbol.decode(name), opt_kind, replace) +
- (Symbol.encode(name), opt_kind, replace)
+ (Symbol.decode(name), kind, tags, replace) +
+ (Symbol.encode(name), kind, tags, replace)
}
@@ -156,7 +149,7 @@
val command1 = tokens.exists(_.is_command)
val depth1 =
- if (tokens.exists(tok => tok.is_command_kind(keywords, Keyword.theory))) 0
+ if (tokens.exists(keywords.is_command(_, Keyword.theory))) 0
else if (command1) structure.after_span_depth
else structure.span_depth
@@ -164,13 +157,13 @@
((structure.span_depth, structure.after_span_depth) /: tokens) {
case ((x, y), tok) =>
if (tok.is_command) {
- if (tok.is_command_kind(keywords, Keyword.theory_goal)) (2, 1)
- else if (tok.is_command_kind(keywords, Keyword.theory)) (1, 0)
- else if (tok.is_command_kind(keywords, Keyword.proof_open)) (y + 2, y + 1)
- else if (tok.is_command_kind(keywords, Keyword.PRF_BLOCK == _)) (y + 2, y + 1)
- else if (tok.is_command_kind(keywords, Keyword.QED_BLOCK == _)) (y + 1, y - 2)
- else if (tok.is_command_kind(keywords, Keyword.proof_close)) (y + 1, y - 1)
- else if (tok.is_command_kind(keywords, Keyword.qed_global)) (1, 0)
+ if (keywords.is_command(tok, Keyword.theory_goal)) (2, 1)
+ else if (keywords.is_command(tok, Keyword.theory)) (1, 0)
+ else if (keywords.is_command(tok, Keyword.proof_open)) (y + 2, y + 1)
+ else if (keywords.is_command(tok, Keyword.PRF_BLOCK == _)) (y + 2, y + 1)
+ else if (keywords.is_command(tok, Keyword.QED_BLOCK == _)) (y + 1, y - 2)
+ else if (keywords.is_command(tok, Keyword.proof_close)) (y + 1, y - 1)
+ else if (keywords.is_command(tok, Keyword.qed_global)) (1, 0)
else (x, y)
}
else (x, y)
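
The depth bookkeeping above folds the tokens of a line into the pair
(span_depth, after_span_depth). A toy model of just that scan, with plain
strings standing in for command kinds (not the actual Isabelle data types):

    // scanLeft keeps the running (span_depth, after_span_depth) per token
    val kinds = List("theory_goal", "proof_open", "proof_close", "diag")
    val depths =
      kinds.scanLeft((0, 0)) { case ((x, y), kind) =>
        kind match {
          case "theory_goal" => (2, 1)
          case "proof_open" => (y + 2, y + 1)
          case "proof_close" => (y + 1, y - 1)
          case _ => (x, y)
        }
      }
    // depths == List((0,0), (2,1), (3,2), (3,1), (3,1))
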
--- a/src/Pure/Isar/token.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Isar/token.scala Mon Jul 11 11:16:10 2016 +0200
@@ -225,8 +225,6 @@
sealed case class Token(kind: Token.Kind.Value, source: String)
{
def is_command: Boolean = kind == Token.Kind.COMMAND
- def is_command_kind(keywords: Keyword.Keywords, pred: String => Boolean): Boolean =
- is_command && keywords.is_command_kind(source, pred)
def is_keyword: Boolean = kind == Token.Kind.KEYWORD
def is_delimiter: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
def is_ident: Boolean = kind == Token.Kind.IDENT
--- a/src/Pure/PIDE/protocol.ML Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/PIDE/protocol.ML Mon Jul 11 11:16:10 2016 +0200
@@ -72,7 +72,7 @@
val (master, (name, (imports, (keywords, errors)))) =
pair string (pair string (pair (list string)
(pair (list (pair string
- (option (pair (pair string (list string)) (list string)))))
+ (pair (pair string (list string)) (list string))))
(list YXML.string_of_body)))) a;
val imports' = map (rpair Position.none) imports;
val keywords' = map (fn (x, y) => ((x, Position.none), y)) keywords;
--- a/src/Pure/PIDE/protocol.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/PIDE/protocol.scala Mon Jul 11 11:16:10 2016 +0200
@@ -378,7 +378,7 @@
(Nil,
pair(Encode.string, pair(Encode.string, pair(list(Encode.string),
pair(list(pair(Encode.string,
- option(pair(pair(Encode.string, list(Encode.string)), list(Encode.string))))),
+ pair(pair(Encode.string, list(Encode.string)), list(Encode.string)))),
list(Encode.string)))))(
(master_dir, (theory, (imports, (keywords, header.errors)))))) },
{ case Document.Node.Perspective(a, b, c) =>
--- a/src/Pure/Pure.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Pure.thy Mon Jul 11 11:16:10 2016 +0200
@@ -6,13 +6,12 @@
theory Pure
keywords
- "!!" "!" "+" "--" ":" ";" "<" "<=" "=" "=>" "?" "[" "\<comment>" "\<equiv>"
- "\<leftharpoondown>" "\<rightharpoonup>" "\<rightleftharpoons>"
- "\<subseteq>" "]" "assumes" "attach" "binder" "constrains"
- "defines" "rewrites" "fixes" "for" "if" "in" "includes" "infix"
- "infixl" "infixr" "is" "notes" "obtains" "open" "output"
- "overloaded" "pervasive" "premises" "private" "qualified" "rewrites"
- "shows" "structure" "unchecked" "where" "when" "|"
+ "!!" "!" "+" "--" ":" ";" "<" "<=" "=" "=>" "?" "[" "\<comment>" "\<equiv>" "\<leftharpoondown>" "\<rightharpoonup>" "\<rightleftharpoons>"
+ "\<subseteq>" "]" "attach" "binder" "for" "if" "in" "infix" "infixl" "infixr" "is"
+ "open" "output" "overloaded" "pervasive" "premises" "private" "qualified"
+ "structure" "unchecked" "where" "when" "|"
+ and "assumes" "constrains" "defines" "fixes" "includes" "notes" "rewrites"
+ "obtains" "shows" :: quasi_command
and "text" "txt" :: document_body
and "text_raw" :: document_raw
and "default_sort" :: thy_decl == ""
--- a/src/Pure/Thy/thy_header.ML Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Thy/thy_header.ML Mon Jul 11 11:16:10 2016 +0200
@@ -6,7 +6,7 @@
signature THY_HEADER =
sig
- type keywords = ((string * Position.T) * Keyword.spec option) list
+ type keywords = ((string * Position.T) * Keyword.spec) list
type header =
{name: string * Position.T,
imports: (string * Position.T) list,
@@ -32,7 +32,7 @@
(* header *)
-type keywords = ((string * Position.T) * Keyword.spec option) list;
+type keywords = ((string * Position.T) * Keyword.spec) list;
type header =
{name: string * Position.T,
@@ -63,27 +63,27 @@
val bootstrap_keywords =
Keyword.empty_keywords
|> Keyword.add_keywords
- [(("%", @{here}), NONE),
- (("(", @{here}), NONE),
- ((")", @{here}), NONE),
- ((",", @{here}), NONE),
- (("::", @{here}), NONE),
- (("==", @{here}), NONE),
- (("and", @{here}), NONE),
- ((beginN, @{here}), NONE),
- ((importsN, @{here}), NONE),
- ((keywordsN, @{here}), NONE),
- ((chapterN, @{here}), SOME ((Keyword.document_heading, []), [])),
- ((sectionN, @{here}), SOME ((Keyword.document_heading, []), [])),
- ((subsectionN, @{here}), SOME ((Keyword.document_heading, []), [])),
- ((subsubsectionN, @{here}), SOME ((Keyword.document_heading, []), [])),
- ((paragraphN, @{here}), SOME ((Keyword.document_heading, []), [])),
- ((subparagraphN, @{here}), SOME ((Keyword.document_heading, []), [])),
- ((textN, @{here}), SOME ((Keyword.document_body, []), [])),
- ((txtN, @{here}), SOME ((Keyword.document_body, []), [])),
- ((text_rawN, @{here}), SOME ((Keyword.document_raw, []), [])),
- ((theoryN, @{here}), SOME ((Keyword.thy_begin, []), ["theory"])),
- (("ML", @{here}), SOME ((Keyword.thy_decl, []), ["ML"]))];
+ [(("%", @{here}), Keyword.no_spec),
+ (("(", @{here}), Keyword.no_spec),
+ ((")", @{here}), Keyword.no_spec),
+ ((",", @{here}), Keyword.no_spec),
+ (("::", @{here}), Keyword.no_spec),
+ (("==", @{here}), Keyword.no_spec),
+ (("and", @{here}), Keyword.no_spec),
+ ((beginN, @{here}), Keyword.no_spec),
+ ((importsN, @{here}), Keyword.no_spec),
+ ((keywordsN, @{here}), Keyword.no_spec),
+ ((chapterN, @{here}), ((Keyword.document_heading, []), [])),
+ ((sectionN, @{here}), ((Keyword.document_heading, []), [])),
+ ((subsectionN, @{here}), ((Keyword.document_heading, []), [])),
+ ((subsubsectionN, @{here}), ((Keyword.document_heading, []), [])),
+ ((paragraphN, @{here}), ((Keyword.document_heading, []), [])),
+ ((subparagraphN, @{here}), ((Keyword.document_heading, []), [])),
+ ((textN, @{here}), ((Keyword.document_body, []), [])),
+ ((txtN, @{here}), ((Keyword.document_body, []), [])),
+ ((text_rawN, @{here}), ((Keyword.document_raw, []), [])),
+ ((theoryN, @{here}), ((Keyword.thy_begin, []), ["theory"])),
+ (("ML", @{here}), ((Keyword.thy_decl, []), ["ML"]))];
(* theory data *)
@@ -133,7 +133,7 @@
val keyword_decl =
Scan.repeat1 (Parse.position Parse.string) --
- Scan.option (Parse.$$$ "::" |-- Parse.!!! keyword_spec) --
+ Scan.optional (Parse.$$$ "::" |-- Parse.!!! keyword_spec) Keyword.no_spec --
Scan.option (Parse.$$$ "==" |-- Parse.!!! keyword_compl)
>> (fn ((names, spec), _) => map (rpair spec) names);
--- a/src/Pure/Thy/thy_header.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Thy/thy_header.scala Mon Jul 11 11:16:10 2016 +0200
@@ -17,7 +17,7 @@
{
/* bootstrap keywords */
- type Keywords = List[(String, Option[Keyword.Spec], Option[String])]
+ type Keywords = List[(String, Keyword.Spec, Option[String])]
val CHAPTER = "chapter"
val SECTION = "section"
@@ -37,27 +37,27 @@
private val bootstrap_header: Keywords =
List(
- ("%", None, None),
- ("(", None, None),
- (")", None, None),
- (",", None, None),
- ("::", None, None),
- ("==", None, None),
- (AND, None, None),
- (BEGIN, None, None),
- (IMPORTS, None, None),
- (KEYWORDS, None, None),
- (CHAPTER, Some(((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
- (SECTION, Some(((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
- (SUBSECTION, Some(((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
- (SUBSUBSECTION, Some(((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
- (PARAGRAPH, Some(((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
- (SUBPARAGRAPH, Some(((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
- (TEXT, Some(((Keyword.DOCUMENT_BODY, Nil), Nil)), None),
- (TXT, Some(((Keyword.DOCUMENT_BODY, Nil), Nil)), None),
- (TEXT_RAW, Some(((Keyword.DOCUMENT_RAW, Nil), Nil)), None),
- (THEORY, Some((Keyword.THY_BEGIN, Nil), List("theory")), None),
- ("ML", Some((Keyword.THY_DECL, Nil), List("ML")), None))
+ ("%", Keyword.no_spec, None),
+ ("(", Keyword.no_spec, None),
+ (")", Keyword.no_spec, None),
+ (",", Keyword.no_spec, None),
+ ("::", Keyword.no_spec, None),
+ ("==", Keyword.no_spec, None),
+ (AND, Keyword.no_spec, None),
+ (BEGIN, Keyword.no_spec, None),
+ (IMPORTS, Keyword.no_spec, None),
+ (KEYWORDS, Keyword.no_spec, None),
+ (CHAPTER, (((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
+ (SECTION, (((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
+ (SUBSECTION, (((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
+ (SUBSUBSECTION, (((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
+ (PARAGRAPH, (((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
+ (SUBPARAGRAPH, (((Keyword.DOCUMENT_HEADING, Nil), Nil)), None),
+ (TEXT, (((Keyword.DOCUMENT_BODY, Nil), Nil)), None),
+ (TXT, (((Keyword.DOCUMENT_BODY, Nil), Nil)), None),
+ (TEXT_RAW, (((Keyword.DOCUMENT_RAW, Nil), Nil)), None),
+ (THEORY, ((Keyword.THY_BEGIN, Nil), List("theory")), None),
+ ("ML", ((Keyword.THY_DECL, Nil), List("ML")), None))
private val bootstrap_keywords =
Keyword.Keywords.empty.add_keywords(bootstrap_header)
@@ -108,7 +108,7 @@
rep1(string) ~
opt($$$("::") ~! keyword_spec ^^ { case _ ~ x => x }) ~
opt($$$("==") ~! name ^^ { case _ ~ x => x }) ^^
- { case xs ~ y ~ z => xs.map((_, y, z)) }
+ { case xs ~ y ~ z => xs.map((_, y.getOrElse(Keyword.no_spec), z)) }
val keyword_decls =
keyword_decl ~ rep($$$(AND) ~! keyword_decl ^^ { case _ ~ x => x }) ^^
@@ -177,7 +177,7 @@
{
val f = Symbol.decode _
Thy_Header((f(name._1), name._2), imports.map({ case (a, b) => (f(a), b) }),
- keywords.map({ case (a, b, c) =>
- (f(a), b.map({ case ((x, y), z) => ((f(x), y.map(f)), z.map(f)) }), c.map(f)) }))
+ keywords.map({ case (a, ((x, y), z), c) =>
+ (f(a), ((f(x), y.map(f)), z.map(f)), c.map(f)) }))
}
}
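
After this change every keyword entry carries a spec, with Keyword.no_spec
standing in for "no kind given". Hypothetical entries, typed against the Spec
alias above:

    import isabelle._

    // before: (name, Option[Keyword.Spec], replace); now the spec is always present
    val plain: (String, Keyword.Spec, Option[String]) = ("and", Keyword.no_spec, None)
    val quasi: (String, Keyword.Spec, Option[String]) =
      ("morphisms", Keyword.quasi_command_spec, None)
    val command: (String, Keyword.Spec, Option[String]) =
      ("text", ((Keyword.DOCUMENT_BODY, Nil), Nil), None)
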
--- a/src/Pure/Tools/find_consts.ML Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Tools/find_consts.ML Mon Jul 11 11:16:10 2016 +0200
@@ -140,7 +140,8 @@
Parse.reserved "strict" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.typ) >> Strict ||
Parse.typ >> Loose;
-val query_keywords = Keyword.add_keywords [((":", @{here}), NONE)] Keyword.empty_keywords;
+val query_keywords =
+ Keyword.add_keywords [((":", @{here}), Keyword.no_spec)] Keyword.empty_keywords;
in
--- a/src/Pure/Tools/find_theorems.ML Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Pure/Tools/find_theorems.ML Mon Jul 11 11:16:10 2016 +0200
@@ -521,7 +521,8 @@
Parse.reserved "simp" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.term) >> Simp ||
Parse.term >> Pattern;
-val query_keywords = Keyword.add_keywords [((":", @{here}), NONE)] Keyword.empty_keywords;
+val query_keywords =
+ Keyword.add_keywords [((":", @{here}), Keyword.no_spec)] Keyword.empty_keywords;
in
--- a/src/Tools/Adhoc_Overloading.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Tools/Adhoc_Overloading.thy Mon Jul 11 11:16:10 2016 +0200
@@ -6,7 +6,8 @@
theory Adhoc_Overloading
imports Pure
-keywords "adhoc_overloading" :: thy_decl and "no_adhoc_overloading" :: thy_decl
+keywords
+ "adhoc_overloading" "no_adhoc_overloading" :: thy_decl
begin
ML_file "adhoc_overloading.ML"
--- a/src/Tools/Code_Generator.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Tools/Code_Generator.thy Mon Jul 11 11:16:10 2016 +0200
@@ -10,8 +10,10 @@
"print_codeproc" "code_thms" "code_deps" :: diag and
"export_code" "code_identifier" "code_printing" "code_reserved"
"code_monad" "code_reflect" :: thy_decl and
- "datatypes" "functions" "module_name" "file" "checking"
- "constant" "type_constructor" "type_class" "class_relation" "class_instance" "code_module"
+ "checking" and
+ "datatypes" "functions" "module_name" "file"
+ "constant" "type_constructor" "type_class" "class_relation" "class_instance" "code_module"
+ :: quasi_command
begin
ML_file "~~/src/Tools/cache_io.ML"
--- a/src/Tools/jEdit/lib/Tools/jedit Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Tools/jEdit/lib/Tools/jedit Mon Jul 11 11:16:10 2016 +0200
@@ -61,10 +61,10 @@
"src/sledgehammer_dockable.scala"
"src/spell_checker.scala"
"src/state_dockable.scala"
- "src/structure_matching.scala"
"src/symbols_dockable.scala"
"src/syslog_dockable.scala"
"src/text_overview.scala"
+ "src/text_structure.scala"
"src/theories_dockable.scala"
"src/timing_dockable.scala"
"src/token_markup.scala"
--- a/src/Tools/jEdit/src/isabelle.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Tools/jEdit/src/isabelle.scala Mon Jul 11 11:16:10 2016 +0200
@@ -18,6 +18,7 @@
import org.gjt.sp.jedit.buffer.JEditBuffer
import org.gjt.sp.jedit.textarea.{JEditTextArea, StructureMatcher, Selection}
import org.gjt.sp.jedit.syntax.TokenMarker
+import org.gjt.sp.jedit.indent.IndentRule
import org.gjt.sp.jedit.gui.{DockableWindowManager, CompleteWord}
import org.jedit.options.CombinedOptions
@@ -83,10 +84,13 @@
}
- /* structure matchers */
+ /* text structure */
- def structure_matchers(name: String): List[StructureMatcher] =
- if (name == "isabelle") List(Structure_Matching.Isabelle_Matcher) else Nil
+ def indent_rule(mode: String): Option[IndentRule] =
+ if (mode == "isabelle") Some(Text_Structure.Indent_Rule) else None
+
+ def structure_matchers(mode: String): List[StructureMatcher] =
+ if (mode == "isabelle") List(Text_Structure.Matcher) else Nil
/* dockable windows */
--- a/src/Tools/jEdit/src/jedit_lib.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Tools/jEdit/src/jedit_lib.scala Mon Jul 11 11:16:10 2016 +0200
@@ -13,7 +13,6 @@
import java.awt.event.{InputEvent, KeyEvent, KeyListener}
import javax.swing.{Icon, ImageIcon, JWindow, SwingUtilities}
-import scala.annotation.tailrec
import scala.util.parsing.input.CharSequenceReader
import org.gjt.sp.jedit.{jEdit, Buffer, View, GUIUtilities, Debug, EditPane}
--- a/src/Tools/jEdit/src/structure_matching.scala Sun Jul 10 22:05:18 2016 +0200
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,167 +0,0 @@
-/* Title: Tools/jEdit/src/structure_matching.scala
- Author: Makarius
-
-Structure matcher for Isabelle/Isar outer syntax.
-*/
-
-package isabelle.jedit
-
-
-import isabelle._
-
-import org.gjt.sp.jedit.textarea.{TextArea, StructureMatcher, Selection}
-
-
-object Structure_Matching
-{
- object Isabelle_Matcher extends StructureMatcher
- {
- private def find_block(
- open: Token => Boolean,
- close: Token => Boolean,
- reset: Token => Boolean,
- restrict: Token => Boolean,
- it: Iterator[Text.Info[Token]]): Option[(Text.Range, Text.Range)] =
- {
- val range1 = it.next.range
- it.takeWhile(info => !info.info.is_command || restrict(info.info)).
- scanLeft((range1, 1))(
- { case ((r, d), Text.Info(range, tok)) =>
- if (open(tok)) (range, d + 1)
- else if (close(tok)) (range, d - 1)
- else if (reset(tok)) (range, 0)
- else (r, d) }
- ).collectFirst({ case (range2, 0) => (range1, range2) })
- }
-
- private def find_pair(text_area: TextArea): Option[(Text.Range, Text.Range)] =
- {
- val buffer = text_area.getBuffer
- val caret_line = text_area.getCaretLine
- val caret = text_area.getCaretPosition
-
- Isabelle.buffer_syntax(text_area.getBuffer) match {
- case Some(syntax) =>
- val limit = PIDE.options.value.int("jedit_structure_limit") max 0
-
- def is_command_kind(token: Token, pred: String => Boolean): Boolean =
- token.is_command_kind(syntax.keywords, pred)
-
- def iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
- Token_Markup.line_token_iterator(syntax, buffer, line, line + lim).
- filter(_.info.is_proper)
-
- def rev_iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
- Token_Markup.line_token_reverse_iterator(syntax, buffer, line, line - lim).
- filter(_.info.is_proper)
-
- def caret_iterator(): Iterator[Text.Info[Token]] =
- iterator(caret_line).dropWhile(info => !info.range.touches(caret))
-
- def rev_caret_iterator(): Iterator[Text.Info[Token]] =
- rev_iterator(caret_line).dropWhile(info => !info.range.touches(caret))
-
- iterator(caret_line, 1).find(info => info.range.touches(caret))
- match {
- case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.theory_goal) =>
- find_block(
- is_command_kind(_, Keyword.proof_goal),
- is_command_kind(_, Keyword.qed),
- is_command_kind(_, Keyword.qed_global),
- t =>
- is_command_kind(t, Keyword.diag) ||
- is_command_kind(t, Keyword.proof),
- caret_iterator())
-
- case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.proof_goal) =>
- find_block(
- is_command_kind(_, Keyword.proof_goal),
- is_command_kind(_, Keyword.qed),
- _ => false,
- t =>
- is_command_kind(t, Keyword.diag) ||
- is_command_kind(t, Keyword.proof),
- caret_iterator())
-
- case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.qed_global) =>
- rev_caret_iterator().find(info => is_command_kind(info.info, Keyword.theory))
- match {
- case Some(Text.Info(range2, tok))
- if is_command_kind(tok, Keyword.theory_goal) => Some((range1, range2))
- case _ => None
- }
-
- case Some(Text.Info(range1, tok)) if is_command_kind(tok, Keyword.qed) =>
- find_block(
- is_command_kind(_, Keyword.qed),
- t =>
- is_command_kind(t, Keyword.proof_goal) ||
- is_command_kind(t, Keyword.theory_goal),
- _ => false,
- t =>
- is_command_kind(t, Keyword.diag) ||
- is_command_kind(t, Keyword.proof) ||
- is_command_kind(t, Keyword.theory_goal),
- rev_caret_iterator())
-
- case Some(Text.Info(range1, tok)) if tok.is_begin =>
- find_block(_.is_begin, _.is_end, _ => false, _ => true, caret_iterator())
-
- case Some(Text.Info(range1, tok)) if tok.is_end =>
- find_block(_.is_end, _.is_begin, _ => false, _ => true, rev_caret_iterator())
- match {
- case Some((_, range2)) =>
- rev_caret_iterator().
- dropWhile(info => info.range != range2).
- dropWhile(info => info.range == range2).
- find(info => info.info.is_command || info.info.is_begin)
- match {
- case Some(Text.Info(range3, tok)) =>
- if (is_command_kind(tok, Keyword.theory_block)) Some((range1, range3))
- else Some((range1, range2))
- case None => None
- }
- case None => None
- }
-
- case _ => None
- }
- case None => None
- }
- }
-
- def getMatch(text_area: TextArea): StructureMatcher.Match =
- find_pair(text_area) match {
- case Some((_, range)) =>
- val line = text_area.getBuffer.getLineOfOffset(range.start)
- new StructureMatcher.Match(Structure_Matching.Isabelle_Matcher,
- line, range.start, line, range.stop)
- case None => null
- }
-
- def selectMatch(text_area: TextArea)
- {
- def get_span(offset: Text.Offset): Option[Text.Range] =
- for {
- syntax <- Isabelle.buffer_syntax(text_area.getBuffer)
- span <- Token_Markup.command_span(syntax, text_area.getBuffer, offset)
- } yield span.range
-
- find_pair(text_area) match {
- case Some((r1, r2)) =>
- (get_span(r1.start), get_span(r2.start)) match {
- case (Some(range1), Some(range2)) =>
- val start = range1.start min range2.start
- val stop = range1.stop max range2.stop
-
- text_area.moveCaretPosition(stop, false)
- if (!text_area.isMultipleSelectionEnabled) text_area.selectNone
- text_area.addToSelection(new Selection.Range(start, stop))
- case _ =>
- }
- case None =>
- }
- }
- }
-}
-
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/src/text_structure.scala Mon Jul 11 11:16:10 2016 +0200
@@ -0,0 +1,272 @@
+/* Title: Tools/jEdit/src/text_structure.scala
+ Author: Makarius
+
+Text structure based on Isabelle/Isar outer syntax.
+*/
+
+package isabelle.jedit
+
+
+import isabelle._
+
+import org.gjt.sp.jedit.indent.{IndentRule, IndentAction}
+import org.gjt.sp.jedit.textarea.{TextArea, StructureMatcher, Selection}
+import org.gjt.sp.jedit.buffer.JEditBuffer
+import org.gjt.sp.jedit.Buffer
+
+
+object Text_Structure
+{
+ /* token navigator */
+
+ class Navigator(syntax: Outer_Syntax, buffer: Buffer)
+ {
+ val limit = PIDE.options.value.int("jedit_structure_limit") max 0
+
+ def iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
+ Token_Markup.line_token_iterator(syntax, buffer, line, line + lim).
+ filter(_.info.is_proper)
+
+ def reverse_iterator(line: Int, lim: Int = limit): Iterator[Text.Info[Token]] =
+ Token_Markup.line_token_reverse_iterator(syntax, buffer, line, line - lim).
+ filter(_.info.is_proper)
+ }
+
+
+ /* indentation */
+
+ object Indent_Rule extends IndentRule
+ {
+ private val keyword_open = Keyword.theory_goal ++ Keyword.proof_open
+ private val keyword_close = Keyword.proof_close
+
+ def apply(buffer: JEditBuffer, current_line: Int, prev_line: Int, prev_prev_line: Int,
+ actions: java.util.List[IndentAction])
+ {
+ Isabelle.buffer_syntax(buffer) match {
+ case Some(syntax) if buffer.isInstanceOf[Buffer] =>
+ val keywords = syntax.keywords
+ val nav = new Navigator(syntax, buffer.asInstanceOf[Buffer])
+
+ def head_token(line: Int): Option[Token] =
+ nav.iterator(line, 1).toStream.headOption.map(_.info)
+
+ def head_is_quasi_command(line: Int): Boolean =
+ head_token(line) match {
+ case None => false
+ case Some(tok) => keywords.is_quasi_command(tok)
+ }
+
+ def prev_command: Option[Token] =
+ nav.reverse_iterator(prev_line, 1).
+ collectFirst({ case Text.Info(_, tok) if tok.is_command => tok })
+
+ def prev_span: Iterator[Token] =
+ nav.reverse_iterator(prev_line).map(_.info).takeWhile(tok => !tok.is_command)
+
+
+ def line_indent(line: Int): Int =
+ if (line < 0 || line >= buffer.getLineCount) 0
+ else buffer.getCurrentIndentForLine(line, null)
+
+ val indent_size = buffer.getIndentSize
+
+ def indent_indent(tok: Token): Int =
+ if (keywords.is_command(tok, keyword_open)) indent_size
+ else if (keywords.is_command(tok, keyword_close)) - indent_size
+ else 0
+
+ def indent_offset(tok: Token): Int =
+ if (keywords.is_command(tok, Keyword.proof_enclose)) indent_size
+ else 0
+
+ def indent_extra: Int =
+ if (prev_span.exists(keywords.is_quasi_command(_))) indent_size
+ else 0
+
+ def indent_structure: Int =
+ nav.reverse_iterator(current_line - 1).scanLeft((0, false))(
+ { case ((ind, _), Text.Info(range, tok)) =>
+ val ind1 = ind + indent_indent(tok)
+ if (tok.is_command) {
+ val line = buffer.getLineOfOffset(range.start)
+ if (head_token(line) == Some(tok))
+ (ind1 + indent_offset(tok) + line_indent(line), true)
+ else (ind1, false)
+ }
+ else (ind1, false)
+ }).collectFirst({ case (i, true) => i }).getOrElse(0)
+
+ val indent =
+ head_token(current_line) match {
+ case None => indent_structure + indent_extra
+ case Some(tok) =>
+ if (keywords.is_command(tok, Keyword.theory)) 0
+ else if (tok.is_command) indent_structure - indent_offset(tok)
+ else {
+ prev_command match {
+ case None =>
+ val extra =
+ (keywords.is_quasi_command(tok), head_is_quasi_command(prev_line)) match {
+ case (true, true) | (false, false) => 0
+ case (true, false) => - indent_extra
+ case (false, true) => indent_extra
+ }
+ line_indent(prev_line) + extra
+ case Some(prev_tok) =>
+ indent_structure - indent_offset(prev_tok) -
+ indent_indent(prev_tok) + indent_size
+ }
+ }
+ }
+
+ actions.clear()
+ actions.add(new IndentAction.AlignOffset(indent))
+ case _ =>
+ }
+ }
+ }
+
+
+ /* structure matching */
+
+ object Matcher extends StructureMatcher
+ {
+ private def find_block(
+ open: Token => Boolean,
+ close: Token => Boolean,
+ reset: Token => Boolean,
+ restrict: Token => Boolean,
+ it: Iterator[Text.Info[Token]]): Option[(Text.Range, Text.Range)] =
+ {
+ val range1 = it.next.range
+ it.takeWhile(info => !info.info.is_command || restrict(info.info)).
+ scanLeft((range1, 1))(
+ { case ((r, d), Text.Info(range, tok)) =>
+ if (open(tok)) (range, d + 1)
+ else if (close(tok)) (range, d - 1)
+ else if (reset(tok)) (range, 0)
+ else (r, d) }
+ ).collectFirst({ case (range2, 0) => (range1, range2) })
+ }
+
+ private def find_pair(text_area: TextArea): Option[(Text.Range, Text.Range)] =
+ {
+ val buffer = text_area.getBuffer
+ val caret_line = text_area.getCaretLine
+ val caret = text_area.getCaretPosition
+
+ Isabelle.buffer_syntax(text_area.getBuffer) match {
+ case Some(syntax) if buffer.isInstanceOf[Buffer] =>
+ val keywords = syntax.keywords
+
+ val nav = new Navigator(syntax, buffer.asInstanceOf[Buffer])
+
+ def caret_iterator(): Iterator[Text.Info[Token]] =
+ nav.iterator(caret_line).dropWhile(info => !info.range.touches(caret))
+
+ def reverse_caret_iterator(): Iterator[Text.Info[Token]] =
+ nav.reverse_iterator(caret_line).dropWhile(info => !info.range.touches(caret))
+
+ nav.iterator(caret_line, 1).find(info => info.range.touches(caret))
+ match {
+ case Some(Text.Info(range1, tok)) if keywords.is_command(tok, Keyword.theory_goal) =>
+ find_block(
+ keywords.is_command(_, Keyword.proof_goal),
+ keywords.is_command(_, Keyword.qed),
+ keywords.is_command(_, Keyword.qed_global),
+ t =>
+ keywords.is_command(t, Keyword.diag) ||
+ keywords.is_command(t, Keyword.proof),
+ caret_iterator())
+
+ case Some(Text.Info(range1, tok)) if keywords.is_command(tok, Keyword.proof_goal) =>
+ find_block(
+ keywords.is_command(_, Keyword.proof_goal),
+ keywords.is_command(_, Keyword.qed),
+ _ => false,
+ t =>
+ keywords.is_command(t, Keyword.diag) ||
+ keywords.is_command(t, Keyword.proof),
+ caret_iterator())
+
+ case Some(Text.Info(range1, tok)) if keywords.is_command(tok, Keyword.qed_global) =>
+ reverse_caret_iterator().find(info => keywords.is_command(info.info, Keyword.theory))
+ match {
+ case Some(Text.Info(range2, tok))
+ if keywords.is_command(tok, Keyword.theory_goal) => Some((range1, range2))
+ case _ => None
+ }
+
+ case Some(Text.Info(range1, tok)) if keywords.is_command(tok, Keyword.qed) =>
+ find_block(
+ keywords.is_command(_, Keyword.qed),
+ t =>
+ keywords.is_command(t, Keyword.proof_goal) ||
+ keywords.is_command(t, Keyword.theory_goal),
+ _ => false,
+ t =>
+ keywords.is_command(t, Keyword.diag) ||
+ keywords.is_command(t, Keyword.proof) ||
+ keywords.is_command(t, Keyword.theory_goal),
+ reverse_caret_iterator())
+
+ case Some(Text.Info(range1, tok)) if tok.is_begin =>
+ find_block(_.is_begin, _.is_end, _ => false, _ => true, caret_iterator())
+
+ case Some(Text.Info(range1, tok)) if tok.is_end =>
+ find_block(_.is_end, _.is_begin, _ => false, _ => true, reverse_caret_iterator())
+ match {
+ case Some((_, range2)) =>
+ reverse_caret_iterator().
+ dropWhile(info => info.range != range2).
+ dropWhile(info => info.range == range2).
+ find(info => info.info.is_command || info.info.is_begin)
+ match {
+ case Some(Text.Info(range3, tok)) =>
+ if (keywords.is_command(tok, Keyword.theory_block)) Some((range1, range3))
+ else Some((range1, range2))
+ case None => None
+ }
+ case None => None
+ }
+
+ case _ => None
+ }
+ case _ => None
+ }
+ }
+
+ def getMatch(text_area: TextArea): StructureMatcher.Match =
+ find_pair(text_area) match {
+ case Some((_, range)) =>
+ val line = text_area.getBuffer.getLineOfOffset(range.start)
+ new StructureMatcher.Match(Matcher, line, range.start, line, range.stop)
+ case None => null
+ }
+
+ def selectMatch(text_area: TextArea)
+ {
+ def get_span(offset: Text.Offset): Option[Text.Range] =
+ for {
+ syntax <- Isabelle.buffer_syntax(text_area.getBuffer)
+ span <- Token_Markup.command_span(syntax, text_area.getBuffer, offset)
+ } yield span.range
+
+ find_pair(text_area) match {
+ case Some((r1, r2)) =>
+ (get_span(r1.start), get_span(r2.start)) match {
+ case (Some(range1), Some(range2)) =>
+ val start = range1.start min range2.start
+ val stop = range1.stop max range2.stop
+
+ text_area.moveCaretPosition(stop, false)
+ if (!text_area.isMultipleSelectionEnabled) text_area.selectNone
+ text_area.addToSelection(new Selection.Range(start, stop))
+ case _ =>
+ }
+ case None =>
+ }
+ }
+ }
+}
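
find_block above (unchanged from the old matcher apart from the keywords
interface) uses one scan: track nesting depth from the opening token and take
the first position where the depth returns to zero. The same technique on
plain characters, as a standalone sketch:

    object Find_Block_Demo
    {
      // index of the ')' matching the '(' at position start, if any
      def find_close(s: String, start: Int): Option[Int] =
      {
        val it = s.iterator.zipWithIndex.drop(start)
        val (c0, i0) = it.next
        require(c0 == '(')
        it.scanLeft((i0, 1)) { case ((i, d), (c, j)) =>
          if (c == '(') (j, d + 1)
          else if (c == ')') (j, d - 1)
          else (i, d)
        }.collectFirst({ case (j, 0) => j })
      }

      def main(args: Array[String])
      {
        println(find_close("(a(b)c)d", 0))  // Some(6)
      }
    }
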
--- a/src/Tools/jEdit/src/token_markup.scala Sun Jul 10 22:05:18 2016 +0200
+++ b/src/Tools/jEdit/src/token_markup.scala Mon Jul 11 11:16:10 2016 +0200
@@ -12,16 +12,18 @@
import java.awt.{Font, Color}
import java.awt.font.TextAttribute
import java.awt.geom.AffineTransform
+import javax.swing.text.Segment
+
+import scala.collection.convert.WrapAsJava
import org.gjt.sp.util.SyntaxUtilities
import org.gjt.sp.jedit.{jEdit, Mode, Buffer}
import org.gjt.sp.jedit.syntax.{Token => JEditToken, TokenMarker, TokenHandler, DummyTokenHandler,
ParserRuleSet, ModeProvider, XModeHandler, SyntaxStyle}
+import org.gjt.sp.jedit.indent.IndentRule
import org.gjt.sp.jedit.textarea.{TextArea, Selection}
import org.gjt.sp.jedit.buffer.JEditBuffer
-import javax.swing.text.Segment
-
object Token_Markup
{
@@ -446,6 +448,9 @@
{
super.loadMode(mode, xmh)
Isabelle.mode_token_marker(mode.getName).foreach(mode.setTokenMarker _)
+ Isabelle.indent_rule(mode.getName).foreach(indent_rule =>
+ Untyped.set[java.util.List[IndentRule]](
+ mode, "indentRules", WrapAsJava.seqAsJavaList(List(indent_rule))))
}
}
}
--- a/src/ZF/Inductive_ZF.thy Sun Jul 10 22:05:18 2016 +0200
+++ b/src/ZF/Inductive_ZF.thy Mon Jul 11 11:16:10 2016 +0200
@@ -16,7 +16,7 @@
keywords
"inductive" "coinductive" "inductive_cases" "rep_datatype" "primrec" :: thy_decl and
"domains" "intros" "monos" "con_defs" "type_intros" "type_elims"
- "elimination" "induction" "case_eqns" "recursor_eqns"
+ "elimination" "induction" "case_eqns" "recursor_eqns" :: quasi_command
begin
lemma def_swap_iff: "a == b ==> a = c \<longleftrightarrow> c = b"