--- a/Admin/components/bundled Sat Feb 15 00:11:17 2014 +0100
+++ b/Admin/components/bundled Sat Feb 15 17:10:57 2014 +0100
@@ -1,3 +1,2 @@
#additional components to be bundled for release
-ProofGeneral-4.2-1
--- a/Admin/components/main Sat Feb 15 00:11:17 2014 +0100
+++ b/Admin/components/main Sat Feb 15 17:10:57 2014 +0100
@@ -12,3 +12,4 @@
spass-3.8ds
z3-3.2-1
xz-java-1.2-1
+ProofGeneral-4.2-1
\ No newline at end of file
--- a/src/HOL/Decision_Procs/cooper_tac.ML Sat Feb 15 00:11:17 2014 +0100
+++ b/src/HOL/Decision_Procs/cooper_tac.ML Sat Feb 15 17:10:57 2014 +0100
@@ -4,16 +4,12 @@
signature COOPER_TAC =
sig
- val trace: bool Unsynchronized.ref
val linz_tac: Proof.context -> bool -> int -> tactic
end
structure Cooper_Tac: COOPER_TAC =
struct
-val trace = Unsynchronized.ref false;
-fun trace_msg s = if !trace then tracing s else ();
-
val cooper_ss = simpset_of @{context};
val nT = HOLogic.natT;
@@ -21,15 +17,11 @@
val zdvd_int = @{thm zdvd_int};
val zdiff_int_split = @{thm zdiff_int_split};
-val all_nat = @{thm all_nat};
-val ex_nat = @{thm ex_nat};
val split_zdiv = @{thm split_zdiv};
val split_zmod = @{thm split_zmod};
val mod_div_equality' = @{thm mod_div_equality'};
val split_div' = @{thm split_div'};
val Suc_eq_plus1 = @{thm Suc_eq_plus1};
-val imp_le_cong = @{thm imp_le_cong};
-val conj_le_cong = @{thm conj_le_cong};
val mod_add_left_eq = @{thm mod_add_left_eq} RS sym;
val mod_add_right_eq = @{thm mod_add_right_eq} RS sym;
val mod_add_eq = @{thm mod_add_eq} RS sym;
--- a/src/HOL/Decision_Procs/ferrack_tac.ML Sat Feb 15 00:11:17 2014 +0100
+++ b/src/HOL/Decision_Procs/ferrack_tac.ML Sat Feb 15 17:10:57 2014 +0100
@@ -4,16 +4,12 @@
signature FERRACK_TAC =
sig
- val trace: bool Unsynchronized.ref
val linr_tac: Proof.context -> bool -> int -> tactic
end
-structure Ferrack_Tac =
+structure Ferrack_Tac: FERRACK_TAC =
struct
-val trace = Unsynchronized.ref false;
-fun trace_msg s = if !trace then tracing s else ();
-
val ferrack_ss = let val ths = [@{thm real_of_int_inject}, @{thm real_of_int_less_iff},
@{thm real_of_int_le_iff}]
in @{context} delsimps ths addsimps (map (fn th => th RS sym) ths)
@@ -22,23 +18,7 @@
val binarith = @{thms arith_simps}
val comp_arith = binarith @ @{thms simp_thms}
-val zdvd_int = @{thm zdvd_int};
-val zdiff_int_split = @{thm zdiff_int_split};
-val all_nat = @{thm all_nat};
-val ex_nat = @{thm ex_nat};
-val split_zdiv = @{thm split_zdiv};
-val split_zmod = @{thm split_zmod};
-val mod_div_equality' = @{thm mod_div_equality'};
-val split_div' = @{thm split_div'};
-val Suc_eq_plus1 = @{thm Suc_eq_plus1};
-val imp_le_cong = @{thm imp_le_cong};
-val conj_le_cong = @{thm conj_le_cong};
-val mod_add_left_eq = @{thm mod_add_left_eq} RS sym;
-val mod_add_right_eq = @{thm mod_add_right_eq} RS sym;
-val nat_div_add_eq = @{thm div_add1_eq} RS sym;
-val int_div_add_eq = @{thm zdiv_zadd1_eq} RS sym;
-
-fun prepare_for_linr sg q fm =
+fun prepare_for_linr q fm =
let
val ps = Logic.strip_params fm
val hs = map HOLogic.dest_Trueprop (Logic.strip_assums_hyp fm)
@@ -72,7 +52,7 @@
let
val thy = Proof_Context.theory_of ctxt
(* Transform the term*)
- val (t,np,nh) = prepare_for_linr thy q g
+ val (t,np,nh) = prepare_for_linr q g
(* Some simpsets for dealing with mod div abs and nat*)
val simpset0 = put_simpset HOL_basic_ss ctxt addsimps comp_arith
val ct = cterm_of thy (HOLogic.mk_Trueprop t)
@@ -87,10 +67,8 @@
Const ("==>", _) $ (Const (@{const_name Trueprop}, _) $ t1) $ _ =>
let val pth = linr_oracle (ctxt, Envir.eta_long [] t1)
in
- (trace_msg ("calling procedure with term:\n" ^
- Syntax.string_of_term ctxt t1);
- ((pth RS iffD2) RS pre_thm,
- assm_tac (i + 1) THEN (if q then I else TRY) (rtac TrueI i)))
+ ((pth RS iffD2) RS pre_thm,
+ assm_tac (i + 1) THEN (if q then I else TRY) (rtac TrueI i))
end
| _ => (pre_thm, assm_tac i)
in rtac ((mp_step nh o spec_step np) th) i THEN tac end);
--- a/src/HOL/Multivariate_Analysis/Brouwer_Fixpoint.thy Sat Feb 15 00:11:17 2014 +0100
+++ b/src/HOL/Multivariate_Analysis/Brouwer_Fixpoint.thy Sat Feb 15 17:10:57 2014 +0100
@@ -541,30 +541,31 @@
and "\<exists>k. 1 \<le> k \<and> k \<le> n \<and> x k < y k"
apply (rule kle_imp_pointwise[OF assms(1)])
proof -
- guess k using assms(1)[unfolded kle_def] .. note k = this
+ obtain k where k: "k \<subseteq> {1..n} \<and> (\<forall>j. y j = x j + (if j \<in> k then 1 else 0))"
+ using assms(1)[unfolded kle_def] ..
show "\<exists>k. 1 \<le> k \<and> k \<le> n \<and> x k < y k"
-proof (cases "k = {}")
- case True
- then have "x = y"
- apply -
- apply (rule ext)
- using k
- apply auto
- done
- then show ?thesis
- using assms(2) by auto
-next
- case False
- then have "(SOME k'. k' \<in> k) \<in> k"
- apply -
- apply (rule someI_ex)
- apply auto
- done
- then show ?thesis
- apply (rule_tac x = "SOME k'. k' \<in> k" in exI)
- using k
- apply auto
- done
+ proof (cases "k = {}")
+ case True
+ then have "x = y"
+ apply -
+ apply (rule ext)
+ using k
+ apply auto
+ done
+ then show ?thesis
+ using assms(2) by auto
+ next
+ case False
+ then have "(SOME k'. k' \<in> k) \<in> k"
+ apply -
+ apply (rule someI_ex)
+ apply auto
+ done
+ then show ?thesis
+ apply (rule_tac x = "SOME k'. k' \<in> k" in exI)
+ using k
+ apply auto
+ done
qed
qed
@@ -583,7 +584,7 @@
using kle_imp_pointwise
apply auto
done
- then guess a .. note a = this
+ then obtain a where a: "a \<in> s" "\<forall>x\<in>s. \<forall>j. a j \<le> x j" ..
show ?thesis
apply (rule_tac x = a in bexI)
proof
@@ -620,7 +621,7 @@
using kle_imp_pointwise
apply auto
done
- then guess a .. note a = this
+ then obtain a where a: "a \<in> s" "\<forall>x\<in>s. \<forall>j. x j \<le> a j" ..
show ?thesis
apply (rule_tac x = a in bexI)
proof
@@ -649,7 +650,8 @@
and "x \<noteq> y"
shows "1 \<le> card {k\<in>{1..n}. x k < y k}"
proof -
- guess i using kle_strict(2)[OF assms] ..
+ obtain i where "1 \<le> i" "i \<le> n" "x i < y i"
+ using kle_strict(2)[OF assms] by blast
then have "card {i} \<le> card {k\<in>{1..n}. x k < y k}"
apply -
apply (rule card_mono)
@@ -693,7 +695,8 @@
proof -
fix i j
assume as: "i \<in> {1..n}" "x i < y i" "j \<in> {1..n}" "y j < z j" "i = j"
- guess kx using assms(1)[unfolded kle_def] .. note kx = this
+ obtain kx where kx: "kx \<subseteq> {1..n} \<and> (\<forall>j. y j = x j + (if j \<in> kx then 1 else 0))"
+ using assms(1)[unfolded kle_def] ..
have "x i < y i"
using as by auto
then have "i \<in> kx"
@@ -705,7 +708,8 @@
then have "x i + 1 = y i"
using kx by auto
moreover
- guess ky using assms(2)[unfolded kle_def] .. note ky = this
+ obtain ky where ky: "ky \<subseteq> {1..n} \<and> (\<forall>j. z j = y j + (if j \<in> ky then 1 else 0))"
+ using assms(2)[unfolded kle_def] ..
have "y i < z i"
using as by auto
then have "i \<in> ky"
@@ -829,8 +833,10 @@
and "\<forall>j. c j \<le> a j + 1"
shows "kle n a c"
proof -
- guess kk1 using assms(1)[unfolded kle_def] .. note kk1 = this
- guess kk2 using assms(2)[unfolded kle_def] .. note kk2 = this
+ obtain kk1 where kk1: "kk1 \<subseteq> {1..n} \<and> (\<forall>j. b j = a j + (if j \<in> kk1 then 1 else 0))"
+ using assms(1)[unfolded kle_def] ..
+ obtain kk2 where kk2: "kk2 \<subseteq> {1..n} \<and> (\<forall>j. c j = b j + (if j \<in> kk2 then 1 else 0))"
+ using assms(2)[unfolded kle_def] ..
show ?thesis
unfolding kle_def
apply (rule_tac x="kk1 \<union> kk2" in exI)
@@ -1023,7 +1029,8 @@
apply (subst *[symmetric])
unfolding mem_Collect_eq
proof
- guess k using a(2)[rule_format,OF b(1),unfolded kle_def] .. note k = this
+ obtain k where k: "k \<subseteq> {1..n} \<and> (\<forall>j. b j = a j + (if j \<in> k then 1 else 0))"
+ using a(2)[rule_format, OF b(1), unfolded kle_def] ..
fix i
show "b i = (if i \<in> {1..n} \<and> a i < b i then a i + 1 else a i)"
proof (cases "i \<in> {1..n}")
@@ -1166,7 +1173,10 @@
ultimately show False
unfolding n by auto
qed
- then guess k unfolding card_1_exists .. note k = this[unfolded mem_Collect_eq]
+ then obtain k where k:
+ "k \<in> {1..n} \<and> a k < b k"
+ "\<And>y y'. (y \<in> {1..n} \<and> a y < b y) \<and> y' \<in> {1..n} \<and> a y' < b y' \<Longrightarrow> y = y'"
+ unfolding card_1_exists by blast
show ?thesis
apply (rule disjI2)
@@ -1177,12 +1187,12 @@
have "kle n a b"
using b and assm(6)[rule_format, OF `a\<in>s` `b\<in>s`]
by auto
- then guess kk unfolding kle_def .. note kk_raw = this
- note kk = this[THEN conjunct2, rule_format]
+ then obtain kk where kk: "kk \<subseteq> {1..n}" "\<And>j. b j = a j + (if j \<in> kk then 1 else 0)"
+ unfolding kle_def by blast
have kkk: "k \<in> kk"
apply (rule ccontr)
using k(1)
- unfolding kk
+ unfolding kk(2)
apply auto
done
show "b j = (if j = k then a j + 1 else a j)"
@@ -1190,19 +1200,19 @@
case True
then have "j = k"
apply -
- apply (rule k(2)[rule_format])
- using kk_raw kkk
+ apply (rule k(2))
+ using kk kkk
apply auto
done
then show ?thesis
- unfolding kk using kkk by auto
+ unfolding kk(2) using kkk by auto
next
case False
then have "j \<noteq> k"
- using k(2)[rule_format, of j k] and kk_raw kkk
+ using k(2)[of j k] and kkk
by auto
then show ?thesis
- unfolding kk
+ unfolding kk(2)
using kkk and False
by auto
qed
@@ -1298,7 +1308,10 @@
ultimately show False
unfolding n by auto
qed
- then guess k unfolding card_1_exists .. note k = this[unfolded mem_Collect_eq]
+ then obtain k where k:
+ "k \<in> {1..n} \<and> b k < a k"
+ "\<And>y y'. (y \<in> {1..n} \<and> b y < a y) \<and> y' \<in> {1..n} \<and> b y' < a y' \<Longrightarrow> y = y'"
+ unfolding card_1_exists by blast
show ?thesis
apply (rule disjI2)
@@ -1308,12 +1321,12 @@
fix j :: nat
have "kle n b a"
using b and assm(6)[rule_format, OF `a\<in>s` `b\<in>s`] by auto
- then guess kk unfolding kle_def .. note kk_raw = this
- note kk = this[THEN conjunct2,rule_format]
+ then obtain kk where kk: "kk \<subseteq> {1..n}" "\<And>j. a j = b j + (if j \<in> kk then 1 else 0)"
+ unfolding kle_def by blast
have kkk: "k \<in> kk"
apply (rule ccontr)
using k(1)
- unfolding kk
+ unfolding kk(2)
apply auto
done
show "a j = (if j = k then b j + 1 else b j)"
@@ -1321,20 +1334,20 @@
case True
then have "j = k"
apply -
- apply (rule k(2)[rule_format])
- using kk_raw kkk
+ apply (rule k(2))
+ using kk kkk
apply auto
done
then show ?thesis
- unfolding kk using kkk by auto
+ unfolding kk(2) using kkk by auto
next
case False
then have "j \<noteq> k"
- using k(2)[rule_format, of j k]
- using kk_raw kkk
+ using k(2)[of j k]
+ using kkk
by auto
then show ?thesis
- unfolding kk
+ unfolding kk(2)
using kkk and False
by auto
qed
@@ -1367,8 +1380,12 @@
by auto
next
case (Suc m)
- guess a using card_eq_SucD[OF Suc(4)] ..
- then guess s0 by (elim exE conjE) note as0 = this
+ obtain a s0 where as0:
+ "s = insert a s0"
+ "a \<notin> s0"
+ "card s0 = m"
+ "m = 0 \<longrightarrow> s0 = {}"
+ using card_eq_SucD[OF Suc(4)] by auto
have **: "card s0 = m"
using as0 using Suc(2) Suc(4)
by auto
@@ -1489,8 +1506,11 @@
(is "?ls = ?rs")
proof
assume ?ls
- then guess s ..
- then guess a by (elim exE conjE) note sa = this
+ then obtain s a where sa:
+ "ksimplex p (n + 1) s"
+ "a \<in> s"
+ "f = s - {a}"
+ by auto
show ?rs
unfolding ksimplex_def sa(3)
apply rule
@@ -1510,7 +1530,8 @@
show "kle n x y \<or> kle n y x"
proof (cases "kle (n + 1) x y")
case True
- then guess k unfolding kle_def .. note k = this
+ then obtain k where k: "k \<subseteq> {1..n + 1}" "\<And>j. y j = x j + (if j \<in> k then 1 else 0)"
+ unfolding kle_def by blast
then have *: "n + 1 \<notin> k" using xyp by auto
have "\<not> (\<exists>x\<in>k. x \<notin> {1..n})"
apply (rule notI)
@@ -1521,13 +1542,13 @@
have "x \<noteq> n + 1"
using as and * by auto
then show False
- using as and k[THEN conjunct1,unfolded subset_eq] by auto
+ using as and k(1) by auto
qed
then show ?thesis
apply -
apply (rule disjI1)
unfolding kle_def
- using k
+ using k(2)
apply (rule_tac x=k in exI)
apply auto
done
@@ -1536,7 +1557,8 @@
then have "kle (n + 1) y x"
using ksimplexD(6)[OF sa(1),rule_format, of x y] and as
by auto
- then guess k unfolding kle_def .. note k = this
+ then obtain k where k: "k \<subseteq> {1..n + 1}" "\<And>j. x j = y j + (if j \<in> k then 1 else 0)"
+ unfolding kle_def by auto
then have *: "n + 1 \<notin> k"
using xyp by auto
then have "\<not> (\<exists>x\<in>k. x \<notin> {1..n})"
@@ -1549,14 +1571,14 @@
have "x \<noteq> n + 1"
using as and * by auto
then show False
- using as and k[THEN conjunct1,unfolded subset_eq]
+ using as and k(1)
by auto
qed
then show ?thesis
apply -
apply (rule disjI2)
unfolding kle_def
- using k
+ using k(2)
apply (rule_tac x = k in exI)
apply auto
done
@@ -1573,7 +1595,12 @@
qed (insert sa ksimplexD[OF sa(1)], auto)
next
assume ?rs note rs=ksimplexD[OF this]
- guess a b by (rule ksimplex_extrema[OF `?rs`]) note ab = this
+ obtain a b where ab:
+ "a \<in> f"
+ "b \<in> f"
+ "\<forall>x\<in>f. kle n a x \<and> kle n x b"
+ "\<forall>i. b i = (if i \<in> {1..n} then a i + 1 else a i)"
+ by (rule ksimplex_extrema[OF `?rs`])
def c \<equiv> "\<lambda>i. if i = (n + 1) then p - 1 else a i"
have "c \<notin> f"
apply (rule notI)
@@ -1625,11 +1652,9 @@
case False
then have "z \<in> f"
using z by auto
- then guess k
- apply (drule_tac ab(3)[THEN bspec[where x=z], THEN conjunct1])
- unfolding kle_def
- apply (erule exE)
- done
+ with ab(3) have "kle n a z" by blast
+ then obtain k where "k \<subseteq> {1..n}" "\<And>j. z j = a j + (if j \<in> k then 1 else 0)"
+ unfolding kle_def by blast
then show "kle (n + 1) c z"
unfolding kle_def
apply (rule_tac x="insert (n + 1) k" in exI)
--- a/src/Pure/General/multi_map.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/General/multi_map.scala Sat Feb 15 17:10:57 2014 +0100
@@ -25,6 +25,8 @@
{
/* Multi_Map operations */
+ def iterator_list: Iterator[(A, List[B])] = rep.iterator
+
def get_list(a: A): List[B] = rep.getOrElse(a, Nil)
def insert[B1 >: B](a: A, b: B1): Multi_Map[A, B1] =
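
Note: the new iterator_list exposes each key together with its complete list of values, complementing the per-key get_list; thy_info.scala further down uses it to spot theory names resolved to more than one file. A minimal standalone sketch of the idea (plain Scala over Map[A, List[B]], not the actual Multi_Map class):

    // Multi_Map is backed by a Map[A, List[B]]; these mirror the two accessors.
    val rep: Map[String, List[Int]] = Map("a" -> List(1, 2), "b" -> List(3))
    def get_list(k: String): List[Int] = rep.getOrElse(k, Nil)       // one key
    def iterator_list: Iterator[(String, List[Int])] = rep.iterator  // all keys with groups
    // The whole-group view makes duplicate detection a one-liner:
    iterator_list.collect { case (k, vs) if vs.length > 1 => k }.toList  // List("a")
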
--- a/src/Pure/General/position.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/General/position.scala Sat Feb 15 17:10:57 2014 +0100
@@ -14,6 +14,8 @@
{
type T = Properties.T
+ val none: T = Nil
+
val Line = new Properties.Int(Markup.LINE)
val Offset = new Properties.Int(Markup.OFFSET)
val End_Offset = new Properties.Int(Markup.END_OFFSET)
--- a/src/Pure/General/scan.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/General/scan.scala Sat Feb 15 17:10:57 2014 +0100
@@ -17,9 +17,9 @@
object Scan
{
- /** context of partial scans **/
+ /** context of partial scans (line boundary) **/
- sealed abstract class Context
+ abstract class Context
case object Finished extends Context
case class Quoted(quote: String) extends Context
case object Verbatim extends Context
@@ -28,99 +28,12 @@
- /** Lexicon -- position tree **/
-
- object Lexicon
- {
- /* representation */
-
- sealed case class Tree(val branches: Map[Char, (String, Tree)])
- private val empty_tree = Tree(Map())
-
- val empty: Lexicon = new Lexicon(empty_tree)
- def apply(elems: String*): Lexicon = empty ++ elems
- }
-
- final class Lexicon private(main_tree: Lexicon.Tree) extends RegexParsers
- {
- import Lexicon.Tree
-
-
- /* auxiliary operations */
-
- private def content(tree: Tree, result: List[String]): List[String] =
- (result /: tree.branches.toList) ((res, entry) =>
- entry match { case (_, (s, tr)) =>
- if (s.isEmpty) content(tr, res) else content(tr, s :: res) })
-
- private def lookup(str: CharSequence): Option[(Boolean, Tree)] =
- {
- val len = str.length
- def look(tree: Tree, tip: Boolean, i: Int): Option[(Boolean, Tree)] =
- {
- if (i < len) {
- tree.branches.get(str.charAt(i)) match {
- case Some((s, tr)) => look(tr, !s.isEmpty, i + 1)
- case None => None
- }
- } else Some(tip, tree)
- }
- look(main_tree, false, 0)
- }
-
- def completions(str: CharSequence): List[String] =
- lookup(str) match {
- case Some((true, tree)) => content(tree, List(str.toString))
- case Some((false, tree)) => content(tree, Nil)
- case None => Nil
- }
+ /** parser combinators **/
-
- /* pseudo Set methods */
-
- def iterator: Iterator[String] = content(main_tree, Nil).sorted.iterator
-
- override def toString: String = iterator.mkString("Lexicon(", ", ", ")")
-
- def empty: Lexicon = Lexicon.empty
- def isEmpty: Boolean = main_tree.branches.isEmpty
-
- def contains(elem: String): Boolean =
- lookup(elem) match {
- case Some((tip, _)) => tip
- case _ => false
- }
-
-
- /* add elements */
+ object Parsers extends Parsers
- def + (elem: String): Lexicon =
- if (contains(elem)) this
- else {
- val len = elem.length
- def extend(tree: Tree, i: Int): Tree =
- if (i < len) {
- val c = elem.charAt(i)
- val end = (i + 1 == len)
- tree.branches.get(c) match {
- case Some((s, tr)) =>
- Tree(tree.branches +
- (c -> (if (end) elem else s, extend(tr, i + 1))))
- case None =>
- Tree(tree.branches +
- (c -> (if (end) elem else "", extend(Lexicon.empty_tree, i + 1))))
- }
- }
- else tree
- new Lexicon(extend(main_tree, 0))
- }
-
- def ++ (elems: TraversableOnce[String]): Lexicon = (this /: elems)(_ + _)
-
-
-
- /** RegexParsers methods **/
-
+ trait Parsers extends RegexParsers
+ {
override val whiteSpace = "".r
@@ -130,36 +43,9 @@
p ^^ (x => Some(x)) | """\z""".r ^^ (_ => None)
- /* keywords from lexicon */
-
- def keyword: Parser[String] = new Parser[String]
- {
- def apply(in: Input) =
- {
- val source = in.source
- val offset = in.offset
- val len = source.length - offset
+ /* repeated symbols */
- def scan(tree: Tree, result: String, i: Int): String =
- {
- if (i < len) {
- tree.branches.get(source.charAt(offset + i)) match {
- case Some((s, tr)) => scan(tr, if (s.isEmpty) result else s, i + 1)
- case None => result
- }
- }
- else result
- }
- val result = scan(main_tree, "", 0)
- if (result.isEmpty) Failure("keyword expected", in)
- else Success(result, in.drop(result.length))
- }
- }.named("keyword")
-
-
- /* symbol range */
-
- def symbol_range(pred: Symbol.Symbol => Boolean, min_count: Int, max_count: Int): Parser[String] =
+ def repeated(pred: Symbol.Symbol => Boolean, min_count: Int, max_count: Int): Parser[String] =
new Parser[String]
{
def apply(in: Input) =
@@ -180,16 +66,22 @@
if (count < min_count) Failure("bad input", in)
else Success(in.source.subSequence(start, i).toString, in.drop(i - start))
}
- }.named("symbol_range")
+ }.named("repeated")
def one(pred: Symbol.Symbol => Boolean): Parser[String] =
- symbol_range(pred, 1, 1)
+ repeated(pred, 1, 1)
def many(pred: Symbol.Symbol => Boolean): Parser[String] =
- symbol_range(pred, 0, Integer.MAX_VALUE)
+ repeated(pred, 0, Integer.MAX_VALUE)
def many1(pred: Symbol.Symbol => Boolean): Parser[String] =
- symbol_range(pred, 1, Integer.MAX_VALUE)
+ repeated(pred, 1, Integer.MAX_VALUE)
+
+
+ /* character */
+
+ def character(pred: Char => Boolean): Symbol.Symbol => Boolean =
+ (s: Symbol.Symbol) => s.length == 1 && pred(s.charAt(0))
/* quoted strings */
@@ -200,7 +92,7 @@
(("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString)
}
- private def quoted(quote: Symbol.Symbol): Parser[String] =
+ def quoted(quote: Symbol.Symbol): Parser[String] =
{
quote ~ quoted_body(quote) ~ quote ^^ { case x ~ y ~ z => x + y + z }
}.named("quoted")
@@ -242,7 +134,7 @@
private def verbatim_body: Parser[String] =
rep(many1(sym => sym != "*") | """\*(?!\})""".r) ^^ (_.mkString)
- private def verbatim: Parser[String] =
+ def verbatim: Parser[String] =
{
"{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z }
}.named("verbatim")
@@ -391,81 +283,127 @@
}
- /* outer syntax tokens */
+ /* keyword */
- private def delimited_token: Parser[Token] =
+ def literal(lexicon: Lexicon): Parser[String] = new Parser[String]
{
- val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
- val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
- val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
- val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
- val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x))
+ def apply(in: Input) =
+ {
+ val result = lexicon.scan(in)
+ if (result.isEmpty) Failure("keyword expected", in)
+ else Success(result, in.drop(result.length))
+ }
+ }.named("keyword")
+ }
+
+
+
+ /** Lexicon -- position tree **/
+
+ object Lexicon
+ {
+ /* representation */
+
+ private sealed case class Tree(val branches: Map[Char, (String, Tree)])
+ private val empty_tree = Tree(Map())
- string | (alt_string | (verb | (cart | cmt)))
+ val empty: Lexicon = new Lexicon(empty_tree)
+ def apply(elems: String*): Lexicon = empty ++ elems
+ }
+
+ final class Lexicon private(rep: Lexicon.Tree)
+ {
+ /* auxiliary operations */
+
+ private def content(tree: Lexicon.Tree, result: List[String]): List[String] =
+ (result /: tree.branches.toList) ((res, entry) =>
+ entry match { case (_, (s, tr)) =>
+ if (s.isEmpty) content(tr, res) else content(tr, s :: res) })
+
+ private def lookup(str: CharSequence): Option[(Boolean, Lexicon.Tree)] =
+ {
+ val len = str.length
+ def look(tree: Lexicon.Tree, tip: Boolean, i: Int): Option[(Boolean, Lexicon.Tree)] =
+ {
+ if (i < len) {
+ tree.branches.get(str.charAt(i)) match {
+ case Some((s, tr)) => look(tr, !s.isEmpty, i + 1)
+ case None => None
+ }
+ } else Some(tip, tree)
+ }
+ look(rep, false, 0)
}
- private def other_token(is_command: String => Boolean)
- : Parser[Token] =
- {
- val letdigs1 = many1(Symbol.is_letdig)
- val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
- val id =
- one(Symbol.is_letter) ~
- (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^
- { case x ~ y => x + y }
+ def completions(str: CharSequence): List[String] =
+ lookup(str) match {
+ case Some((true, tree)) => content(tree, List(str.toString))
+ case Some((false, tree)) => content(tree, Nil)
+ case None => Nil
+ }
- val nat = many1(Symbol.is_digit)
- val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
- val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
+
+ /* pseudo Set methods */
+
+ def iterator: Iterator[String] = content(rep, Nil).sorted.iterator
- val ident = id ~ rep("." ~> id) ^^
- { case x ~ Nil => Token(Token.Kind.IDENT, x)
- case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
+ override def toString: String = iterator.mkString("Lexicon(", ", ", ")")
+
+ def empty: Lexicon = Lexicon.empty
+ def isEmpty: Boolean = rep.branches.isEmpty
- val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
- val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
- val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
- val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
- val float =
- ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
+ def contains(elem: String): Boolean =
+ lookup(elem) match {
+ case Some((tip, _)) => tip
+ case _ => false
+ }
- val sym_ident =
- (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
- (x => Token(Token.Kind.SYM_IDENT, x))
+
+ /* add elements */
- val command_keyword =
- keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
-
- val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
-
- val recover_delimited =
- (recover_quoted("\"") |
- (recover_quoted("`") |
- (recover_verbatim |
- (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
-
- val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
+ def + (elem: String): Lexicon =
+ if (contains(elem)) this
+ else {
+ val len = elem.length
+ def extend(tree: Lexicon.Tree, i: Int): Lexicon.Tree =
+ if (i < len) {
+ val c = elem.charAt(i)
+ val end = (i + 1 == len)
+ tree.branches.get(c) match {
+ case Some((s, tr)) =>
+ Lexicon.Tree(tree.branches +
+ (c -> (if (end) elem else s, extend(tr, i + 1))))
+ case None =>
+ Lexicon.Tree(tree.branches +
+ (c -> (if (end) elem else "", extend(Lexicon.empty_tree, i + 1))))
+ }
+ }
+ else tree
+ new Lexicon(extend(rep, 0))
+ }
- space | (recover_delimited |
- (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
- command_keyword) | bad))
- }
+ def ++ (elems: TraversableOnce[String]): Lexicon = (this /: elems)(_ + _)
+
- def token(is_command: String => Boolean): Parser[Token] =
- delimited_token | other_token(is_command)
+ /* scan */
- def token_context(is_command: String => Boolean, ctxt: Context): Parser[(Token, Context)] =
+ def scan(in: Reader[Char]): String =
{
- val string =
- quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
- val alt_string =
- quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
- val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
- val cart = cartouche_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
- val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
- val other = other_token(is_command) ^^ { case x => (x, Finished) }
+ val source = in.source
+ val offset = in.offset
+ val len = source.length - offset
- string | (alt_string | (verb | (cart | (cmt | other))))
+ def scan_tree(tree: Lexicon.Tree, result: String, i: Int): String =
+ {
+ if (i < len) {
+ tree.branches.get(source.charAt(offset + i)) match {
+ case Some((s, tr)) => scan_tree(tr, if (s.isEmpty) result else s, i + 1)
+ case None => result
+ }
+ }
+ else result
+ }
+ scan_tree(rep, "", 0)
}
}
}
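
Note: after this refactoring, Lexicon is pure data: a position tree whose scan returns the longest keyword prefixing the input, while all combinator parsing lives in Scan.Parsers (keyword matching becomes Scan.Parsers.literal(lexicon)). A self-contained sketch of the position-tree technique, mirroring the add/scan code above but independent of Isabelle's classes:

    // Each branch maps a Char to (longest keyword ending at this node or "", subtree).
    case class Tree(branches: Map[Char, (String, Tree)])
    val empty = Tree(Map())

    def add(t: Tree, elem: String, i: Int = 0): Tree =
      if (i == elem.length) t
      else {
        val c = elem.charAt(i)
        val (s, tr) = t.branches.getOrElse(c, ("", empty))
        val s1 = if (i + 1 == elem.length) elem else s
        Tree(t.branches + (c -> (s1, add(tr, elem, i + 1))))
      }

    // Longest match: remember the last complete keyword seen along the path.
    def scan(t: Tree, input: String): String = {
      def go(tree: Tree, result: String, i: Int): String =
        if (i < input.length)
          tree.branches.get(input.charAt(i)) match {
            case Some((s, tr)) => go(tr, if (s.isEmpty) result else s, i + 1)
            case None => result
          }
        else result
      go(t, "", 0)
    }

    val lex = List("=", "=>", "=simp=>").foldLeft(empty)(add(_, _))
    scan(lex, "=simp=> rest")  // "=simp=>", not just "="
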
--- a/src/Pure/General/symbol.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/General/symbol.scala Sat Feb 15 17:10:57 2014 +0100
@@ -19,9 +19,16 @@
/* ASCII characters */
def is_ascii_letter(c: Char): Boolean = 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z'
+
def is_ascii_digit(c: Char): Boolean = '0' <= c && c <= '9'
+
+ def is_ascii_hex(c: Char): Boolean =
+ '0' <= c && c <= '9' || 'A' <= c && c <= 'F' || 'a' <= c && c <= 'f'
+
def is_ascii_quasi(c: Char): Boolean = c == '_' || c == '\''
+ def is_ascii_blank(c: Char): Boolean = " \t\n\u000b\f\r".contains(c)
+
def is_ascii_letdig(c: Char): Boolean =
is_ascii_letter(c) || is_ascii_digit(c) || is_ascii_quasi(c)
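
Note: both new predicates feed the ML lexer below; Scan.Parsers.character (added above) lifts them from Char to Symbol.Symbol so they can drive one/many/repeated. Standalone sketch of that lift (in Isabelle/Scala a symbol is represented as a String, which is assumed here):

    type Symbol = String
    def character(pred: Char => Boolean): Symbol => Boolean =
      (s: Symbol) => s.length == 1 && pred(s.charAt(0))

    def is_ascii_hex(c: Char): Boolean =
      '0' <= c && c <= '9' || 'A' <= c && c <= 'F' || 'a' <= c && c <= 'f'

    // Plain characters pass through; multi-character symbols never match.
    List("a", "G", "\\<alpha>").map(character(is_ascii_hex))  // List(true, false, false)
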
--- a/src/Pure/Isar/completion.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Isar/completion.scala Sat Feb 15 17:10:57 2014 +0100
@@ -185,8 +185,8 @@
line: CharSequence): Option[Completion.Result] =
{
val raw_result =
- abbrevs_lex.parse(abbrevs_lex.keyword, new Library.Reverse(line)) match {
- case abbrevs_lex.Success(reverse_a, _) =>
+ Scan.Parsers.parse(Scan.Parsers.literal(abbrevs_lex), new Library.Reverse(line)) match {
+ case Scan.Parsers.Success(reverse_a, _) =>
val abbrevs = abbrevs_map.get_list(reverse_a)
abbrevs match {
case Nil => None
--- a/src/Pure/Isar/outer_syntax.ML Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.ML Sat Feb 15 17:10:57 2014 +0100
@@ -167,7 +167,7 @@
val _ = warning (redefining ());
val _ =
if ! batch_mode then
- Output.physical_stderr ("Legacy feature! " ^ redefining () ^ "\n")
+ Output.physical_stderr ("### Legacy feature! " ^ redefining () ^ "\n")
else ();
in () end;
Symtab.update (name, cmd) commands)))
--- a/src/Pure/Isar/outer_syntax.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala Sat Feb 15 17:10:57 2014 +0100
@@ -128,10 +128,8 @@
def scan(input: Reader[Char]): List[Token] =
{
- import lexicon._
-
- parseAll(rep(token(is_command)), input) match {
- case Success(tokens, _) => tokens
+ Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(lexicon, is_command)), input) match {
+ case Token.Parsers.Success(tokens, _) => tokens
case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
}
}
@@ -141,15 +139,13 @@
def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
{
- import lexicon._
-
var in: Reader[Char] = new CharSequenceReader(input)
val toks = new mutable.ListBuffer[Token]
var ctxt = context
while (!in.atEnd) {
- parse(token_context(is_command, ctxt), in) match {
- case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
- case NoSuccess(_, rest) =>
+ Token.Parsers.parse(Token.Parsers.token_context(lexicon, is_command, ctxt), in) match {
+ case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+ case Token.Parsers.NoSuccess(_, rest) =>
error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
}
}
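
Note: scan_context (like ML_Lex.tokenize_context below) threads a Scan.Context through a line-oriented loop so multi-line constructs survive line breaks. A toy standalone version of the loop shape, with a context that only tracks unterminated string literals (all names here are hypothetical):

    sealed trait Ctx
    case object Finished extends Ctx
    case object Quoted extends Ctx  // inside an unterminated "..." literal

    def scan_line(line: String, ctxt: Ctx): (List[String], Ctx) = {
      var inside = ctxt == Quoted
      val toks = new scala.collection.mutable.ListBuffer[String]
      val sb = new StringBuilder
      for (c <- line) {
        sb += c
        if (c == '"') { inside = !inside; if (!inside) { toks += sb.toString; sb.clear() } }
      }
      if (sb.nonEmpty) toks += sb.toString
      (toks.toList, if (inside) Quoted else Finished)
    }

    val (toks1, ctxt1) = scan_line("a \"b", Finished)  // ctxt1 == Quoted: string still open
    val (toks2, ctxt2) = scan_line("c\" d", ctxt1)     // resumes inside it; ctxt2 == Finished
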
--- a/src/Pure/Isar/token.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Isar/token.scala Sat Feb 15 17:10:57 2014 +0100
@@ -34,6 +34,91 @@
}
+ /* parsers */
+
+ object Parsers extends Parsers
+
+ trait Parsers extends Scan.Parsers
+ {
+ private def delimited_token: Parser[Token] =
+ {
+ val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
+ val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
+ val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
+ val cart = cartouche ^^ (x => Token(Token.Kind.CARTOUCHE, x))
+ val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x))
+
+ string | (alt_string | (verb | (cart | cmt)))
+ }
+
+ private def other_token(lexicon: Scan.Lexicon, is_command: String => Boolean)
+ : Parser[Token] =
+ {
+ val letdigs1 = many1(Symbol.is_letdig)
+ val sub = one(s => s == Symbol.sub_decoded || s == "\\<^sub>")
+ val id =
+ one(Symbol.is_letter) ~
+ (rep(letdigs1 | (sub ~ letdigs1 ^^ { case x ~ y => x + y })) ^^ (_.mkString)) ^^
+ { case x ~ y => x + y }
+
+ val nat = many1(Symbol.is_digit)
+ val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
+ val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
+
+ val ident = id ~ rep("." ~> id) ^^
+ { case x ~ Nil => Token(Token.Kind.IDENT, x)
+ case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
+
+ val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
+ val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
+ val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
+ val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
+ val float =
+ ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
+
+ val sym_ident =
+ (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
+ (x => Token(Token.Kind.SYM_IDENT, x))
+
+ val command_keyword =
+ literal(lexicon) ^^
+ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))
+
+ val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
+
+ val recover_delimited =
+ (recover_quoted("\"") |
+ (recover_quoted("`") |
+ (recover_verbatim |
+ (recover_cartouche | recover_comment)))) ^^ (x => Token(Token.Kind.ERROR, x))
+
+ val bad = one(_ => true) ^^ (x => Token(Token.Kind.ERROR, x))
+
+ space | (recover_delimited |
+ (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
+ command_keyword) | bad))
+ }
+
+ def token(lexicon: Scan.Lexicon, is_command: String => Boolean): Parser[Token] =
+ delimited_token | other_token(lexicon, is_command)
+
+ def token_context(lexicon: Scan.Lexicon, is_command: String => Boolean, ctxt: Scan.Context)
+ : Parser[(Token, Scan.Context)] =
+ {
+ val string =
+ quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
+ val alt_string =
+ quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
+ val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
+ val cart = cartouche_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.CARTOUCHE, x), c) }
+ val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
+ val other = other_token(lexicon, is_command) ^^ { case x => (x, Scan.Finished) }
+
+ string | (alt_string | (verb | (cart | (cmt | other))))
+ }
+ }
+
+
/* token reader */
class Position(val line: Int, val file: String) extends scala.util.parsing.input.Position
@@ -100,11 +185,11 @@
def is_end: Boolean = is_keyword && source == "end"
def content: String =
- if (kind == Token.Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
- else if (kind == Token.Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
- else if (kind == Token.Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
- else if (kind == Token.Kind.CARTOUCHE) Scan.Lexicon.empty.cartouche_content(source)
- else if (kind == Token.Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
+ if (kind == Token.Kind.STRING) Scan.Parsers.quoted_content("\"", source)
+ else if (kind == Token.Kind.ALT_STRING) Scan.Parsers.quoted_content("`", source)
+ else if (kind == Token.Kind.VERBATIM) Scan.Parsers.verbatim_content(source)
+ else if (kind == Token.Kind.CARTOUCHE) Scan.Parsers.cartouche_content(source)
+ else if (kind == Token.Kind.COMMENT) Scan.Parsers.comment_content(source)
else source
def text: (String, String) =
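
Note: Token.Parsers now receives the keyword table as an explicit Scan.Lexicon argument instead of inheriting parser methods from the lexicon itself, so a single stateless parser object serves every syntax. The shape of that change as a toy RegexParsers sketch (Toy and its productions are hypothetical, not the Isabelle API):

    import scala.util.parsing.combinator.RegexParsers

    object Toy extends RegexParsers {
      // keyword table passed in per call, longest alternative tried first
      def keyword(lexicon: Set[String]): Parser[String] =
        lexicon.toList.sortBy(k => -k.length).map(k => literal(k)).reduce(_ | _)
      def token(lexicon: Set[String]): Parser[String] =
        keyword(lexicon) | """[A-Za-z]+""".r
    }

    Toy.parseAll(Toy.rep(Toy.token(Set("=", "=>"))), "foo => bar")  // List(foo, =>, bar)
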
--- a/src/Pure/ML/ml_lex.ML Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/ML/ml_lex.ML Sat Feb 15 17:10:57 2014 +0100
@@ -142,12 +142,6 @@
fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg);
-(* blanks *)
-
-val scan_blank = Scan.one (Symbol.is_ascii_blank o Symbol_Pos.symbol);
-val scan_blanks1 = Scan.repeat1 scan_blank;
-
-
(* keywords *)
val keywords = ["#", "(", ")", ",", "->", "...", ":", ":>", ";", "=",
@@ -167,9 +161,7 @@
local
val scan_letdigs =
- Scan.many
- ((Symbol.is_ascii_letter orf Symbol.is_ascii_digit orf Symbol.is_ascii_quasi) o
- Symbol_Pos.symbol);
+ Scan.many (Symbol.is_ascii_letdig o Symbol_Pos.symbol);
val scan_alphanumeric =
Scan.one (Symbol.is_ascii_letter o Symbol_Pos.symbol) -- scan_letdigs >> op ::;
@@ -181,10 +173,10 @@
val scan_ident = scan_alphanumeric || scan_symbolic;
-val scan_longident =
+val scan_long_ident =
(Scan.repeat1 (scan_alphanumeric @@@ $$$ ".") >> flat) @@@ (scan_ident || $$$ "=");
-val scan_typevar = $$$ "'" @@@ scan_letdigs;
+val scan_type_var = $$$ "'" @@@ scan_letdigs;
end;
@@ -197,6 +189,7 @@
val scan_hex = Scan.many1 (Symbol.is_ascii_hex o Symbol_Pos.symbol);
val scan_sign = Scan.optional ($$$ "~") [];
val scan_decint = scan_sign @@@ scan_dec;
+val scan_exp = ($$$ "E" || $$$ "e") @@@ scan_decint;
in
@@ -206,8 +199,6 @@
val scan_int = scan_sign @@@ ($$$ "0" @@@ $$$ "x" @@@ scan_hex || scan_dec);
-val scan_exp = ($$$ "E" || $$$ "e") @@@ scan_decint;
-
val scan_real =
scan_decint @@@ $$$ "." @@@ scan_dec @@@ Scan.optional scan_exp [] ||
scan_decint @@@ scan_exp;
@@ -217,6 +208,8 @@
(* chars and strings *)
+val scan_blanks1 = Scan.many1 (Symbol.is_ascii_blank o Symbol_Pos.symbol);
+
local
val scan_escape =
@@ -269,9 +262,9 @@
(scan_word >> token Word ||
scan_real >> token Real ||
scan_int >> token Int ||
- scan_longident >> token LongIdent ||
+ scan_long_ident >> token LongIdent ||
scan_ident >> token Ident ||
- scan_typevar >> token TypeVar));
+ scan_type_var >> token TypeVar));
val scan_antiq = Antiquote.scan_antiq >> Antiquote.Antiq || scan_ml >> Antiquote.Text;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/ML/ml_lex.scala Sat Feb 15 17:10:57 2014 +0100
@@ -0,0 +1,226 @@
+/* Title: Pure/ML/ml_lex.scala
+ Author: Makarius
+
+Lexical syntax for SML.
+*/
+
+package isabelle
+
+
+import scala.collection.mutable
+import scala.util.parsing.input.{Reader, CharSequenceReader}
+
+
+object ML_Lex
+{
+ /** tokens **/
+
+ object Kind extends Enumeration
+ {
+ val KEYWORD = Value("keyword")
+ val IDENT = Value("identifier")
+ val LONG_IDENT = Value("long identifier")
+ val TYPE_VAR = Value("type variable")
+ val WORD = Value("word")
+ val INT = Value("integer")
+ val REAL = Value("real")
+ val CHAR = Value("character")
+ val STRING = Value("quoted string")
+ val SPACE = Value("white space")
+ val COMMENT = Value("comment text")
+ val ERROR = Value("bad input")
+ }
+
+ sealed case class Token(val kind: Kind.Value, val source: String)
+ {
+ def is_keyword: Boolean = kind == Kind.KEYWORD
+ def is_operator: Boolean = is_keyword && !Symbol.is_ascii_identifier(source)
+ }
+
+
+
+ /** parsers **/
+
+ case object ML_String extends Scan.Context
+
+ private val lexicon =
+ Scan.Lexicon("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=",
+ "=>", "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
+ "case", "datatype", "do", "else", "end", "eqtype", "exception", "fn",
+ "fun", "functor", "handle", "if", "in", "include", "infix", "infixr",
+ "let", "local", "nonfix", "of", "op", "open", "orelse", "raise", "rec",
+ "sharing", "sig", "signature", "struct", "structure", "then", "type",
+ "val", "where", "while", "with", "withtype")
+
+ private object Parsers extends Scan.Parsers
+ {
+ /* string material */
+
+ private val blanks = many(character(Symbol.is_ascii_blank))
+ private val blanks1 = many1(character(Symbol.is_ascii_blank))
+
+ private val gap = "\\" ~ blanks1 ~ "\\" ^^ { case x ~ y ~ z => x + y + z }
+ private val gap_start = "\\" ~ blanks ~ """\z""".r ^^ { case x ~ y ~ _ => x + y }
+
+ private val escape =
+ one(character("\"\\abtnvfr".contains(_))) |
+ "^" ~ one(character(c => '@' <= c && c <= '_')) ^^ { case x ~ y => x + y } |
+ repeated(character(Symbol.is_ascii_digit), 3, 3)
+
+ private val str =
+ one(Symbol.is_symbolic) |
+ one(character(c => c != '"' && c != '\\' && ' ' <= c && c <= '~')) |
+ "\\" ~ escape ^^ { case x ~ y => x + y }
+
+
+ /* ML char -- without gaps */
+
+ private val ml_char: Parser[Token] =
+ "#\"" ~ str ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.CHAR, x + y + z) }
+
+ private val recover_ml_char: Parser[String] =
+ "#\"" ~ opt(str) ^^ { case x ~ Some(y) => x + y case x ~ None => x }
+
+
+ /* ML string */
+
+ private val ml_string_body: Parser[String] =
+ rep(gap | str) ^^ (_.mkString)
+
+ private val recover_ml_string: Parser[String] =
+ "\"" ~ ml_string_body ^^ { case x ~ y => x + y }
+
+ private val ml_string: Parser[Token] =
+ "\"" ~ ml_string_body ~ "\"" ^^ { case x ~ y ~ z => Token(Kind.STRING, x + y + z) }
+
+ private def ml_string_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
+ {
+ def result(x: String, c: Scan.Context) = (Token(Kind.STRING, x), c)
+
+ ctxt match {
+ case Scan.Finished =>
+ "\"" ~ ml_string_body ~ ("\"" | gap_start) ^^
+ { case x ~ y ~ z => result(x + y + z, if (z == "\"") Scan.Finished else ML_String) }
+ case ML_String =>
+ blanks ~ opt_term("\\" ~ ml_string_body ~ ("\"" | gap_start)) ^^
+ { case x ~ Some(y ~ z ~ w) =>
+ result(x + y + z + w, if (w == "\"") Scan.Finished else ML_String)
+ case x ~ None => result(x, ML_String) }
+ case _ => failure("")
+ }
+ }
+
+
+ /* ML comment */
+
+ private val ml_comment: Parser[Token] =
+ comment ^^ (x => Token(Kind.COMMENT, x))
+
+ private def ml_comment_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
+ comment_context(ctxt) ^^ { case (x, c) => (Token(Kind.COMMENT, x), c) }
+
+
+ /* delimited token */
+
+ private def delimited_token: Parser[Token] =
+ ml_char | (ml_string | ml_comment)
+
+ private val recover_delimited: Parser[Token] =
+ (recover_ml_char | (recover_ml_string | recover_comment)) ^^ (x => Token(Kind.ERROR, x))
+
+
+ private def other_token: Parser[Token] =
+ {
+ /* identifiers */
+
+ val letdigs = many(character(Symbol.is_ascii_letdig))
+
+ val alphanumeric =
+ one(character(Symbol.is_ascii_letter)) ~ letdigs ^^ { case x ~ y => x + y }
+
+ val symbolic = many1(character("!#$%&*+-/:<=>?@\\^`|~".contains(_)))
+
+ val ident = (alphanumeric | symbolic) ^^ (x => Token(Kind.IDENT, x))
+
+ val long_ident =
+ rep1(alphanumeric ~ "." ^^ { case x ~ y => x + y }) ~
+ (alphanumeric | (symbolic | "=")) ^^
+ { case x ~ y => Token(Kind.LONG_IDENT, x.mkString + y) }
+
+ val type_var = "'" ~ letdigs ^^ { case x ~ y => Token(Kind.TYPE_VAR, x + y) }
+
+
+ /* numerals */
+
+ val dec = many1(character(Symbol.is_ascii_digit))
+ val hex = many1(character(Symbol.is_ascii_hex))
+ val sign = opt("~") ^^ { case Some(x) => x case None => "" }
+ val decint = sign ~ dec ^^ { case x ~ y => x + y }
+ val exp = ("E" | "e") ~ decint ^^ { case x ~ y => x + y }
+
+ val word =
+ ("0wx" ~ hex ^^ { case x ~ y => x + y } | "0w" ~ dec ^^ { case x ~ y => x + y }) ^^
+ (x => Token(Kind.WORD, x))
+
+ val int =
+ sign ~ ("0x" ~ hex ^^ { case x ~ y => x + y } | dec) ^^
+ { case x ~ y => Token(Kind.INT, x + y) }
+
+ val real =
+ (decint ~ "." ~ dec ~ (opt(exp) ^^ { case Some(x) => x case None => "" }) ^^
+ { case x ~ y ~ z ~ w => x + y + z + w } |
+ decint ~ exp ^^ { case x ~ y => x + y }) ^^ (x => Token(Kind.REAL, x))
+
+
+ /* main */
+
+ val space = blanks1 ^^ (x => Token(Kind.SPACE, x))
+
+ val keyword = literal(lexicon) ^^ (x => Token(Kind.KEYWORD, x))
+
+ val bad = one(_ => true) ^^ (x => Token(Kind.ERROR, x))
+
+ space | (recover_delimited |
+ (((word | (real | (int | (long_ident | (ident | type_var))))) ||| keyword) | bad))
+ }
+
+
+ /* token */
+
+ def token: Parser[Token] = delimited_token | other_token
+
+ def token_context(ctxt: Scan.Context): Parser[(Token, Scan.Context)] =
+ {
+ val other = (ml_char | other_token) ^^ (x => (x, Scan.Finished))
+
+ ml_string_context(ctxt) | (ml_comment_context(ctxt) | other)
+ }
+ }
+
+
+ /* tokenize */
+
+ def tokenize(input: CharSequence): List[Token] =
+ {
+ Parsers.parseAll(Parsers.rep(Parsers.token), new CharSequenceReader(input)) match {
+ case Parsers.Success(tokens, _) => tokens
+ case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
+ }
+ }
+
+ def tokenize_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
+ {
+ var in: Reader[Char] = new CharSequenceReader(input)
+ val toks = new mutable.ListBuffer[Token]
+ var ctxt = context
+ while (!in.atEnd) {
+ Parsers.parse(Parsers.token_context(ctxt), in) match {
+ case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+ case Parsers.NoSuccess(_, rest) =>
+ error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
+ }
+ }
+ (toks.toList, ctxt)
+ }
+}
+
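
Note: hypothetical use of the new tokenizer (assuming isabelle.ML_Lex as defined above is on the classpath); tokenize covers complete inputs, tokenize_context resumes across line boundaries:

    val toks = ML_Lex.tokenize("val x = 1 (*note*) handle e => raise e")
    toks.filter(_.is_keyword).map(_.source)
    // List(val, =, handle, =>, raise)

    // An SML string gap split over two lines, threaded via the ML_String context:
    val (t1, ctxt1) = ML_Lex.tokenize_context("\"a\\", Scan.Finished)  // line ends inside gap
    val (t2, ctxt2) = ML_Lex.tokenize_context("\\b\"", ctxt1)          // ctxt2 == Scan.Finished
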
--- a/src/Pure/Thy/thy_header.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Thy/thy_header.scala Sat Feb 15 17:10:57 2014 +0100
@@ -82,13 +82,13 @@
def read(reader: Reader[Char]): Thy_Header =
{
- val token = lexicon.token(_ => false)
+ val token = Token.Parsers.token(lexicon, _ => false)
val toks = new mutable.ListBuffer[Token]
@tailrec def scan_to_begin(in: Reader[Char])
{
token(in) match {
- case lexicon.Success(tok, rest) =>
+ case Token.Parsers.Success(tok, rest) =>
toks += tok
if (!tok.is_begin) scan_to_begin(rest)
case _ =>
--- a/src/Pure/Thy/thy_info.ML Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Thy/thy_info.ML Sat Feb 15 17:10:57 2014 +0100
@@ -324,8 +324,8 @@
else
error (loader_msg "incoherent imports for theory" [name] ^
Position.here require_pos ^ ":\n" ^
- Path.print node_name ^ " versus\n" ^
- Path.print node_name')
+ " " ^ Path.print node_name ^ "\n" ^
+ " " ^ Path.print node_name')
| check_entry _ = ();
in
(case try (String_Graph.get_node tasks) name of
--- a/src/Pure/Thy/thy_info.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Thy/thy_info.scala Sat Feb 15 17:10:57 2014 +0100
@@ -42,23 +42,45 @@
object Dependencies
{
- val empty = new Dependencies(Nil, Nil, Set.empty)
+ val empty = new Dependencies(Nil, Nil, Multi_Map.empty, Multi_Map.empty)
}
final class Dependencies private(
rev_deps: List[Dep],
val keywords: Thy_Header.Keywords,
- val seen: Set[Document.Node.Name])
+ val seen_names: Multi_Map[String, Document.Node.Name],
+ val seen_positions: Multi_Map[String, Position.T])
{
def :: (dep: Dep): Dependencies =
- new Dependencies(dep :: rev_deps, dep.header.keywords ::: keywords, seen)
+ new Dependencies(dep :: rev_deps, dep.header.keywords ::: keywords,
+ seen_names, seen_positions)
- def + (name: Document.Node.Name): Dependencies =
- new Dependencies(rev_deps, keywords, seen = seen + name)
+ def + (thy: (Document.Node.Name, Position.T)): Dependencies =
+ {
+ val (name, pos) = thy
+ new Dependencies(rev_deps, keywords,
+ seen_names + (name.theory -> name),
+ seen_positions + (name.theory -> pos))
+ }
def deps: List[Dep] = rev_deps.reverse
- def errors: List[String] = deps.flatMap(dep => dep.header.errors)
+ def errors: List[String] =
+ {
+ val header_errors = deps.flatMap(dep => dep.header.errors)
+ val import_errors =
+ (for {
+ (theory, names) <- seen_names.iterator_list
+ if !thy_load.loaded_theories(theory)
+ if names.length > 1
+ } yield
+ "Incoherent imports for theory " + quote(theory) + ":\n" +
+ cat_lines(names.flatMap(name =>
+ seen_positions.get_list(theory).map(pos =>
+ " " + quote(name.node) + Position.here(pos))))
+ ).toList
+ header_errors ::: import_errors
+ }
lazy val syntax: Outer_Syntax = thy_load.base_syntax.add_keywords(keywords)
@@ -79,32 +101,38 @@
}
private def require_thys(initiators: List[Document.Node.Name],
- required: Dependencies, names: List[Document.Node.Name]): Dependencies =
- (required /: names)(require_thy(initiators, _, _))
+ required: Dependencies, thys: List[(Document.Node.Name, Position.T)]): Dependencies =
+ (required /: thys)(require_thy(initiators, _, _))
private def require_thy(initiators: List[Document.Node.Name],
- required: Dependencies, name: Document.Node.Name): Dependencies =
+ required: Dependencies, thy: (Document.Node.Name, Position.T)): Dependencies =
{
- if (required.seen(name)) required
- else if (thy_load.loaded_theories(name.theory)) required + name
+ val (name, require_pos) = thy
+ val theory = name.theory
+
+ def message: String =
+ "The error(s) above occurred for theory " + quote(theory) +
+ required_by(initiators) + Position.here(require_pos)
+
+ val required1 = required + thy
+ if (required.seen_names.isDefinedAt(theory) || thy_load.loaded_theories(theory))
+ required1
else {
- def message: String =
- "The error(s) above occurred for theory " + quote(name.theory) + required_by(initiators)
-
try {
if (initiators.contains(name)) error(cycle_msg(initiators))
val header =
try { thy_load.check_thy(name).cat_errors(message) }
catch { case ERROR(msg) => cat_error(msg, message) }
- Dep(name, header) :: require_thys(name :: initiators, required + name, header.imports)
+ val imports = header.imports.map((_, Position.File(name.node)))
+ Dep(name, header) :: require_thys(name :: initiators, required1, imports)
}
catch {
case e: Throwable =>
- Dep(name, Document.Node.bad_header(Exn.message(e))) :: (required + name)
+ Dep(name, Document.Node.bad_header(Exn.message(e))) :: required1
}
}
}
- def dependencies(names: List[Document.Node.Name]): Dependencies =
- require_thys(Nil, Dependencies.empty, names)
+ def dependencies(thys: List[(Document.Node.Name, Position.T)]): Dependencies =
+ require_thys(Nil, Dependencies.empty, thys)
}
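
Note: Dependencies now records every theory -> file and theory -> position binding in a Multi_Map rather than a plain seen set, so a theory required under one name from two different files is reported instead of silently deduplicated. The error construction, sketched standalone over plain maps (hypothetical data):

    val seen_names: Map[String, List[String]] =  // theory name -> node paths
      Map("Foo" -> List("A/Foo.thy", "B/Foo.thy"), "Bar" -> List("A/Bar.thy"))

    val import_errors =
      for ((theory, names) <- seen_names.toList if names.length > 1)
        yield "Incoherent imports for theory \"" + theory + "\":\n" +
          names.map("  \"" + _ + "\"").mkString("\n")
    // one message for "Foo", listing both files; "Bar" is untouched
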
--- a/src/Pure/Tools/build.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/Tools/build.scala Sat Feb 15 17:10:57 2014 +0100
@@ -432,7 +432,7 @@
val thy_deps =
thy_info.dependencies(
info.theories.map(_._2).flatten.
- map(thy => thy_load.node_name(info.dir + Thy_Load.thy_path(thy))))
+ map(thy => (thy_load.node_name(info.dir + Thy_Load.thy_path(thy)), info.pos)))
thy_deps.errors match {
case Nil =>
--- a/src/Pure/build-jars Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Pure/build-jars Sat Feb 15 17:10:57 2014 +0100
@@ -43,6 +43,7 @@
Isar/outer_syntax.scala
Isar/parse.scala
Isar/token.scala
+ ML/ml_lex.scala
PIDE/command.scala
PIDE/document.scala
PIDE/document_id.scala
--- a/src/Tools/jEdit/lib/Tools/jedit Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Tools/jEdit/lib/Tools/jedit Sat Feb 15 17:10:57 2014 +0100
@@ -55,6 +55,7 @@
"src/Isabelle.props"
"src/jEdit.props"
"src/services.xml"
+ "src/modes/isabelle-ml.xml"
"src/modes/isabelle-news.xml"
"src/modes/isabelle-options.xml"
"src/modes/isabelle-root.xml"
@@ -275,12 +276,19 @@
cp -p -R -f src/modes/. dist/modes/.
perl -i -e 'while (<>) {
- if (m/NAME="javacc"/) {
+ if (m/FILE_NAME_GLOB="\*\.{sml,ml}"/) {
+ print qq,\t\t\t\tFILE_NAME_GLOB="*.sml" />\n,;
+ }
+ elsif (m/NAME="javacc"/) {
print qq,<MODE NAME="isabelle" FILE="isabelle.xml" FILE_NAME_GLOB="*.thy"/>\n\n,;
+ print qq,<MODE NAME="isabelle-ml" FILE="isabelle-ml.xml" FILE_NAME_GLOB="*.ML"/>\n\n,;
print qq,<MODE NAME="isabelle-news" FILE="isabelle-news.xml"/>\n\n,;
print qq,<MODE NAME="isabelle-options" FILE="isabelle-options.xml"/>\n\n,;
- print qq,<MODE NAME="isabelle-root" FILE="isabelle-root.xml" FILE_NAME_GLOB="ROOT"/>\n\n,; }
- print; }' dist/modes/catalog
+ print qq,<MODE NAME="isabelle-root" FILE="isabelle-root.xml" FILE_NAME_GLOB="ROOT"/>\n\n,;
+ print;
+ }
+ else { print; }
+ }' dist/modes/catalog
cd dist
isabelle_jdk jar xf jedit.jar
--- a/src/Tools/jEdit/src/isabelle.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Tools/jEdit/src/isabelle.scala Sat Feb 15 17:10:57 2014 +0100
@@ -24,13 +24,14 @@
val modes =
List(
"isabelle", // theory source
+ "isabelle-ml", // ML source
"isabelle-markup", // SideKick markup tree
"isabelle-news", // NEWS
"isabelle-options", // etc/options
"isabelle-output", // pretty text area output
"isabelle-root") // session ROOT
- private lazy val news_syntax = Outer_Syntax.init().no_tokens
+ private lazy val symbols_syntax = Outer_Syntax.init().no_tokens
def mode_syntax(name: String): Option[Outer_Syntax] =
name match {
@@ -39,7 +40,7 @@
if (syntax == Outer_Syntax.empty) None else Some(syntax)
case "isabelle-options" => Some(Options.options_syntax)
case "isabelle-root" => Some(Build.root_syntax)
- case "isabelle-news" => Some(news_syntax)
+ case "isabelle-ml" | "isabelle-news" => Some(symbols_syntax)
case "isabelle-output" => None
case _ => None
}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/jEdit/src/modes/isabelle-ml.xml Sat Feb 15 17:10:57 2014 +0100
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE MODE SYSTEM "xmode.dtd">
+
+<!-- Isabelle/ML mode -->
+<MODE>
+ <PROPS>
+ <PROPERTY NAME="commentStart" VALUE="(*"/>
+ <PROPERTY NAME="commentEnd" VALUE="*)"/>
+ <PROPERTY NAME="noWordSep" VALUE="_'.?"/>
+ <PROPERTY NAME="unalignedOpenBrackets" VALUE="{[(«‹⟨⌈⌊⦇⟦⦃" />
+ <PROPERTY NAME="unalignedCloseBrackets" VALUE="⦄⟧⦈⌋⌉⟩›»)]}" />
+ <PROPERTY NAME="tabSize" VALUE="2" />
+ <PROPERTY NAME="indentSize" VALUE="2" />
+ </PROPS>
+</MODE>
--- a/src/Tools/jEdit/src/plugin.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Tools/jEdit/src/plugin.scala Sat Feb 15 17:10:57 2014 +0100
@@ -173,7 +173,7 @@
buffer <- buffers
model <- PIDE.document_model(buffer)
if model.is_theory
- } yield model.node_name
+ } yield (model.node_name, Position.none)
val thy_info = new Thy_Info(PIDE.thy_load)
// FIXME avoid I/O in Swing thread!?!
--- a/src/Tools/jEdit/src/rendering.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Tools/jEdit/src/rendering.scala Sat Feb 15 17:10:57 2014 +0100
@@ -67,7 +67,7 @@
def popup_bounds: Double = (PIDE.options.real("jedit_popup_bounds") max 0.2) min 0.8
- /* token markup -- text styles */
+ /* Isabelle/Isar token markup */
private val command_style: Map[String, Byte] =
{
@@ -110,6 +110,41 @@
if (token.is_command) command_style(syntax.keyword_kind(token.content).getOrElse(""))
else if (token.is_operator) JEditToken.OPERATOR
else token_style(token.kind)
+
+
+ /* Isabelle/ML token markup */
+
+ private val ml_keyword2: Set[String] =
+ Set("case", "do", "else", "end", "if", "in", "let", "local", "of",
+ "sig", "struct", "then", "while", "with")
+
+ private val ml_keyword3: Set[String] =
+ Set("handle", "open", "raise")
+
+ private val ml_token_style: Map[ML_Lex.Kind.Value, Byte] =
+ {
+ import JEditToken._
+ Map[ML_Lex.Kind.Value, Byte](
+ ML_Lex.Kind.IDENT -> NULL,
+ ML_Lex.Kind.LONG_IDENT -> NULL,
+ ML_Lex.Kind.TYPE_VAR -> NULL,
+ ML_Lex.Kind.WORD -> DIGIT,
+ ML_Lex.Kind.INT -> DIGIT,
+ ML_Lex.Kind.REAL -> DIGIT,
+ ML_Lex.Kind.CHAR -> LITERAL2,
+ ML_Lex.Kind.STRING -> LITERAL1,
+ ML_Lex.Kind.SPACE -> NULL,
+ ML_Lex.Kind.COMMENT -> COMMENT1,
+ ML_Lex.Kind.ERROR -> INVALID
+ ).withDefaultValue(NULL)
+ }
+
+ def ml_token_markup(token: ML_Lex.Token): Byte =
+ if (!token.is_keyword) ml_token_style(token.kind)
+ else if (token.is_operator) JEditToken.OPERATOR
+ else if (ml_keyword2(token.source)) JEditToken.KEYWORD2
+ else if (ml_keyword3(token.source)) JEditToken.KEYWORD3
+ else JEditToken.KEYWORD1
}
--- a/src/Tools/jEdit/src/token_markup.scala Sat Feb 15 00:11:17 2014 +0100
+++ b/src/Tools/jEdit/src/token_markup.scala Sat Feb 15 17:10:57 2014 +0100
@@ -203,21 +203,29 @@
val context1 =
{
val (styled_tokens, context1) =
- Isabelle.mode_syntax(mode) match {
- case Some(syntax) if syntax.has_tokens && line_ctxt.isDefined =>
- val (tokens, ctxt1) = syntax.scan_context(line, line_ctxt.get)
- val styled_tokens = tokens.map(tok => (Rendering.token_markup(syntax, tok), tok))
- (styled_tokens, new Line_Context(Some(ctxt1)))
- case _ =>
- val token = Token(Token.Kind.UNPARSED, line.subSequence(0, line.count).toString)
- (List((JEditToken.NULL, token)), new Line_Context(None))
+ if (mode == "isabelle-ml") {
+ val (tokens, ctxt1) = ML_Lex.tokenize_context(line, line_ctxt.get)
+ val styled_tokens = tokens.map(tok => (Rendering.ml_token_markup(tok), tok.source))
+ (styled_tokens, new Line_Context(Some(ctxt1)))
+ }
+ else {
+ Isabelle.mode_syntax(mode) match {
+ case Some(syntax) if syntax.has_tokens && line_ctxt.isDefined =>
+ val (tokens, ctxt1) = syntax.scan_context(line, line_ctxt.get)
+ val styled_tokens =
+ tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source))
+ (styled_tokens, new Line_Context(Some(ctxt1)))
+ case _ =>
+ val styled_token = (JEditToken.NULL, line.subSequence(0, line.count).toString)
+ (List(styled_token), new Line_Context(None))
+ }
}
val extended = extended_styles(line)
var offset = 0
for ((style, token) <- styled_tokens) {
- val length = token.source.length
+ val length = token.length
val end_offset = offset + length
if ((offset until end_offset) exists
(i => extended.isDefinedAt(i) || line.charAt(i) == '\t')) {