merged
author Andreas Lochbihler
Fri, 09 Jan 2015 09:17:10 +0100
changeset 59326 46491010b83a
parent 59325 922d31f5c3f5 (current diff)
parent 59324 f5f9993a168d (diff)
child 59327 8a779359df67
merged
--- a/src/Doc/Classes/Setup.thy	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Doc/Classes/Setup.thy	Fri Jan 09 09:17:10 2015 +0100
@@ -5,9 +5,7 @@
 ML_file "../antiquote_setup.ML"
 ML_file "../more_antiquote.ML"
 
-setup {*
-  Code_Target.set_default_code_width 74
-*}
+declare [[default_code_width = 74]]
 
 syntax
   "_alpha" :: "type"  ("\<alpha>")
--- a/src/Doc/Codegen/Setup.thy	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Doc/Codegen/Setup.thy	Fri Jan 09 09:17:10 2015 +0100
@@ -28,9 +28,8 @@
 end
 *}
 
-setup {* Code_Target.set_default_code_width 74 *}
+declare [[default_code_width = 74]]
 
 declare [[names_unique = false]]
 
 end
-
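
The two Setup.thy hunks above replace an ML-level setup call by the attribute form
declare [[default_code_width = 74]].  This works because the width is now an ordinary
Isabelle configuration option, declared with Attrib.setup_config_int and read back with
Config.get in the code_target.ML hunk further down in this changeset.  A minimal sketch
of that idiom, using a hypothetical option name my_code_width rather than the real one:

    ML {*
      (* declare an integer configuration option with default 80;
         the name my_code_width is invented for illustration only *)
      val my_code_width =
        Attrib.setup_config_int @{binding my_code_width} (K 80);
    *}

    declare [[my_code_width = 74]]   (* override the default in the current theory *)

    ML {*
      (* read the current value from a proof context *)
      fun width_of ctxt = Config.get ctxt my_code_width;
    *}
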
--- a/src/Doc/Isar_Ref/Spec.thy	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Doc/Isar_Ref/Spec.thy	Fri Jan 09 09:17:10 2015 +0100
@@ -805,7 +805,7 @@
   and provides
   \begin{enumerate}
     \item a unified view on arbitrary suitable local theories as interpretation target; 
-    \item rewrite morphisms by means of \emph{mixin definitions}. 
+    \item rewrite morphisms by means of \emph{rewrite definitions}. 
   \end{enumerate}
   
   \begin{matharray}{rcl}
@@ -841,30 +841,37 @@
   corresponding to @{command "interpretation"}) and locales
   (where @{command "permanent_interpretation"} is technically
   corresponding to @{command "sublocale"}).  Unnamed contexts
-  \secref{sec:target} are not admissible since they are
+  (see \secref{sec:target}) are not admissible since they are
   non-permanent due to their very nature.  
 
   In additions to \emph{rewrite morphisms} specified by @{text eqns},
-  also \emph{mixin definitions} may be specified.  Semantically, a
-  mixin definition results in a corresponding definition in
-  the local theory's underlying target \emph{and} a mixin equation
-  in the resulting rewrite morphisms which is symmetric to the
-  original definition equation.
+  \emph{rewrite definitions} may also be specified.  Semantically, a
+  rewrite definition
+  
+  \begin{itemize}
+  
+    \item produces a corresponding definition in
+      the local theory's underlying target \emph{and}
+    
+    \item augments the rewrite morphism with the equation
+      stemming from the symmetric form of the corresponding definition.
   
-  The technical difference to a conventional definition plus
-  an explicit mixin equation is that
+  \end{itemize}
   
-  \begin{enumerate}
+  This is technically different from a naive combination
+  of a conventional definition and an explicit rewrite equation:
   
-    \item definitions are parsed in the syntactic interpretation
-      context, just like equations;
+  \begin{itemize}
   
-    \item the proof needs not consider the equations stemming from
+    \item Definitions are parsed in the syntactic interpretation
+      context, just like equations.
+  
+    \item The proof need not consider the equations stemming from
       definitions -- they are proved implicitly by construction.
       
-  \end{enumerate}
+  \end{itemize}
   
-  Mixin definitions yield a pattern for introducing new explicit
+  Rewrite definitions yield a pattern for introducing new explicit
   operations for existing terms after interpretation.
   
   \end{description}
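
Rewrite definitions, as documented in the hunk above, bundle two steps that can otherwise
be spelled out by hand: a plain definition introducing an explicit operation for an
interpreted term, and its symmetric equation handed to the interpretation as a rewrite
morphism -- with a rewrite definition that equation is discharged implicitly, whereas in
the manual pattern it must be proved in the interpretation proof.  A rough sketch of the
manual pattern in ordinary Isar; the locale and constant names (double, twice, suc2) are
invented for illustration and do not occur in this changeset:

    theory Sketch
    imports Main
    begin

    locale double =
      fixes f :: "'a \<Rightarrow> 'a"
    begin

    definition twice :: "'a \<Rightarrow> 'a"
      where "twice x = f (f x)"

    end

    (* explicit operation for the term the interpretation will produce ... *)
    definition suc2 :: "nat \<Rightarrow> nat"
      where "suc2 n = Suc (Suc n)"

    (* ... and its symmetric equation as rewrite morphism of the interpretation *)
    interpretation nat_double: double Suc
      where "double.twice Suc n = suc2 n"
      by (simp add: double.twice_def suc2_def)

    end
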
--- a/src/HOL/Groups.thy	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/HOL/Groups.thy	Fri Jan 09 09:17:10 2015 +0100
@@ -208,57 +208,6 @@
 
 end
 
-class comm_monoid_diff = comm_monoid_add + minus +
-  assumes diff_zero [simp]: "a - 0 = a"
-    and zero_diff [simp]: "0 - a = 0"
-    and add_diff_cancel_left [simp]: "(c + a) - (c + b) = a - b"
-    and diff_diff_add: "a - b - c = a - (b + c)"
-begin
-
-lemma add_diff_cancel_right [simp]:
-  "(a + c) - (b + c) = a - b"
-  using add_diff_cancel_left [symmetric] by (simp add: add.commute)
-
-lemma add_diff_cancel_left' [simp]:
-  "(b + a) - b = a"
-proof -
-  have "(b + a) - (b + 0) = a" by (simp only: add_diff_cancel_left diff_zero)
-  then show ?thesis by simp
-qed
-
-lemma add_diff_cancel_right' [simp]:
-  "(a + b) - b = a"
-  using add_diff_cancel_left' [symmetric] by (simp add: add.commute)
-
-lemma diff_add_zero [simp]:
-  "a - (a + b) = 0"
-proof -
-  have "a - (a + b) = (a + 0) - (a + b)" by simp
-  also have "\<dots> = 0" by (simp only: add_diff_cancel_left zero_diff)
-  finally show ?thesis .
-qed
-
-lemma diff_cancel [simp]:
-  "a - a = 0"
-proof -
-  have "(a + 0) - (a + 0) = 0" by (simp only: add_diff_cancel_left diff_zero)
-  then show ?thesis by simp
-qed
-
-lemma diff_right_commute:
-  "a - c - b = a - b - c"
-  by (simp add: diff_diff_add add.commute)
-
-lemma add_implies_diff:
-  assumes "c + b = a"
-  shows "c = a - b"
-proof -
-  from assms have "(b + c) - (b + 0) = a - b" by (simp add: add.commute)
-  then show "c = a - b" by simp
-qed
-
-end
-
 class monoid_mult = one + semigroup_mult +
   assumes mult_1_left: "1 * a  = a"
     and mult_1_right: "a * 1 = a"
@@ -319,6 +268,57 @@
 
 class cancel_comm_monoid_add = cancel_ab_semigroup_add + comm_monoid_add
 
+class comm_monoid_diff = comm_monoid_add + minus +
+  assumes diff_zero [simp]: "a - 0 = a"
+    and zero_diff [simp]: "0 - a = 0"
+    and add_diff_cancel_left [simp]: "(c + a) - (c + b) = a - b"
+    and diff_diff_add: "a - b - c = a - (b + c)"
+begin
+
+lemma add_diff_cancel_right [simp]:
+  "(a + c) - (b + c) = a - b"
+  using add_diff_cancel_left [symmetric] by (simp add: add.commute)
+
+lemma add_diff_cancel_left' [simp]:
+  "(b + a) - b = a"
+proof -
+  have "(b + a) - (b + 0) = a" by (simp only: add_diff_cancel_left diff_zero)
+  then show ?thesis by simp
+qed
+
+lemma add_diff_cancel_right' [simp]:
+  "(a + b) - b = a"
+  using add_diff_cancel_left' [symmetric] by (simp add: add.commute)
+
+lemma diff_add_zero [simp]:
+  "a - (a + b) = 0"
+proof -
+  have "a - (a + b) = (a + 0) - (a + b)" by simp
+  also have "\<dots> = 0" by (simp only: add_diff_cancel_left zero_diff)
+  finally show ?thesis .
+qed
+
+lemma diff_cancel [simp]:
+  "a - a = 0"
+proof -
+  have "(a + 0) - (a + 0) = 0" by (simp only: add_diff_cancel_left diff_zero)
+  then show ?thesis by simp
+qed
+
+lemma diff_right_commute:
+  "a - c - b = a - b - c"
+  by (simp add: diff_diff_add add.commute)
+
+lemma add_implies_diff:
+  assumes "c + b = a"
+  shows "c = a - b"
+proof -
+  from assms have "(b + c) - (b + 0) = a - b" by (simp add: add.commute)
+  then show "c = a - b" by simp
+qed
+
+end
+
 
 subsection {* Groups *}
 
--- a/src/HOL/Library/code_test.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/HOL/Library/code_test.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -571,16 +571,15 @@
     "compile test cases to target languages, execute them and report results"
       (test_codeP >> (fn (raw_ts, targets) => Toplevel.keep (test_code_cmd raw_ts targets)))
 
-val _ = Context.>> (Context.map_theory (
-  fold add_driver
-    [(polymlN, (evaluate_in_polyml, Code_ML.target_SML)),
-     (mltonN, (evaluate_in_mlton, Code_ML.target_SML)),
-     (smlnjN, (evaluate_in_smlnj, Code_ML.target_SML)),
-     (ocamlN, (evaluate_in_ocaml, Code_ML.target_OCaml)),
-     (ghcN, (evaluate_in_ghc, Code_Haskell.target)),
-     (scalaN, (evaluate_in_scala, Code_Scala.target))]
-    #> fold (fn target => Value.add_evaluator (target, eval_term target))
-      [polymlN, mltonN, smlnjN, ocamlN, ghcN, scalaN]
-    ))
+val _ = Theory.setup (fold add_driver
+  [(polymlN, (evaluate_in_polyml, Code_ML.target_SML)),
+   (mltonN, (evaluate_in_mlton, Code_ML.target_SML)),
+   (smlnjN, (evaluate_in_smlnj, Code_ML.target_SML)),
+   (ocamlN, (evaluate_in_ocaml, Code_ML.target_OCaml)),
+   (ghcN, (evaluate_in_ghc, Code_Haskell.target)),
+   (scalaN, (evaluate_in_scala, Code_Scala.target))]
+  #> fold (fn target => Value.add_evaluator (target, eval_term target))
+    [polymlN, mltonN, smlnjN, ocamlN, ghcN, scalaN]);
+
 end
 
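
The code_test.ML hunk above is one instance of a refactoring that recurs throughout this
changeset: the implicit-context incantation Context.>> (Context.map_theory ...) is
replaced by the equivalent Theory.setup combinator.  A minimal sketch of the two idioms,
with my_setup as a placeholder theory transformation (not part of the patch):

    ML {*
      (* placeholder theory transformation, for illustration only *)
      fun my_setup (thy : theory) : theory = thy;

      (* old idiom, as removed in this changeset *)
      val _ = Context.>> (Context.map_theory my_setup);

      (* new, equivalent idiom *)
      val _ = Theory.setup my_setup;
    *}
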
--- a/src/HOL/Tools/code_evaluation.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/HOL/Tools/code_evaluation.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -134,11 +134,11 @@
 
 (* setup *)
 
-val _ = Context.>> (Context.map_theory
+val _ = Theory.setup
   (Code.datatype_interpretation ensure_term_of
   #> Code.abstype_interpretation ensure_term_of
   #> Code.datatype_interpretation ensure_term_of_code
-  #> Code.abstype_interpretation ensure_abs_term_of_code));
+  #> Code.abstype_interpretation ensure_abs_term_of_code);
 
 
 (** termifying syntax **)
--- a/src/HOL/Tools/semiring_normalizer.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/HOL/Tools/semiring_normalizer.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -75,7 +75,7 @@
     fun match_inst
         ({vars, semiring = (sr_ops, sr_rules), 
           ring = (r_ops, r_rules), field = (f_ops, f_rules), idom, ideal},
-         fns as {is_const, dest_const, mk_const, conv}) pat =
+         fns) pat =
        let
         fun h instT =
           let
@@ -287,7 +287,7 @@
      pthm_09, pthm_10, pthm_11, pthm_12, pthm_13, pthm_14, pthm_15, pthm_16,
      pthm_17, pthm_18, pthm_19, pthm_21, pthm_22, pthm_23, pthm_24,
      pthm_25, pthm_26, pthm_27, pthm_28, pthm_29, pthm_30, pthm_31, pthm_32,
-     pthm_33, pthm_34, pthm_35, pthm_36, pthm_37, pthm_38,pthm_39,pthm_40] = sr_rules;
+     pthm_33, pthm_34, pthm_35, pthm_36, pthm_37, pthm_38, _, _] = sr_rules;
 
 val [ca, cb, cc, cd, cm, cn, cp, cq, cx, cy, cz, clx, crx, cly, cry] = vars;
 val [add_pat, mul_pat, pow_pat, zero_tm, one_tm] = sr_ops;
@@ -301,29 +301,27 @@
  end;
 val is_add = is_binop add_tm
 val is_mul = is_binop mul_tm
-fun is_pow tm = is_binop pow_tm tm andalso is_numeral(Thm.dest_arg tm);
 
-val (neg_mul,sub_add,sub_tm,neg_tm,dest_sub,is_sub,cx',cy') =
+val (neg_mul, sub_add, sub_tm, neg_tm, dest_sub, cx', cy') =
   (case (r_ops, r_rules) of
     ([sub_pat, neg_pat], [neg_mul, sub_add]) =>
       let
         val sub_tm = Thm.dest_fun (Thm.dest_fun sub_pat)
         val neg_tm = Thm.dest_fun neg_pat
         val dest_sub = dest_binop sub_tm
-        val is_sub = is_binop sub_tm
-      in (neg_mul,sub_add,sub_tm,neg_tm,dest_sub,is_sub, neg_mul |> concl |> Thm.dest_arg,
+      in (neg_mul,sub_add,sub_tm,neg_tm,dest_sub, neg_mul |> concl |> Thm.dest_arg,
           sub_add |> concl |> Thm.dest_arg |> Thm.dest_arg)
       end
-    | _ => (TrueI, TrueI, true_tm, true_tm, (fn t => (t,t)), K false, true_tm, true_tm));
+    | _ => (TrueI, TrueI, true_tm, true_tm, (fn t => (t,t)), true_tm, true_tm));
 
-val (divide_inverse, inverse_divide, divide_tm, inverse_tm, is_divide) = 
+val (divide_inverse, divide_tm, inverse_tm) = 
   (case (f_ops, f_rules) of 
-   ([divide_pat, inverse_pat], [div_inv, inv_div]) => 
+   ([divide_pat, inverse_pat], [div_inv, _]) => 
      let val div_tm = funpow 2 Thm.dest_fun divide_pat
          val inv_tm = Thm.dest_fun inverse_pat
-     in (div_inv, inv_div, div_tm, inv_tm, is_binop div_tm)
+     in (div_inv, div_tm, inv_tm)
      end
-   | _ => (TrueI, TrueI, true_tm, true_tm, K false));
+   | _ => (TrueI, true_tm, true_tm));
 
 in fn variable_order =>
  let
@@ -341,7 +339,7 @@
       ((let
          val (lx,ln) = dest_pow l
         in
-         ((let val (rx,rn) = dest_pow r
+         ((let val (_, rn) = dest_pow r
                val th1 = inst_thm [(cx,lx),(cp,ln),(cq,rn)] pthm_29
                 val (tm1,tm2) = Thm.dest_comb(concl th1) in
                Thm.transitive th1 (Drule.arg_cong_rule tm1 (nat_add_conv ctxt tm2)) end)
@@ -589,8 +587,8 @@
  let fun lexorder l1 l2 =
   case (l1,l2) of
     ([],[]) => 0
-  | (vps,[]) => ~1
-  | ([],vps) => 1
+  | (_ ,[]) => ~1
+  | ([], _) => 1
   | (((x1,n1)::vs1),((x2,n2)::vs2)) =>
      if variable_order x1 x2 then 1
      else if variable_order x2 x1 then ~1
@@ -885,16 +883,11 @@
 
 local
 
-fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
 fun keyword2 k1 k2 = Scan.lift (Args.$$$ k1 -- Args.$$$ k2 -- Args.colon) >> K ();
-fun keyword3 k1 k2 k3 =
-  Scan.lift (Args.$$$ k1 -- Args.$$$ k2 -- Args.$$$ k3 -- Args.colon) >> K ();
 
 val opsN = "ops";
 val rulesN = "rules";
 
-val normN = "norm";
-val constN = "const";
 val delN = "del";
 
 val any_keyword =
--- a/src/HOL/Tools/value.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/HOL/Tools/value.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -74,7 +74,7 @@
     (opt_evaluator -- opt_modes -- Parse.term
       >> (fn ((some_name, modes), t) => Toplevel.keep (value_cmd some_name modes t)));
 
-val _ = Context.>> (Context.map_theory
+val _ = Theory.setup
   (Thy_Output.antiquotation @{binding value}
     (Scan.lift opt_evaluator -- Term_Style.parse -- Args.term)
     (fn {source, context, ...} => fn ((some_name, style), t) => Thy_Output.output context
@@ -82,7 +82,6 @@
         [style (value_maybe_select some_name context t)]))
   #> add_evaluator ("simp", Code_Simp.dynamic_value)
   #> add_evaluator ("nbe", Nbe.dynamic_value)
-  #> add_evaluator ("code", Code_Evaluation.dynamic_value_strict))
-);
+  #> add_evaluator ("code", Code_Evaluation.dynamic_value_strict));
 
 end;
--- a/src/Pure/General/completion.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/General/completion.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -416,7 +416,7 @@
       }
 
     (abbrevs_result orElse words_result) match {
-      case Some((original, completions)) if !completions.isEmpty =>
+      case Some((original, completions)) if completions.nonEmpty =>
         val range = Text.Range(- original.length, 0) + caret + start
         val immediate =
           explicit ||
--- a/src/Pure/General/linear_set.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/General/linear_set.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -122,7 +122,7 @@
   override def size: Int = if (isEmpty) 0 else nexts.size + 1
 
   def contains(elem: A): Boolean =
-    !isEmpty && (end.get == elem || nexts.isDefinedAt(elem))
+    nonEmpty && (end.get == elem || nexts.isDefinedAt(elem))
 
   private def make_iterator(from: Option[A]): Iterator[A] = new Iterator[A] {
     private var next_elem = from
--- a/src/Pure/General/path.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/General/path.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -118,7 +118,7 @@
 final class Path private(private val elems: List[Path.Elem]) // reversed elements
 {
   def is_current: Boolean = elems.isEmpty
-  def is_absolute: Boolean = !elems.isEmpty && elems.last.isInstanceOf[Path.Root]
+  def is_absolute: Boolean = elems.nonEmpty && elems.last.isInstanceOf[Path.Root]
   def is_basic: Boolean = elems match { case List(Path.Basic(_)) => true case _ => false }
 
   def +(other: Path): Path = new Path((other.elems :\ elems)(Path.apply_elem))
--- a/src/Pure/General/scan.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/General/scan.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -330,7 +330,7 @@
       {
         if (i < len) {
           tree.branches.get(str.charAt(i)) match {
-            case Some((s, tr)) => look(tr, !s.isEmpty, i + 1)
+            case Some((s, tr)) => look(tr, s.nonEmpty, i + 1)
             case None => None
           }
         }
--- a/src/Pure/General/word.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/General/word.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -61,7 +61,7 @@
         case Capitalized => capitalize(str)
       }
     def unapply(str: String): Option[Case] =
-      if (!str.isEmpty) {
+      if (str.nonEmpty) {
         if (codepoint_iterator(str).forall(Character.isLowerCase(_))) Some(Lowercase)
         else if (codepoint_iterator(str).forall(Character.isUpperCase(_))) Some(Uppercase)
         else {
--- a/src/Pure/Isar/outer_syntax.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -194,7 +194,7 @@
     def ship(span: List[Token])
     {
       val kind =
-        if (!span.isEmpty && span.head.is_command && !span.exists(_.is_error)) {
+        if (span.nonEmpty && span.head.is_command && !span.exists(_.is_error)) {
           val name = span.head.source
           val pos = Position.Range(Text.Range(0, Symbol.iterator(name).length) + 1)
           Command_Span.Command_Span(name, pos)
@@ -206,8 +206,8 @@
 
     def flush()
     {
-      if (!content.isEmpty) { ship(content.toList); content.clear }
-      if (!improper.isEmpty) { ship(improper.toList); improper.clear }
+      if (content.nonEmpty) { ship(content.toList); content.clear }
+      if (improper.nonEmpty) { ship(improper.toList); improper.clear }
     }
 
     for (tok <- toks) {
--- a/src/Pure/PIDE/document.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/PIDE/document.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -113,7 +113,7 @@
           case _ => false
         }
 
-      def is_theory: Boolean = !theory.isEmpty
+      def is_theory: Boolean = theory.nonEmpty
       override def toString: String = if (is_theory) theory else node
 
       def map(f: String => String): Name = copy(f(node), f(master_dir), theory)
@@ -208,7 +208,7 @@
     final class Commands private(val commands: Linear_Set[Command])
     {
       lazy val load_commands: List[Command] =
-        commands.iterator.filter(cmd => !cmd.blobs.isEmpty).toList
+        commands.iterator.filter(cmd => cmd.blobs.nonEmpty).toList
 
       private lazy val full_index: (Array[(Command, Text.Offset)], Text.Range) =
       {
@@ -229,7 +229,7 @@
 
       def iterator(i: Text.Offset = 0): Iterator[(Command, Text.Offset)] =
       {
-        if (!commands.isEmpty && full_range.contains(i)) {
+        if (commands.nonEmpty && full_range.contains(i)) {
           val (cmd0, start0) = full_index._1(i / Commands.block_size)
           Node.Commands.starts(commands.iterator(cmd0), start0) dropWhile {
             case (cmd, start) => start + cmd.length <= i }
@@ -628,7 +628,7 @@
       history.prune(is_stable, retain) match {
         case Some((dropped, history1)) =>
           val old_versions = dropped.map(change => change.version.get_finished)
-          val removing = !old_versions.isEmpty
+          val removing = old_versions.nonEmpty
           val state1 = copy(history = history1, removing_versions = removing)
           (old_versions, state1)
         case None => fail
@@ -750,7 +750,7 @@
 
         val state = State.this
         val version = stable.version.get_finished
-        val is_outdated = !(pending_edits.isEmpty && latest == stable)
+        val is_outdated = pending_edits.nonEmpty || latest != stable
 
 
         /* local node content */
@@ -770,7 +770,7 @@
           if (node_name.is_theory) Nil
           else version.nodes.load_commands(node_name)
 
-        val is_loaded: Boolean = node_name.is_theory || !load_commands.isEmpty
+        val is_loaded: Boolean = node_name.is_theory || load_commands.nonEmpty
 
         def eq_content(other: Snapshot): Boolean =
         {
--- a/src/Pure/PIDE/session.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/PIDE/session.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -131,7 +131,7 @@
               (a, (msg: Prover.Protocol_Output) => f(prover, msg))
 
           val dups = for ((a, _) <- new_functions if functions1.isDefinedAt(a)) yield a
-          if (!dups.isEmpty) error("Duplicate protocol functions: " + commas_quote(dups))
+          if (dups.nonEmpty) error("Duplicate protocol functions: " + commas_quote(dups))
 
           (handlers1 + (name -> new_handler), functions1 ++ new_functions)
         }
@@ -287,7 +287,7 @@
     private var commands: Set[Command] = Set.empty
 
     def flush(): Unit = synchronized {
-      if (assignment || !nodes.isEmpty || !commands.isEmpty)
+      if (assignment || nodes.nonEmpty || commands.nonEmpty)
         commands_changed.post(Session.Commands_Changed(assignment, nodes, commands))
       assignment = false
       nodes = Set.empty
@@ -533,7 +533,7 @@
           case Prune_History =>
             if (prover.defined) {
               val old_versions = global_state.change_result(_.remove_versions(prune_size))
-              if (!old_versions.isEmpty) prover.get.remove_versions(old_versions)
+              if (old_versions.nonEmpty) prover.get.remove_versions(old_versions)
             }
 
           case Update_Options(options) =>
@@ -603,7 +603,7 @@
   { manager.send(Cancel_Exec(exec_id)) }
 
   def update(doc_blobs: Document.Blobs, edits: List[Document.Edit_Text])
-  { if (!edits.isEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits)) }
+  { if (edits.nonEmpty) manager.send_wait(Session.Raw_Edits(doc_blobs, edits)) }
 
   def update_options(options: Options)
   { manager.send_wait(Update_Options(options)) }
--- a/src/Pure/Thy/thy_syntax.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/Thy/thy_syntax.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -79,7 +79,7 @@
       case (name, Document.Node.Deps(header)) =>
         val node = nodes(name)
         val update_header =
-          !node.header.errors.isEmpty || !header.errors.isEmpty || node.header != header
+          node.header.errors.nonEmpty || header.errors.nonEmpty || node.header != header
         if (update_header) {
           val node1 = node.update_header(header)
           if (node.header.imports != node1.header.imports ||
@@ -334,7 +334,7 @@
             val commands = node.commands
 
             val node1 =
-              if (reparse_set(name) && !commands.isEmpty)
+              if (reparse_set(name) && commands.nonEmpty)
                 node.update_commands(
                   reparse_spans(resources, syntax, get_blob,
                     name, commands, commands.head, commands.last))
@@ -352,6 +352,6 @@
         (doc_edits.toList.filterNot(_._2.is_void), Document.Version.make(nodes))
       }
 
-    Session.Change(previous, syntax_changed, !syntax_changed.isEmpty, doc_edits, version)
+    Session.Change(previous, syntax_changed, syntax_changed.nonEmpty, doc_edits, version)
   }
 }
--- a/src/Pure/Tools/bibtex.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/Tools/bibtex.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -155,7 +155,7 @@
 
     private val content: Option[List[Token]] =
       tokens match {
-        case Token(Token.Kind.KEYWORD, "@") :: body if !body.isEmpty =>
+        case Token(Token.Kind.KEYWORD, "@") :: body if body.nonEmpty =>
           (body.init.filterNot(_.is_ignored), body.last) match {
             case (tok :: Token(Token.Kind.KEYWORD, "{") :: toks, Token(Token.Kind.KEYWORD, "}"))
             if tok.is_kind => Some(toks)
--- a/src/Pure/Tools/build.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Pure/Tools/build.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -166,7 +166,7 @@
       session_groups: List[String], sessions: List[String]): (List[String], Session_Tree) =
     {
       val bad_sessions = sessions.filterNot(isDefinedAt(_))
-      if (!bad_sessions.isEmpty) error("Undefined session(s): " + commas_quote(bad_sessions))
+      if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions))
 
       val pre_selected =
       {
@@ -805,7 +805,7 @@
       for (name <- full_tree.graph.all_succs(selected)) {
         val files =
           List(Path.basic(name), log(name), log_gz(name)).map(output_dir + _).filter(_.is_file)
-        if (!files.isEmpty) progress.echo("Cleaning " + name + " ...")
+        if (files.nonEmpty) progress.echo("Cleaning " + name + " ...")
         if (!files.forall(p => p.file.delete)) progress.echo(name + " FAILED to delete")
       }
     }
@@ -944,7 +944,7 @@
       for ((chapter, entries) <- browser_chapters)
         Present.update_chapter_index(browser_info, chapter, entries)
 
-      if (!browser_chapters.isEmpty && !(browser_info + Path.explode("index.html")).is_file)
+      if (browser_chapters.nonEmpty && !(browser_info + Path.explode("index.html")).is_file)
       {
         Isabelle_System.mkdirs(browser_info)
         File.copy(Path.explode("~~/lib/logo/isabelle.gif"),
--- a/src/Tools/Code/code_haskell.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Code/code_haskell.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -9,7 +9,6 @@
   val language_params: string
   val target: string
   val print_numeral: string -> int -> string
-  val setup: theory -> theory
 end;
 
 structure Code_Haskell : CODE_HASKELL =
@@ -493,13 +492,8 @@
 
 (** Isar setup **)
 
-val _ =
-  Outer_Syntax.command @{command_spec "code_monad"} "define code syntax for monads"
-    (Parse.term -- Parse.name >> (fn (raw_bind, target) =>
-      Toplevel.theory (add_monad target raw_bind)));
-
-val setup =
-  Code_Target.add_language
+val _ = Theory.setup
+  (Code_Target.add_language
     (target, { serializer = serializer, literals = literals,
       check = { env_var = "ISABELLE_GHC", make_destination = I,
         make_command = fn module_name =>
@@ -519,6 +513,11 @@
     ]
   #> fold (Code_Target.add_reserved target) prelude_import_unqualified
   #> fold (Code_Target.add_reserved target o fst) prelude_import_unqualified_constr
-  #> fold (fold (Code_Target.add_reserved target) o snd) prelude_import_unqualified_constr;
+  #> fold (fold (Code_Target.add_reserved target) o snd) prelude_import_unqualified_constr);
+
+val _ =
+  Outer_Syntax.command @{command_spec "code_monad"} "define code syntax for monads"
+    (Parse.term -- Parse.name >> (fn (raw_bind, target) =>
+      Toplevel.theory (add_monad target raw_bind)));
 
 end; (*struct*)
--- a/src/Tools/Code/code_ml.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Code/code_ml.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -8,7 +8,6 @@
 sig
   val target_SML: string
   val target_OCaml: string
-  val setup: theory -> theory
 end;
 
 structure Code_ML : CODE_ML =
@@ -860,8 +859,8 @@
     print_typ (INFX (1, R)) ty2
   );
 
-val setup =
-  Code_Target.add_language
+val _ = Theory.setup
+  (Code_Target.add_language
     (target_SML, { serializer = serializer_sml, literals = literals_sml,
       check = { env_var = "ISABELLE_PROCESS",
         make_destination = fn p => Path.append p (Path.explode "ROOT.ML"),
@@ -887,6 +886,6 @@
       "sig", "struct", "then", "to", "true", "try", "type", "val",
       "virtual", "when", "while", "with"
     ]
-  #> fold (Code_Target.add_reserved target_OCaml) ["failwith", "mod", "Big_int"];
+  #> fold (Code_Target.add_reserved target_OCaml) ["failwith", "mod", "Big_int"]);
 
 end; (*struct*)
--- a/src/Tools/Code/code_preproc.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Code/code_preproc.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -280,8 +280,8 @@
   type T = string list option;
   val empty = SOME [];
   val extend = I;
-  fun merge (NONE, d2) = NONE
-    | merge (d1, NONE) = NONE
+  fun merge (NONE, _) = NONE
+    | merge (_, NONE) = NONE
     | merge (SOME cs1, SOME cs2) = SOME (Library.merge (op =) (cs1, cs2));
 );
 
@@ -567,21 +567,26 @@
 
 (** setup **)
 
-val _ =
+val _ = Theory.setup (
   let
     fun mk_attribute f = Thm.declaration_attribute (fn thm => Context.mapping (f thm) I);
     fun add_del_attribute_parser process =
       Attrib.add_del (mk_attribute (process Simplifier.add_simp))
         (mk_attribute (process Simplifier.del_simp));
   in
-    Context.>> (Context.map_theory
-      (Attrib.setup @{binding code_unfold} (add_del_attribute_parser process_unfold)
-        "preprocessing equations for code generator"
-      #> Attrib.setup @{binding code_post} (add_del_attribute_parser process_post)
-        "postprocessing equations for code generator"
-      #> Attrib.setup @{binding code_abbrev} (add_del_attribute_parser process_abbrev)
-        "post- and preprocessing equations for code generator"))
-  end;
+    Attrib.setup @{binding code_unfold} (add_del_attribute_parser process_unfold)
+      "preprocessing equations for code generator"
+    #> Attrib.setup @{binding code_post} (add_del_attribute_parser process_post)
+      "postprocessing equations for code generator"
+    #> Attrib.setup @{binding code_abbrev} (add_del_attribute_parser process_abbrev)
+      "post- and preprocessing equations for code generator"
+    #> Attrib.setup @{binding code_preproc_trace}
+      ((Scan.lift (Args.$$$ "off" >> K trace_none)
+      || (Scan.lift (Args.$$$ "only" |-- Args.colon |-- Scan.repeat1 Parse.term))
+         >> trace_only_ext
+      || Scan.succeed trace_all)
+      >> (Thm.declaration_attribute o K)) "tracing of the code generator preprocessor"
+  end);
 
 val _ =
   Outer_Syntax.command @{command_spec "print_codeproc"} "print code preprocessor setup"
--- a/src/Tools/Code/code_scala.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Code/code_scala.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -8,7 +8,6 @@
 sig
   val target: string
   val case_insensitive: bool Config.T;
-  val setup: theory -> theory
 end;
 
 structure Code_Scala : CODE_SCALA =
@@ -423,8 +422,8 @@
 
 (** Isar setup **)
 
-val setup =
-  Code_Target.add_language
+val _ = Theory.setup
+  (Code_Target.add_language
     (target, { serializer = serializer, literals = literals,
       check = { env_var = "SCALA_HOME",
         make_destination = fn p => Path.append p (Path.explode "ROOT.scala"),
@@ -446,6 +445,6 @@
     ]
   #> fold (Code_Target.add_reserved target) [
       "apply", "sys", "scala", "BigInt", "Nil", "List"
-    ];
+    ]);
 
 end; (*struct*)
--- a/src/Tools/Code/code_simp.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Code/code_simp.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -14,7 +14,6 @@
     -> Proof.context -> conv
   val static_tac: { ctxt: Proof.context, simpset: simpset option, consts: string list }
     -> Proof.context -> int -> tactic
-  val setup: theory -> theory
   val trace: bool Config.T
 end;
 
@@ -86,10 +85,10 @@
 fun dynamic_value ctxt =
   snd o Logic.dest_equals o Thm.prop_of o dynamic_conv ctxt o Thm.cterm_of (Proof_Context.theory_of ctxt);
 
-val setup =
-  Method.setup @{binding code_simp}
+val _ = Theory.setup 
+  (Method.setup @{binding code_simp}
     (Scan.succeed (SIMPLE_METHOD' o (CHANGED_PROP oo dynamic_tac)))
-    "simplification with code equations";
+    "simplification with code equations");
 
 
 (* evaluation with static code context *)
--- a/src/Tools/Code/code_target.ML	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Code/code_target.ML	Fri Jan 09 09:17:10 2015 +0100
@@ -45,7 +45,7 @@
   val serialization: (int -> Path.T option -> 'a -> unit)
     -> (Code_Symbol.T list -> int -> 'a -> (string * string) list * (Code_Symbol.T -> string option))
     -> 'a -> serialization
-  val set_default_code_width: int -> theory -> theory
+  val default_code_width: int Config.T
 
   type ('a, 'b, 'c, 'd, 'e, 'f) symbol_attr_decl
   val set_identifiers: (string, string, string, string, string, string) symbol_attr_decl
@@ -55,8 +55,6 @@
   val add_reserved: string -> string -> theory -> theory
 
   val codegen_tool: string (*theory name*) -> string (*export_code expr*) -> unit
-
-  val setup: theory -> theory
 end;
 
 structure Code_Target : CODE_TARGET =
@@ -190,21 +188,21 @@
 
 structure Targets = Theory_Data
 (
-  type T = (target * Code_Printer.data) Symtab.table * int;
-  val empty = (Symtab.empty, 80);
+  type T = (target * Code_Printer.data) Symtab.table;
+  val empty = Symtab.empty;
   val extend = I;
-  fun merge ((targets1, width1), (targets2, width2)) : T =
-    (Symtab.join (fn target_name => fn ((target1, data1), (target2, data2)) =>
+  fun merge (targets1, targets2) : T =
+    Symtab.join (fn target_name => fn ((target1, data1), (target2, data2)) =>
       if #serial target1 = #serial target2 then
       ({ serial = #serial target1, language = #language target1,
         ancestry = merge_ancestry (#ancestry target1, #ancestry target2)},
         Code_Printer.merge_data (data1, data2))
       else error ("Incompatible targets: " ^ quote target_name) 
-    ) (targets1, targets2), Int.max (width1, width2))
+    ) (targets1, targets2)
 );
 
-fun exists_target thy = Symtab.defined (fst (Targets.get thy));
-fun lookup_target_data thy = Symtab.lookup (fst (Targets.get thy));
+fun exists_target thy = Symtab.defined (Targets.get thy);
+fun lookup_target_data thy = Symtab.lookup (Targets.get thy);
 
 fun fold1 f xs = fold f (tl xs) (hd xs);
 
@@ -231,7 +229,7 @@
       else ();
   in
     thy
-    |> (Targets.map o apfst o Symtab.update) (target_name, (target, Code_Printer.empty_data))  
+    |> (Targets.map o Symtab.update) (target_name, (target, Code_Printer.empty_data))  
   end;
 
 fun add_language (target_name, language) =
@@ -261,7 +259,7 @@
     val _ = assert_target (Proof_Context.init_global thy) target_name;
   in
     thy
-    |> (Targets.map o apfst o Symtab.map_entry target_name o apsnd o Code_Printer.map_data) f
+    |> (Targets.map o Symtab.map_entry target_name o apsnd o Code_Printer.map_data) f
   end;
 
 fun map_reserved target_name =
@@ -271,11 +269,14 @@
 fun map_printings target_name =
   map_data target_name o @{apply 3 (3)};
 
-fun set_default_code_width k = (Targets.map o apsnd) (K k);
-
 
 (** serializer usage **)
 
+(* technical aside: pretty printing width *)
+
+val default_code_width = Attrib.setup_config_int @{binding "default_code_width"} (K 80);
+
+
 (* montage *)
 
 fun the_language ctxt =
@@ -287,10 +288,9 @@
 
 fun activate_target ctxt target_name =
   let
-    val thy = Proof_Context.theory_of ctxt
-    val (_, default_width) = Targets.get thy;
+    val thy = Proof_Context.theory_of ctxt;
     val (modify, target_data) = join_ancestry thy target_name;
-  in (default_width, target_data, modify) end;
+  in (target_data, modify) end;
 
 fun project_program ctxt syms_hidden syms1 program2 =
   let
@@ -328,7 +328,8 @@
 
 fun mount_serializer ctxt target_name some_width module_name args program syms =
   let
-    val (default_width, (target, data), modify) = activate_target ctxt target_name;
+    val default_width = Config.get ctxt default_code_width;
+    val ((target, data), modify) = activate_target ctxt target_name;
     val serializer = (#serializer o #language) target;
     val (prepared_serializer, (prepared_syms, prepared_program)) =
       prepare_serializer ctxt serializer
@@ -492,15 +493,15 @@
 
 in
 
-val antiq_setup =
-  Thy_Output.antiquotation @{binding code_stmts}
+val _ = Theory.setup
+  (Thy_Output.antiquotation @{binding code_stmts}
     (parse_const_terms --
       Scan.repeat (parse_consts || parse_types || parse_classes || parse_instances)
       -- Scan.lift (Args.parens (Args.name -- Scan.option Parse.int)))
     (fn {context = ctxt, ...} => fn ((mk_cs, mk_stmtss), (target_name, some_width)) =>
         present_code ctxt (mk_cs ctxt)
           (maps (fn f => f ctxt) mk_stmtss)
-          target_name some_width "Example" []);
+          target_name some_width "Example" []));
 
 end;
 
@@ -673,9 +674,4 @@
     | NONE => error ("Bad directive " ^ quote cmd_expr)
   end;
 
-
-(** theory setup **)
-
-val setup = antiq_setup;
-
 end; (*struct*)
--- a/src/Tools/Code_Generator.thy	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Code_Generator.thy	Fri Jan 09 09:17:10 2015 +0100
@@ -16,15 +16,6 @@
 
 ML_file "~~/src/Tools/cache_io.ML"
 ML_file "~~/src/Tools/Code/code_preproc.ML"
-
-attribute_setup code_preproc_trace = {*
-  (Scan.lift (Args.$$$ "off" >> K Code_Preproc.trace_none)
-  || (Scan.lift (Args.$$$ "only" |-- Args.colon |-- Scan.repeat1 Parse.term))
-       >> Code_Preproc.trace_only_ext
-  || Scan.succeed Code_Preproc.trace_all)
-  >> (Thm.declaration_attribute o K)
-*} "tracing of the code generator preprocessor"
-
 ML_file "~~/src/Tools/Code/code_symbol.ML"
 ML_file "~~/src/Tools/Code/code_thingol.ML"
 ML_file "~~/src/Tools/Code/code_simp.ML"
@@ -35,14 +26,6 @@
 ML_file "~~/src/Tools/Code/code_haskell.ML"
 ML_file "~~/src/Tools/Code/code_scala.ML"
 
-setup {*
-  Code_Simp.setup
-  #> Code_Target.setup
-  #> Code_ML.setup
-  #> Code_Haskell.setup
-  #> Code_Scala.setup
-*}
-
 code_datatype "TYPE('a\<Colon>{})"
 
 definition holds :: "prop" where
--- a/src/Tools/Graphview/popups.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/Graphview/popups.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -126,7 +126,7 @@
 
       popup.add(new JPopupMenu.Separator)
     }
-    if (!selected_nodes.isEmpty) {
+    if (selected_nodes.nonEmpty) {
       val text =
         if (selected_nodes.length > 1) "multiple"
         else quote(selected_nodes.head.toString)
--- a/src/Tools/jEdit/src/bibtex_jedit.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/bibtex_jedit.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -82,7 +82,7 @@
       original <- JEdit_Lib.try_get_text(text_area.getBuffer, r)
       orig = Library.perhaps_unquote(original)
       entries = complete(name).filter(_ != orig)
-      if !entries.isEmpty
+      if entries.nonEmpty
       items =
         entries.map({
           case entry =>
--- a/src/Tools/jEdit/src/completion_popup.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/completion_popup.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -228,7 +228,7 @@
         r2 = Text.Range(r1.start + 1, r1.stop - 1)
         if Path.is_valid(s2)
         paths = complete(s2)
-        if !paths.isEmpty
+        if paths.nonEmpty
         items = paths.map(p => Completion.Item(r2, s2, "", p._2, p._1, 0, false))
       } yield Completion.Result(r2, s2, false, items)
     }
@@ -630,7 +630,7 @@
 
   GUI_Thread.require {}
 
-  require(!items.isEmpty)
+  require(items.nonEmpty)
   val multi = items.length > 1
 
 
--- a/src/Tools/jEdit/src/document_model.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/document_model.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -123,7 +123,7 @@
           cmd <- snapshot.node.load_commands
           blob_name <- cmd.blobs_names
           blob_buffer <- JEdit_Lib.jedit_buffer(blob_name)
-          if !JEdit_Lib.jedit_text_areas(blob_buffer).isEmpty
+          if JEdit_Lib.jedit_text_areas(blob_buffer).nonEmpty
           start <- snapshot.node.command_start(cmd)
           range = snapshot.convert(cmd.proper_range + start)
         } yield range
@@ -221,7 +221,7 @@
     private val pending = new mutable.ListBuffer[Text.Edit]
     private var last_perspective = Document.Node.no_perspective_text
 
-    def is_pending(): Boolean = pending_clear || !pending.isEmpty
+    def is_pending(): Boolean = pending_clear || pending.nonEmpty
     def snapshot(): List[Text.Edit] = pending.toList
 
     def flushed_edits(doc_blobs: Document.Blobs): List[Document.Edit_Text] =
@@ -229,7 +229,7 @@
       val clear = pending_clear
       val edits = snapshot()
       val (reparse, perspective) = node_perspective(doc_blobs)
-      if (clear || reparse || !edits.isEmpty || last_perspective != perspective) {
+      if (clear || reparse || edits.nonEmpty || last_perspective != perspective) {
         pending_clear = false
         pending.clear
         last_perspective = perspective
--- a/src/Tools/jEdit/src/isabelle_options.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/isabelle_options.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -58,7 +58,7 @@
       if (name.endsWith("_color") && opt.section == JEdit_Options.RENDERING_SECTION)
     } yield PIDE.options.make_color_component(opt))
 
-  assert(!predefined.isEmpty)
+  assert(predefined.nonEmpty)
 
   protected val components = PIDE.options.make_components(predefined, _ => false)
 }
--- a/src/Tools/jEdit/src/jedit_editor.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/jedit_editor.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -41,7 +41,7 @@
         name => (name, Document.Node.no_perspective_text))
 
     val edits = models.flatMap(_.flushed_edits(doc_blobs)) ::: removed_perspective
-    if (!edits.isEmpty) session.update(doc_blobs, edits)
+    if (edits.nonEmpty) session.update(doc_blobs, edits)
   }
 
   private val delay_flush =
--- a/src/Tools/jEdit/src/jedit_resources.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/jedit_resources.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -114,7 +114,7 @@
 
   override def commit(change: Session.Change)
   {
-    if (!change.syntax_changed.isEmpty)
+    if (change.syntax_changed.nonEmpty)
       GUI_Thread.later {
         val changed = change.syntax_changed.toSet
         for {
--- a/src/Tools/jEdit/src/plugin.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/plugin.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -220,7 +220,7 @@
             val files = thy_info.dependencies("", thys).deps.map(_.name.node).
               filter(file => !loaded_buffer(file) && PIDE.resources.check_file(view, file))
 
-            if (!files.isEmpty) {
+            if (files.nonEmpty) {
               if (PIDE.options.bool("jedit_auto_load")) {
                 files.foreach(file => jEdit.openFile(null: View, file))
               }
--- a/src/Tools/jEdit/src/rendering.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/rendering.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -465,7 +465,7 @@
         range, Nil, Rendering.tooltip_message_elements, _ =>
         {
           case (msgs, Text.Info(info_range, msg @ XML.Elem(Markup(Markup.BAD, _), body)))
-          if !body.isEmpty =>
+          if body.nonEmpty =>
             val entry: Command.Results.Entry = (Document_ID.make(), msg)
             Some(Text.Info(snapshot.convert(info_range), entry) :: msgs)
 
@@ -665,7 +665,7 @@
             command_states =>
               {
                 case (((status, color), Text.Info(_, XML.Elem(markup, _))))
-                if !status.isEmpty && Protocol.proper_status_elements(markup.name) =>
+                if status.nonEmpty && Protocol.proper_status_elements(markup.name) =>
                   Some((markup :: status, color))
                 case (_, Text.Info(_, XML.Elem(Markup(Markup.BAD, _), _))) =>
                   Some((Nil, Some(bad_color)))
@@ -686,7 +686,7 @@
               })
         color <-
           (result match {
-            case (markups, opt_color) if !markups.isEmpty =>
+            case (markups, opt_color) if markups.nonEmpty =>
               val status = Protocol.Status.make(markups.iterator)
               if (status.is_unprocessed) Some(unprocessed1_color)
               else if (status.is_running) Some(running1_color)
--- a/src/Tools/jEdit/src/rich_text_area.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/rich_text_area.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -408,7 +408,7 @@
               val s2 = str.substring(i, j)
               val s3 = str.substring(j)
 
-              if (!s1.isEmpty) gfx.drawString(s1, x1, y)
+              if (s1.nonEmpty) gfx.drawString(s1, x1, y)
 
               val astr = new AttributedString(s2)
               astr.addAttribute(TextAttribute.FONT, chunk_font)
@@ -416,7 +416,7 @@
               astr.addAttribute(TextAttribute.SWAP_COLORS, TextAttribute.SWAP_COLORS_ON)
               gfx.drawString(astr.getIterator, x1 + string_width(s1), y)
 
-              if (!s3.isEmpty)
+              if (s3.nonEmpty)
                 gfx.drawString(s3, x1 + string_width(str.substring(0, j)), y)
 
             case _ =>
--- a/src/Tools/jEdit/src/spell_checker.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/spell_checker.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -84,7 +84,7 @@
       range = JEdit_Lib.before_caret_range(text_area, rendering)
       word <- current_word(text_area, rendering, range)
       words = spell_checker.complete(word.info)
-      if !words.isEmpty
+      if words.nonEmpty
       descr = "(from dictionary " + quote(spell_checker.toString) + ")"
       items =
         words.map(w => Completion.Item(word.range, word.info, "", List(w, descr), w, 0, false))
@@ -274,7 +274,7 @@
         if upd.permanent
       } yield Spell_Checker.Decl(word, upd.include)).toList
 
-    if (!permanent_decls.isEmpty || dictionary.user_path.is_file) {
+    if (permanent_decls.nonEmpty || dictionary.user_path.is_file) {
       val header = """# User updates for spell-checker dictionary
 #
 #   * each line contains at most one word
@@ -358,7 +358,7 @@
       result.getOrElse(Nil).map(recover_case)
     }
 
-  def complete_enabled(word: String): Boolean = !complete(word).isEmpty
+  def complete_enabled(word: String): Boolean = complete(word).nonEmpty
 }
 
 
--- a/src/Tools/jEdit/src/timing_dockable.scala	Fri Jan 09 09:16:51 2015 +0100
+++ b/src/Tools/jEdit/src/timing_dockable.scala	Fri Jan 09 09:17:10 2015 +0100
@@ -161,7 +161,7 @@
     val timing = nodes_timing.getOrElse(name, Protocol.empty_node_timing)
 
     val theories =
-      (for ((node_name, node_timing) <- nodes_timing.toList if !node_timing.commands.isEmpty)
+      (for ((node_name, node_timing) <- nodes_timing.toList if node_timing.commands.nonEmpty)
         yield Theory_Entry(node_name, node_timing.total, false)).sorted(Entry.Ordering)
     val commands =
       (for ((command, command_timing) <- timing.commands.toList)