merged
author wenzelm
Wed, 22 Jan 2014 17:14:27 +0100
changeset 55114 0ee5c17f2207
parent 55102 761e40ce91bc (current diff)
parent 55113 497693486858 (diff)
child 55115 fbf24a326206
merged
NEWS
src/Doc/Datatypes/Datatypes.thy
src/HOL/Library/LaTeXsugar.thy
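
The bulk of this changeset is one mechanical conversion: the argument of the @{rail} document antiquotation changes from an outer quoted string to a cartouche, so embedded double quotes and backslash escapes disappear from the rail specifications. A condensed before/after sketch of the pattern, assembled from the Document_Preparation.thy hunks below (not a verbatim excerpt):

  Before (quoted string, escapes required):

    @{rail "
      @@{antiquotation \"file\"} options @{syntax name} |
      '\\\\\\\\'
    "}

  After (cartouche, quotes and \<newline> written literally):

    @{rail \<open>
      @@{antiquotation "file"} options @{syntax name} |
      '\<newline>'
    \<close>}
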
--- a/NEWS	Wed Jan 22 10:13:40 2014 +0100
+++ b/NEWS	Wed Jan 22 17:14:27 2014 +0100
@@ -43,6 +43,10 @@
 context discipline.  See also Assumption.add_assumes and the more
 primitive Thm.assume_hyps.
 
+* Inner syntax token language allows regular quoted strings "..."
+(only makes sense in practice, if outer syntax is delimited
+differently).
+
 
 *** HOL ***
 
--- a/etc/isar-keywords.el	Wed Jan 22 10:13:40 2014 +0100
+++ b/etc/isar-keywords.el	Wed Jan 22 17:14:27 2014 +0100
@@ -272,8 +272,10 @@
     "syntax"
     "syntax_declaration"
     "term"
+    "term_cartouche"
     "termination"
     "text"
+    "text_cartouche"
     "text_raw"
     "then"
     "theorem"
@@ -453,6 +455,7 @@
     "solve_direct"
     "spark_status"
     "term"
+    "term_cartouche"
     "thm"
     "thm_deps"
     "thy_deps"
@@ -591,6 +594,7 @@
     "syntax"
     "syntax_declaration"
     "text"
+    "text_cartouche"
     "text_raw"
     "theorems"
     "translations"
--- a/src/Doc/Datatypes/Datatypes.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/Datatypes/Datatypes.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -451,12 +451,12 @@
   @{command_def "datatype_new"} & : & @{text "local_theory \<rightarrow> local_theory"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command datatype_new} target? @{syntax dt_options}? \<newline>
     (@{syntax dt_name} '=' (@{syntax ctor} + '|') + @'and')
   ;
   @{syntax_def dt_options}: '(' (('no_discs_sels' | 'no_code' | 'rep_compat') + ',') ')'
-"}
+\<close>}
 
 The syntactic entity \synt{target} can be used to specify a local
 context---e.g., @{text "(in linorder)"}. It is documented in the Isar reference
@@ -485,13 +485,13 @@
 The left-hand sides of the datatype equations specify the name of the type to
 define, its type parameters, and additional information:
 
-@{rail "
+@{rail \<open>
   @{syntax_def dt_name}: @{syntax tyargs}? name @{syntax map_rel}? mixfix?
   ;
   @{syntax_def tyargs}: typefree | '(' ((name ':')? typefree + ',') ')'
   ;
   @{syntax_def map_rel}: '(' ((('map' | 'rel') ':' name) +) ')'
-"}
+\<close>}
 
 \noindent
 The syntactic entity \synt{name} denotes an identifier, \synt{typefree}
@@ -504,10 +504,10 @@
 Inside a mutually recursive specification, all defined datatypes must
 mention exactly the same type variables in the same order.
 
-@{rail "
+@{rail \<open>
   @{syntax_def ctor}: (name ':')? name (@{syntax ctor_arg} * ) \<newline>
     @{syntax dt_sel_defaults}? mixfix?
-"}
+\<close>}
 
 \medskip
 
@@ -517,9 +517,9 @@
 can be supplied at the front to override the default name
 (@{text t.is_C\<^sub>j}).
 
-@{rail "
+@{rail \<open>
   @{syntax_def ctor_arg}: type | '(' name ':' type ')'
-"}
+\<close>}
 
 \medskip
 
@@ -529,9 +529,9 @@
 (@{text un_C\<^sub>ji}). The same selector names can be reused for several
 constructors as long as they share the same type.
 
-@{rail "
+@{rail \<open>
   @{syntax_def dt_sel_defaults}: '(' 'defaults' (name ':' term +) ')'
-"}
+\<close>}
 
 \noindent
 Given a constructor
@@ -552,9 +552,9 @@
   @{command_def "datatype_new_compat"} & : & @{text "local_theory \<rightarrow> local_theory"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command datatype_new_compat} names
-"}
+\<close>}
 
 \noindent
 The old datatype package provides some functionality that is not yet replicated
@@ -1346,11 +1346,11 @@
   @{command_def "primrec_new"} & : & @{text "local_theory \<rightarrow> local_theory"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command primrec_new} target? fixes \<newline> @'where' (@{syntax pr_equation} + '|')
   ;
   @{syntax_def pr_equation}: thmdecl? prop
-"}
+\<close>}
 *}
 
 
@@ -1568,10 +1568,10 @@
   @{command_def "codatatype"} & : & @{text "local_theory \<rightarrow> local_theory"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command codatatype} target? \<newline>
     (@{syntax dt_name} '=' (@{syntax ctor} + '|') + @'and')
-"}
+\<close>}
 
 \noindent
 Definitions of codatatypes have almost exactly the same syntax as for datatypes
@@ -2235,7 +2235,7 @@
   @{command_def "primcorecursive"} & : & @{text "local_theory \<rightarrow> proof(prove)"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   (@@{command primcorec} | @@{command primcorecursive}) target? \<newline>
     @{syntax pcr_option}? fixes @'where'
     (@{syntax pcr_formula} + '|')
@@ -2243,7 +2243,7 @@
   @{syntax_def pcr_option}: '(' ('sequential' | 'exhaustive') ')'
   ;
   @{syntax_def pcr_formula}: thmdecl? prop (@'of' (term * ))?
-"}
+\<close>}
 
 The optional target is potentially followed by a corecursion-specific option:
 
@@ -2319,11 +2319,11 @@
   @{command_def "bnf"} & : & @{text "local_theory \<rightarrow> proof(prove)"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command bnf} target? (name ':')? typ \<newline>
     'map:' term ('sets:' (term +))? 'bd:' term \<newline>
     ('wits:' (term +))? ('rel:' term)?
-"}
+\<close>}
 *}
 
 
@@ -2336,7 +2336,7 @@
   @{text "bnf_decl"} & : & @{text "local_theory \<rightarrow> local_theory"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command bnf_decl} target? @{syntax dt_name}
   ;
   @{syntax_def dt_name}: @{syntax tyargs}? name @{syntax map_rel}? mixfix?
@@ -2344,7 +2344,7 @@
   @{syntax_def tyargs}: typefree | '(' (((name | '-') ':')? typefree + ',') ')'
   ;
   @{syntax_def map_rel}: '(' ((('map' | 'rel') ':' name) +) ')'
-"}
+\<close>}
 
 Declares a fresh type and fresh constants (map, set, relator, cardinal bound)
 and asserts the bnf properties for these constants as axioms. Additionally,
@@ -2364,9 +2364,9 @@
   @{command_def "print_bnfs"} & : & @{text "local_theory \<rightarrow>"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command print_bnfs}
-"}
+\<close>}
 *}
 
 
@@ -2410,7 +2410,7 @@
   @{command_def "wrap_free_constructors"} & : & @{text "local_theory \<rightarrow> proof(prove)"}
 \end{matharray}
 
-@{rail "
+@{rail \<open>
   @@{command wrap_free_constructors} target? @{syntax dt_options} \<newline>
     term_list name @{syntax wfc_discs_sels}?
   ;
@@ -2419,7 +2419,7 @@
   @{syntax_def name_term}: (name ':' term)
   ;
   X_list: '[' (X + ',') ']'
-"}
+\<close>}
 
 % options: no_discs_sels no_code rep_compat
 
--- a/src/Doc/IsarImplementation/Isar.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarImplementation/Isar.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -563,9 +563,9 @@
   @{ML_antiquotation_def attributes} & : & @{text ML_antiquotation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
   @@{ML_antiquotation attributes} attributes
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarImplementation/Logic.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarImplementation/Logic.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -196,7 +196,7 @@
   @{ML_antiquotation_def "typ"} & : & @{text ML_antiquotation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
   @@{ML_antiquotation class} nameref
   ;
   @@{ML_antiquotation sort} sort
@@ -206,7 +206,7 @@
    @@{ML_antiquotation nonterminal}) nameref
   ;
   @@{ML_antiquotation typ} type
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -444,7 +444,7 @@
   @{ML_antiquotation_def "prop"} & : & @{text ML_antiquotation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
   (@@{ML_antiquotation const_name} |
    @@{ML_antiquotation const_abbrev}) nameref
   ;
@@ -453,7 +453,7 @@
   @@{ML_antiquotation term} term
   ;
   @@{ML_antiquotation prop} prop
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -780,7 +780,7 @@
   @{ML_antiquotation_def "lemma"} & : & @{text ML_antiquotation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
   @@{ML_antiquotation ctyp} typ
   ;
   @@{ML_antiquotation cterm} term
@@ -793,7 +793,7 @@
   ;
   @@{ML_antiquotation lemma} ('(' @'open' ')')? ((prop +) + @'and') \<newline>
     @'by' method method?
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarImplementation/ML.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarImplementation/ML.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -666,9 +666,9 @@
   concept of \emph{ML antiquotation}.  The standard token language of
   ML is augmented by special syntactic entities of the following form:
 
-  @{rail "
+  @{rail \<open>
   @{syntax_def antiquote}: '@{' nameref args '}'
-  "}
+  \<close>}
 
   Here @{syntax nameref} and @{syntax args} are regular outer syntax
   categories \cite{isabelle-isar-ref}.  Attributes and proof methods
@@ -699,9 +699,9 @@
   @{ML_antiquotation_def "make_string"} & : & @{text ML_antiquotation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
   @@{ML_antiquotation make_string}
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarImplementation/Prelim.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarImplementation/Prelim.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -165,11 +165,11 @@
   @{ML_antiquotation_def "theory_context"} & : & @{text ML_antiquotation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
   @@{ML_antiquotation theory} nameref?
   ;
   @@{ML_antiquotation theory_context} nameref
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1025,9 +1025,9 @@
   @{ML_antiquotation_def "binding"} & : & @{text ML_antiquotation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
   @@{ML_antiquotation binding} name
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarRef/Document_Preparation.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/Document_Preparation.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -47,13 +47,13 @@
   markup commands, but have a different status within Isabelle/Isar
   syntax.
 
-  @{rail "
+  @{rail \<open>
     (@@{command chapter} | @@{command section} | @@{command subsection} |
       @@{command subsubsection} | @@{command text}) @{syntax target}? @{syntax text}
     ;
     (@@{command header} | @@{command text_raw} | @@{command sect} | @@{command subsect} |
       @@{command subsubsect} | @@{command txt} | @@{command txt_raw}) @{syntax text}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -148,7 +148,7 @@
   context.
 
   %% FIXME less monolithic presentation, move to individual sections!?
-  @{rail "
+  @{rail \<open>
     '@{' antiquotation '}'
     ;
     @{syntax_def antiquotation}:
@@ -176,7 +176,7 @@
       @@{antiquotation ML_op} options @{syntax name} |
       @@{antiquotation ML_type} options @{syntax name} |
       @@{antiquotation ML_struct} options @{syntax name} |
-      @@{antiquotation \"file\"} options @{syntax name} |
+      @@{antiquotation "file"} options @{syntax name} |
       @@{antiquotation file_unchecked} options @{syntax name} |
       @@{antiquotation url} options @{syntax name}
     ;
@@ -187,7 +187,7 @@
     styles: '(' (style + ',') ')'
     ;
     style: (@{syntax name} +)
-  "}
+  \<close>}
 
   Note that the syntax of antiquotations may \emph{not} include source
   comments @{verbatim "(*"}~@{text "\<dots>"}~@{verbatim "*)"} nor verbatim
@@ -404,11 +404,11 @@
   presentation tags, to indicate some modification in the way it is
   printed in the document.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def tags}: ( tag * )
     ;
     tag: '%' (@{syntax ident} | @{syntax string})
-  "}
+  \<close>}
 
   Some tags are pre-declared for certain classes of commands, serving
   as default markup if no tags are given in the text:
@@ -457,7 +457,9 @@
     @{antiquotation_def "rail"} & : & @{text antiquotation} \\
   \end{matharray}
 
-  @{rail "'rail' string"}
+  @{rail \<open>
+    'rail' (@{syntax string} | @{syntax cartouche})
+  \<close>}
 
   The @{antiquotation rail} antiquotation allows to include syntax
   diagrams into Isabelle documents.  {\LaTeX} requires the style file
@@ -468,7 +470,7 @@
   The rail specification language is quoted here as Isabelle @{syntax
   string}; it has its own grammar given below.
 
-  @{rail "
+  @{rail \<open>
   rule? + ';'
   ;
   rule: ((identifier | @{syntax antiquotation}) ':')? body
@@ -479,13 +481,13 @@
   ;
   atom: '(' body? ')' | identifier |
     '@'? (string | @{syntax antiquotation}) |
-    '\\\\\\\\'
-  "}
+    '\<newline>'
+  \<close>}
 
   The lexical syntax of @{text "identifier"} coincides with that of
   @{syntax ident} in regular Isabelle syntax, but @{text string} uses
   single quotes instead of double quotes of the standard @{syntax
-  string} category, to avoid extra escapes.
+  string} category.
 
   Each @{text rule} defines a formal language (with optional name),
   using a notation that is similar to EBNF or regular expressions with
@@ -496,62 +498,62 @@
 
   \item Empty @{verbatim "()"}
 
-  @{rail "()"}
+  @{rail \<open>()\<close>}
 
   \item Nonterminal @{verbatim "A"}
 
-  @{rail "A"}
+  @{rail \<open>A\<close>}
 
   \item Nonterminal via Isabelle antiquotation
   @{verbatim "@{syntax method}"}
 
-  @{rail "@{syntax method}"}
+  @{rail \<open>@{syntax method}\<close>}
 
   \item Terminal @{verbatim "'xyz'"}
 
-  @{rail "'xyz'"}
+  @{rail \<open>'xyz'\<close>}
 
   \item Terminal in keyword style @{verbatim "@'xyz'"}
 
-  @{rail "@'xyz'"}
+  @{rail \<open>@'xyz'\<close>}
 
   \item Terminal via Isabelle antiquotation
   @{verbatim "@@{method rule}"}
 
-  @{rail "@@{method rule}"}
+  @{rail \<open>@@{method rule}\<close>}
 
   \item Concatenation @{verbatim "A B C"}
 
-  @{rail "A B C"}
+  @{rail \<open>A B C\<close>}
 
   \item Newline inside concatenation
   @{verbatim "A B C \<newline> D E F"}
 
-  @{rail "A B C \<newline> D E F"}
+  @{rail \<open>A B C \<newline> D E F\<close>}
 
   \item Variants @{verbatim "A | B | C"}
 
-  @{rail "A | B | C"}
+  @{rail \<open>A | B | C\<close>}
 
   \item Option @{verbatim "A ?"}
 
-  @{rail "A ?"}
+  @{rail \<open>A ?\<close>}
 
   \item Repetition @{verbatim "A *"}
 
-  @{rail "A *"}
+  @{rail \<open>A *\<close>}
 
   \item Repetition with separator @{verbatim "A * sep"}
 
-  @{rail "A * sep"}
+  @{rail \<open>A * sep\<close>}
 
   \item Strict repetition @{verbatim "A +"}
 
-  @{rail "A +"}
+  @{rail \<open>A +\<close>}
 
   \item Strict repetition with separator @{verbatim "A + sep"}
 
-  @{rail "A + sep"}
+  @{rail \<open>A + sep\<close>}
 
   \end{itemize}
 *}
@@ -564,10 +566,9 @@
     @{command_def "display_drafts"}@{text "\<^sup>*"} & : & @{text "any \<rightarrow>"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command display_drafts} (@{syntax name} +)
-
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarRef/Generic.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/Generic.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -34,9 +34,9 @@
     @{command_def "print_options"} & : & @{text "context \<rightarrow>"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @{syntax name} ('=' ('true' | 'false' | @{syntax int} | @{syntax float} | @{syntax name}))?
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -70,14 +70,14 @@
     @{method_def fail} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{method fold} | @@{method unfold} | @@{method insert}) @{syntax thmrefs}
     ;
     (@@{method erule} | @@{method drule} | @@{method frule})
       ('(' @{syntax nat} ')')? @{syntax thmrefs}
     ;
     (@@{method intro} | @@{method elim}) @{syntax thmrefs}?
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -136,7 +136,7 @@
     @{attribute_def no_vars}@{text "\<^sup>*"} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{attribute tagged} @{syntax name} @{syntax name}
     ;
     @@{attribute untagged} @{syntax name}
@@ -146,7 +146,7 @@
     (@@{attribute unfolded} | @@{attribute folded}) @{syntax thmrefs}
     ;
     @@{attribute rotated} @{syntax int}?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -201,11 +201,11 @@
     @{method_def split} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method subst} ('(' 'asm' ')')? \<newline> ('(' (@{syntax nat}+) ')')? @{syntax thmref}
     ;
     @@{method split} @{syntax thmrefs}
-  "}
+  \<close>}
 
   These methods provide low-level facilities for equational reasoning
   that are intended for specialized applications only.  Normally,
@@ -304,7 +304,7 @@
     @{method_def raw_tactic}@{text "\<^sup>*"} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{method rule_tac} | @@{method erule_tac} | @@{method drule_tac} |
       @@{method frule_tac} | @@{method cut_tac} | @@{method thin_tac}) @{syntax goal_spec}? \<newline>
     ( dynamic_insts @'in' @{syntax thmref} | @{syntax thmrefs} )
@@ -319,7 +319,7 @@
     ;
 
     dynamic_insts: ((@{syntax name} '=' @{syntax term}) + @'and')
-  "}
+  \<close>}
 
 \begin{description}
 
@@ -406,7 +406,7 @@
     @{method_def simp_all} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{method simp} | @@{method simp_all}) opt? (@{syntax simpmod} * )
     ;
 
@@ -414,7 +414,7 @@
     ;
     @{syntax_def simpmod}: ('add' | 'del' | 'only' | 'split' (() | 'add' | 'del') |
       'cong' (() | 'add' | 'del')) ':' @{syntax thmrefs}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -608,10 +608,10 @@
     @{command_def "print_simpset"}@{text "\<^sup>*"} & : & @{text "context \<rightarrow>"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{attribute simp} | @@{attribute split} | @@{attribute cong})
       (() | 'add' | 'del')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -923,13 +923,13 @@
     simproc & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command simproc_setup} @{syntax name} '(' (@{syntax term} + '|') ')' '='
       @{syntax text} \<newline> (@'identifier' (@{syntax nameref}+))?
     ;
 
     @@{attribute simproc} (('add' ':')? | 'del' ':') (@{syntax name}+)
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1229,12 +1229,12 @@
     @{attribute_def simplified} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{attribute simplified} opt? @{syntax thmrefs}?
     ;
 
     opt: '(' ('no_asm' | 'no_asm_simp' | 'no_asm_use') ')'
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -1490,13 +1490,13 @@
     @{attribute_def swapped} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{attribute intro} | @@{attribute elim} | @@{attribute dest}) ('!' | () | '?') @{syntax nat}?
     ;
     @@{attribute rule} 'del'
     ;
     @@{attribute iff} (((() | 'add') '?'?) | 'del')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1562,9 +1562,9 @@
     @{method_def contradiction} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method rule} @{syntax thmrefs}?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1603,7 +1603,7 @@
     @{method_def deepen} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method blast} @{syntax nat}? (@{syntax clamod} * )
     ;
     @@{method auto} (@{syntax nat} @{syntax nat})? (@{syntax clasimpmod} * )
@@ -1624,7 +1624,7 @@
       ('cong' | 'split') (() | 'add' | 'del') |
       'iff' (((() | 'add') '?'?) | 'del') |
       (('intro' | 'elim' | 'dest') ('!' | () | '?') | 'del')) ':' @{syntax thmrefs}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1735,11 +1735,11 @@
     @{method_def clarsimp} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{method safe} | @@{method clarify}) (@{syntax clamod} * )
     ;
     @@{method clarsimp} (@{syntax clasimpmod} * )
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1926,13 +1926,13 @@
   Generic tools may refer to the information provided by object-logic
   declarations internally.
 
-  @{rail "
+  @{rail \<open>
     @@{command judgment} @{syntax name} '::' @{syntax type} @{syntax mixfix}?
     ;
     @@{attribute atomize} ('(' 'full' ')')?
     ;
     @@{attribute rule_format} ('(' 'noasm' ')')?
-  "}
+  \<close>}
 
   \begin{description}
   
--- a/src/Doc/IsarRef/HOL_Specific.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/HOL_Specific.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -96,17 +96,17 @@
   introduction rule.  The default rule declarations of Isabelle/HOL
   already take care of most common situations.
 
-  @{rail "
+  @{rail \<open>
     (@@{command (HOL) inductive} | @@{command (HOL) inductive_set} |
       @@{command (HOL) coinductive} | @@{command (HOL) coinductive_set})
     @{syntax target}? \<newline>
-    @{syntax \"fixes\"} (@'for' @{syntax \"fixes\"})? (@'where' clauses)? \<newline>
+    @{syntax "fixes"} (@'for' @{syntax "fixes"})? (@'where' clauses)? \<newline>
     (@'monos' @{syntax thmrefs})?
     ;
     clauses: (@{syntax thmdecl}? @{syntax prop} + '|')
     ;
     @@{attribute (HOL) mono} (() | 'add' | 'del')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -264,11 +264,11 @@
     @{command_def (HOL) "fun_cases"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
   \end{matharray}
 
-  @{rail "
-    @@{command (HOL) primrec} @{syntax target}? @{syntax \"fixes\"} @'where' equations
+  @{rail \<open>
+    @@{command (HOL) primrec} @{syntax target}? @{syntax "fixes"} @'where' equations
     ;
     (@@{command (HOL) fun} | @@{command (HOL) function}) @{syntax target}? functionopts?
-      @{syntax \"fixes\"} \<newline> @'where' equations
+      @{syntax "fixes"} \<newline> @'where' equations
     ;
 
     equations: (@{syntax thmdecl}? @{syntax prop} + '|')
@@ -278,7 +278,7 @@
     @@{command (HOL) termination} @{syntax term}?
     ;
     @@{command (HOL) fun_cases} (@{syntax thmdecl}? @{syntax prop} + @'and')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -508,7 +508,7 @@
     @{method_def (HOL) induction_schema} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method (HOL) relation} @{syntax term}
     ;
     @@{method (HOL) lexicographic_order} (@{syntax clasimpmod} * )
@@ -518,7 +518,7 @@
     @@{method (HOL) induction_schema}
     ;
     orders: ( 'max' | 'min' | 'ms' ) *
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -574,11 +574,11 @@
     @{attribute_def (HOL) "partial_function_mono"} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) partial_function} @{syntax target}?
-      '(' @{syntax nameref} ')' @{syntax \"fixes\"} \<newline>
+      '(' @{syntax nameref} ')' @{syntax "fixes"} \<newline>
       @'where' @{syntax thmdecl}? @{syntax prop}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -649,7 +649,7 @@
   "recdef_tc"} for defining recursive are mostly obsolete; @{command
   (HOL) "function"} or @{command (HOL) "fun"} should be used instead.
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) recdef} ('(' @'permissive' ')')? \<newline>
       @{syntax name} @{syntax term} (@{syntax prop} +) hints?
     ;
@@ -661,7 +661,7 @@
       (() | 'add' | 'del') ':' @{syntax thmrefs}) | @{syntax clasimpmod}
     ;
     tc: @{syntax nameref} ('(' @{syntax nat} ')')?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -695,10 +695,10 @@
     @{attribute_def (HOL) recdef_wf} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{attribute (HOL) recdef_simp} | @@{attribute (HOL) recdef_cong} |
       @@{attribute (HOL) recdef_wf}) (() | 'add' | 'del')
-  "}
+  \<close>}
 *}
 
 
@@ -710,7 +710,7 @@
     @{command_def (HOL) "rep_datatype"} & : & @{text "theory \<rightarrow> proof(prove)"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) datatype} (spec + @'and')
     ;
     @@{command (HOL) rep_datatype} ('(' (@{syntax name} +) ')')? (@{syntax term} +)
@@ -719,7 +719,7 @@
     spec: @{syntax typespec_sorts} @{syntax mixfix}? '=' (cons + '|')
     ;
     cons: @{syntax name} (@{syntax type} * ) @{syntax mixfix}?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -873,12 +873,12 @@
     @{command_def (HOL) "record"} & : & @{text "theory \<rightarrow> theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) record} @{syntax typespec_sorts} '=' \<newline>
       (@{syntax type} '+')? (constdecl +)
     ;
     constdecl: @{syntax name} '::' @{syntax type} @{syntax mixfix}?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1074,14 +1074,13 @@
   type_synonym} from Isabelle/Pure merely introduces syntactic
   abbreviations, without any logical significance.
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) typedef} abs_type '=' rep_set
     ;
-
     abs_type: @{syntax typespec_sorts} @{syntax mixfix}?
     ;
     rep_set: @{syntax term} (@'morphisms' @{syntax name} @{syntax name})?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1198,10 +1197,9 @@
     @{command_def (HOL) "enriched_type"} & : & @{text "local_theory \<rightarrow> proof(prove)"}
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) enriched_type} (@{syntax name} ':')? @{syntax term}
-    ;
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1265,28 +1263,26 @@
   packages. The user should consider using these two new packages for
   lifting definitions and transporting theorems.
 
-  @{rail "
-    @@{command (HOL) quotient_type} (spec);
-
+  @{rail \<open>
+    @@{command (HOL) quotient_type} (spec)
+    ;
     spec: @{syntax typespec} @{syntax mixfix}? '=' \<newline>
      @{syntax type} '/' ('partial' ':')? @{syntax term} \<newline>
-     (@'morphisms' @{syntax name} @{syntax name})? (@'parametric' @{syntax thmref})?;
-  "}
-
-  @{rail "
+     (@'morphisms' @{syntax name} @{syntax name})? (@'parametric' @{syntax thmref})?
+  \<close>}
+
+  @{rail \<open>
     @@{command (HOL) quotient_definition} constdecl? @{syntax thmdecl}? \<newline>
-    @{syntax term} 'is' @{syntax term};
-
+    @{syntax term} 'is' @{syntax term}
+    ;
     constdecl: @{syntax name} ('::' @{syntax type})? @{syntax mixfix}?
-  "}
-
-  @{rail "
+  \<close>}
+
+  @{rail \<open>
     @@{method (HOL) lifting} @{syntax thmrefs}?
     ;
-
     @@{method (HOL) lifting_setup} @{syntax thmrefs}?
-    ;
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1399,12 +1395,12 @@
     @{command_def (HOL) "ax_specification"} & : & @{text "theory \<rightarrow> proof(prove)"} \\
   \end{matharray}
 
-  @{rail "
-  (@@{command (HOL) specification} | @@{command (HOL) ax_specification})
-    '(' (decl +) ')' \<newline> (@{syntax thmdecl}? @{syntax prop} +)
-  ;
-  decl: ((@{syntax name} ':')? @{syntax term} '(' @'overloaded' ')'?)
-  "}
+  @{rail \<open>
+    (@@{command (HOL) specification} | @@{command (HOL) ax_specification})
+      '(' (decl +) ')' \<newline> (@{syntax thmdecl}? @{syntax prop} +)
+    ;
+    decl: ((@{syntax name} ':')? @{syntax term} '(' @'overloaded' ')'?)
+  \<close>}
 
   \begin{description}
 
@@ -1458,10 +1454,10 @@
   @{file "~~/src/HOL/ex/Adhoc_Overloading_Examples.thy"} and
   @{file "~~/src/HOL/Library/Monad_Syntax.thy"}.
 
-  @{rail "
+  @{rail \<open>
     (@@{command adhoc_overloading} | @@{command no_adhoc_overloading})
-    (@{syntax nameref} (@{syntax term} + ) + @'and')
-  "}
+      (@{syntax nameref} (@{syntax term} + ) + @'and')
+  \<close>}
 
   \begin{description}
 
@@ -1490,9 +1486,9 @@
     @{attribute_def (HOL) split_format}@{text "\<^sup>*"} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{attribute (HOL) split_format} ('(' 'complete' ')')?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1626,27 +1622,27 @@
     @{attribute_def (HOL) "lifting_restore"} & : & @{text attribute} \\   
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) setup_lifting} ('(' 'no_code' ')')? \<newline>
       @{syntax thmref} @{syntax thmref}? (@'parametric' @{syntax thmref})?;
-  "}
-
-  @{rail "
+  \<close>}
+
+  @{rail \<open>
     @@{command (HOL) lift_definition} @{syntax name} '::' @{syntax type}  @{syntax mixfix}? \<newline>
       'is' @{syntax term} (@'parametric' @{syntax thmref})?;
-  "}
-
-  @{rail "
+  \<close>}
+
+  @{rail \<open>
     @@{command (HOL) lifting_forget} @{syntax nameref};
-  "}
-
-  @{rail "
+  \<close>}
+
+  @{rail \<open>
     @@{command (HOL) lifting_update} @{syntax nameref};
-  "}
-
-  @{rail "
+  \<close>}
+
+  @{rail \<open>
     @@{attribute (HOL) lifting_restore} @{syntax thmref} (@{syntax thmref} @{syntax thmref})?;
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1828,14 +1824,11 @@
   them as necessary when parsing a term. See
   \cite{traytel-berghofer-nipkow-2011} for details.
 
-  @{rail "
+  @{rail \<open>
     @@{attribute (HOL) coercion} (@{syntax term})?
     ;
-  "}
-  @{rail "
     @@{attribute (HOL) coercion_map} (@{syntax term})?
-    ;
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1902,9 +1895,9 @@
     @{method_def (HOL) iprover} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
-    @@{method (HOL) iprover} ( @{syntax rulemod} * )
-  "}
+  @{rail \<open>
+    @@{method (HOL) iprover} (@{syntax rulemod} *)
+  \<close>}
 
   \begin{description}
 
@@ -1934,14 +1927,13 @@
     @{method_def (HOL) "metis"} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method (HOL) meson} @{syntax thmrefs}?
     ;
-
     @@{method (HOL) metis}
       ('(' ('partial_types' | 'full_types' | 'no_types' | @{syntax name}) ')')?
       @{syntax thmrefs}?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1968,13 +1960,13 @@
     @{attribute_def (HOL) algebra} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method (HOL) algebra}
       ('add' ':' @{syntax thmrefs})?
       ('del' ':' @{syntax thmrefs})?
     ;
     @@{attribute (HOL) algebra} (() | 'add' | 'del')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -2051,9 +2043,9 @@
     @{method_def (HOL) "coherent"} & : & @{text method} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method (HOL) coherent} @{syntax thmrefs}?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -2081,7 +2073,7 @@
     @{command_def (HOL) "sledgehammer_params"} & : & @{text "theory \<rightarrow> theory"}
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) try}
     ;
 
@@ -2094,13 +2086,10 @@
 
     @@{command (HOL) sledgehammer_params} ( ( '[' args ']' ) ? )
     ;
-
     args: ( @{syntax name} '=' value + ',' )
     ;
-
     facts: '(' ( ( ( ( 'add' | 'del' ) ':' ) ? @{syntax thmrefs} ) + ) ? ')'
-    ;
-  "} % FIXME check args "value"
+  \<close>} % FIXME check args "value"
 
   \begin{description}
 
@@ -2150,7 +2139,7 @@
     @{command_def (HOL) "find_unused_assms"} & : & @{text "context \<rightarrow>"}
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) value} ( '[' @{syntax name} ']' )? modes? @{syntax term}
     ;
 
@@ -2171,13 +2160,10 @@
 
     @@{command (HOL) find_unused_assms} @{syntax name}?
     ;
-
     modes: '(' (@{syntax name} +) ')'
     ;
-
     args: ( @{syntax name} '=' value + ',' )
-    ;
-  "} % FIXME check "value"
+  \<close>} % FIXME check "value"
 
   \begin{description}
 
@@ -2320,7 +2306,7 @@
     @{command_def (HOL) "inductive_cases"}@{text "\<^sup>*"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method (HOL) case_tac} @{syntax goal_spec}? @{syntax term} rule?
     ;
     @@{method (HOL) induct_tac} @{syntax goal_spec}? (@{syntax insts} * @'and') rule?
@@ -2329,9 +2315,8 @@
     ;
     @@{command (HOL) inductive_cases} (@{syntax thmdecl}? (@{syntax prop}+) + @'and')
     ;
-
     rule: 'rule' ':' @{syntax thmref}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -2413,7 +2398,7 @@
     @{command_def (HOL) "code_pred"} & : & @{text "theory \<rightarrow> proof(prove)"}
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (HOL) export_code} ( constexpr + ) \<newline>
        ( ( @'in' target ( @'module_name' @{syntax string} ) ? \<newline>
         ( @'file' @{syntax string} ) ? ( '(' args ')' ) ?) + ) ?
@@ -2553,7 +2538,7 @@
     ;
 
     modes: mode @'as' const
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarRef/Inner_Syntax.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/Inner_Syntax.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -46,7 +46,7 @@
   These diagnostic commands assist interactive development by printing
   internal logical entities in a human-readable fashion.
 
-  @{rail "
+  @{rail \<open>
     @@{command typ} @{syntax modes}? @{syntax type} ('::' @{syntax sort})?
     ;
     @@{command term} @{syntax modes}? @{syntax term}
@@ -59,9 +59,8 @@
     ;
     @@{command print_state} @{syntax modes}?
     ;
-
     @{syntax_def modes}: '(' (@{syntax name} + ) ')'
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -358,7 +357,7 @@
   to specify any context-free priority grammar, which is more general
   than the fixity declarations of ML and Prolog.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def mixfix}: '('
       @{syntax template} prios? @{syntax nat}? |
       (@'infix' | @'infixl' | @'infixr') @{syntax template} @{syntax nat} |
@@ -369,7 +368,7 @@
     template: string
     ;
     prios: '[' (@{syntax nat} + ',') ']'
-  "}
+  \<close>}
 
   The string given as @{text template} may include literal text,
   spacing, blocks, and arguments (denoted by ``@{text _}''); the
@@ -559,7 +558,7 @@
   allows to add or delete mixfix annotations for of existing logical
   entities within the current context.
 
-  @{rail "
+  @{rail \<open>
     (@@{command type_notation} | @@{command no_type_notation}) @{syntax target}?
       @{syntax mode}? \<newline> (@{syntax nameref} @{syntax mixfix} + @'and')
     ;
@@ -567,7 +566,7 @@
       (@{syntax nameref} @{syntax mixfix} + @'and')
     ;
     @@{command write} @{syntax mode}? (@{syntax nameref} @{syntax mixfix} + @'and')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -631,18 +630,19 @@
     @{syntax_def (inner) num_token} & = & @{syntax_ref nat}@{text "  |  "}@{verbatim "-"}@{syntax_ref nat} \\
     @{syntax_def (inner) float_token} & = & @{syntax_ref nat}@{verbatim "."}@{syntax_ref nat}@{text "  |  "}@{verbatim "-"}@{syntax_ref nat}@{verbatim "."}@{syntax_ref nat} \\
     @{syntax_def (inner) xnum_token} & = & @{verbatim "#"}@{syntax_ref nat}@{text "  |  "}@{verbatim "#-"}@{syntax_ref nat} \\
-
     @{syntax_def (inner) str_token} & = & @{verbatim "''"} @{text "\<dots>"} @{verbatim "''"} \\
+    @{syntax_def (inner) string_token} & = & @{verbatim "\""} @{text "\<dots>"} @{verbatim "\""} \\
     @{syntax_def (inner) cartouche} & = & @{verbatim "\<open>"} @{text "\<dots>"} @{verbatim "\<close>"} \\
   \end{supertabular}
   \end{center}
 
   The token categories @{syntax (inner) num_token}, @{syntax (inner)
   float_token}, @{syntax (inner) xnum_token}, @{syntax (inner)
-  str_token}, and @{syntax (inner) cartouche} are not used in Pure.
-  Object-logics may implement numerals and string literals by adding
-  appropriate syntax declarations, together with some translation
-  functions (e.g.\ see Isabelle/HOL).
+  str_token}, @{syntax (inner) string_token}, and @{syntax (inner)
+  cartouche} are not used in Pure. Object-logics may implement
+  numerals and string literals by adding appropriate syntax
+  declarations, together with some translation functions (e.g.\ see
+  @{file "~~/src/HOL/Tools/string_syntax.ML"}).
 
   The derived categories @{syntax_def (inner) num_const}, @{syntax_def
   (inner) float_const}, and @{syntax_def (inner) num_const} provide
@@ -1216,7 +1216,7 @@
   @{command translations}) are required to turn resulting parse trees
   into proper representations of formal entities again.
 
-  @{rail "
+  @{rail \<open>
     @@{command nonterminal} (@{syntax name} + @'and')
     ;
     (@@{command syntax} | @@{command no_syntax}) @{syntax mode}? (constdecl +)
@@ -1230,7 +1230,7 @@
     mode: ('(' ( @{syntax name} | @'output' | @{syntax name} @'output' ) ')')
     ;
     transpat: ('(' @{syntax nameref} ')')? @{syntax string}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1463,7 +1463,7 @@
   manipulations of inner syntax, at the expense of some complexity and
   obscurity in the implementation.
 
-  @{rail "
+  @{rail \<open>
   ( @@{command parse_ast_translation} | @@{command parse_translation} |
     @@{command print_translation} | @@{command typed_print_translation} |
     @@{command print_ast_translation}) @{syntax text}
@@ -1472,7 +1472,7 @@
    @@{ML_antiquotation type_syntax} |
    @@{ML_antiquotation const_syntax} |
    @@{ML_antiquotation syntax_const}) name
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarRef/Misc.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/Misc.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -20,7 +20,7 @@
     @{command_def "print_binds"}@{text "\<^sup>*"} & : & @{text "context \<rightarrow>"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{command print_theory} | @@{command print_theorems}) ('!'?)
     ;
 
@@ -37,7 +37,7 @@
     @@{command thm_deps} @{syntax thmrefs}
     ;
     @@{command unused_thms} ((@{syntax name} +) '-' (@{syntax name} * ))?
-  "}
+  \<close>}
 
   These commands print certain parts of the theory and proof context.
   Note that there are some further ones available, such as for the set
@@ -121,9 +121,9 @@
     @{command_def "use_thy"}@{text "\<^sup>*"} & : & @{text "any \<rightarrow>"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{command cd} | @@{command use_thy}) @{syntax name}
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarRef/Outer_Syntax.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/Outer_Syntax.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -72,9 +72,9 @@
     @{command_def "help"}@{text "\<^sup>*"} & : & @{text "any \<rightarrow>"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command help} (@{syntax name} * )
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -233,14 +233,14 @@
   Already existing objects are usually referenced by @{syntax
   nameref}.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def name}: @{syntax ident} | @{syntax symident} |
       @{syntax string} | @{syntax nat}
     ;
     @{syntax_def parname}: '(' @{syntax name} ')'
     ;
     @{syntax_def nameref}: @{syntax name} | @{syntax longident}
-  "}
+  \<close>}
 *}
 
 
@@ -250,11 +250,11 @@
   natural numbers and floating point numbers.  These are combined as
   @{syntax int} and @{syntax real} as follows.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def int}: @{syntax nat} | '-' @{syntax nat}
     ;
     @{syntax_def real}: @{syntax float} | @{syntax int}
-  "}
+  \<close>}
 
   Note that there is an overlap with the category @{syntax name},
   which also includes @{syntax nat}.
@@ -271,11 +271,11 @@
   @{verbatim "--"}~@{syntax text}.  Any number of these may occur
   within Isabelle/Isar commands.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def text}: @{syntax verbatim} | @{syntax nameref}
     ;
     @{syntax_def comment}: '--' @{syntax text}
-  "}
+  \<close>}
 *}
 
 
@@ -288,13 +288,13 @@
   intersection of these classes.  The syntax of type arities is given
   directly at the outer level.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def classdecl}: @{syntax name} (('<' | '\<subseteq>') (@{syntax nameref} + ','))?
     ;
     @{syntax_def sort}: @{syntax nameref}
     ;
     @{syntax_def arity}: ('(' (@{syntax sort} + ',') ')')? @{syntax sort}
-  "}
+  \<close>}
 *}
 
 
@@ -314,38 +314,38 @@
   by commands or other keywords already (such as @{verbatim "="} or
   @{verbatim "+"}).
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def type}: @{syntax nameref} | @{syntax typefree} |
       @{syntax typevar}
     ;
     @{syntax_def term}: @{syntax nameref} | @{syntax var}
     ;
     @{syntax_def prop}: @{syntax term}
-  "}
+  \<close>}
 
   Positional instantiations are indicated by giving a sequence of
   terms, or the placeholder ``@{text _}'' (underscore), which means to
   skip a position.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def inst}: '_' | @{syntax term}
     ;
     @{syntax_def insts}: (@{syntax inst} *)
-  "}
+  \<close>}
 
   Type declarations and definitions usually refer to @{syntax
   typespec} on the left-hand side.  This models basic type constructor
   application at the outer syntax level.  Note that only plain postfix
   notation is available here, but no infixes.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def typespec}:
       (() | @{syntax typefree} | '(' ( @{syntax typefree} + ',' ) ')') @{syntax name}
     ;
     @{syntax_def typespec_sorts}:
       (() | (@{syntax typefree} ('::' @{syntax sort})?) |
         '(' ( (@{syntax typefree} ('::' @{syntax sort})?) + ',' ) ')') @{syntax name}
-  "}
+  \<close>}
 *}
 
 
@@ -356,11 +356,11 @@
   specified via patterns of the form ``@{text "(\<IS> p\<^sub>1 \<dots> p\<^sub>n)"}''.
   This works both for @{syntax term} and @{syntax prop}.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def term_pat}: '(' (@'is' @{syntax term} +) ')'
     ;
     @{syntax_def prop_pat}: '(' (@'is' @{syntax prop} +) ')'
-  "}
+  \<close>}
 
   \medskip Declarations of local variables @{text "x :: \<tau>"} and
   logical propositions @{text "a : \<phi>"} represent different views on
@@ -370,11 +370,11 @@
   references of current facts).  In any case, Isar proof elements
   usually admit to introduce multiple such items simultaneously.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def vars}: (@{syntax name} +) ('::' @{syntax type})?
     ;
     @{syntax_def props}: @{syntax thmdecl}? (@{syntax prop} @{syntax prop_pat}? +)
-  "}
+  \<close>}
 
   The treatment of multiple declarations corresponds to the
   complementary focus of @{syntax vars} versus @{syntax props}.  In
@@ -398,7 +398,7 @@
   any atomic entity, including any @{syntax keyword} conforming to
   @{syntax symident}.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def atom}: @{syntax nameref} | @{syntax typefree} |
       @{syntax typevar} | @{syntax var} | @{syntax nat} | @{syntax float} |
       @{syntax keyword} | @{syntax cartouche}
@@ -408,7 +408,7 @@
     @{syntax_def args}: arg *
     ;
     @{syntax_def attributes}: '[' (@{syntax nameref} @{syntax args} * ',') ']'
-  "}
+  \<close>}
 
   Theorem specifications come in several flavors: @{syntax axmdecl}
   and @{syntax thmdecl} usually refer to axioms, assumptions or
@@ -443,7 +443,7 @@
   This form of in-place declarations is particularly useful with
   commands like @{command "declare"} and @{command "using"}.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def axmdecl}: @{syntax name} @{syntax attributes}? ':'
     ;
     @{syntax_def thmdecl}: thmbind ':'
@@ -460,7 +460,7 @@
     thmbind: @{syntax name} @{syntax attributes} | @{syntax name} | @{syntax attributes}
     ;
     selection: '(' ((@{syntax nat} | @{syntax nat} '-' @{syntax nat}?) + ',') ')'
-  "}
+  \<close>}
 *}
 
 end
--- a/src/Doc/IsarRef/Proof.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/Proof.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -53,11 +53,11 @@
     @{command_def "notepad"} & : & @{text "local_theory \<rightarrow> proof(state)"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command notepad} @'begin'
     ;
     @@{command end}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -183,7 +183,7 @@
   exporting some result @{text "x \<equiv> t \<turnstile> \<phi>[x]"} yields @{text "\<turnstile>
   \<phi>[t]"}.
 
-  @{rail "
+  @{rail \<open>
     @@{command fix} (@{syntax vars} + @'and')
     ;
     (@@{command assume} | @@{command presume}) (@{syntax props} + @'and')
@@ -192,7 +192,7 @@
     ;
     def: @{syntax thmdecl}? \<newline>
       @{syntax name} ('==' | '\<equiv>') @{syntax term} @{syntax term_pat}?
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -262,9 +262,9 @@
   input process just after type checking.  Also note that @{command
   "def"} does not support polymorphism.
 
-  @{rail "
+  @{rail \<open>
     @@{command let} ((@{syntax term} + @'and') '=' @{syntax term} + @'and')
-  "}
+  \<close>}
 
   The syntax of @{keyword "is"} patterns follows @{syntax term_pat} or
   @{syntax prop_pat} (see \secref{sec:term-decls}).
@@ -318,12 +318,12 @@
   to the most recently established facts, but only \emph{before}
   issuing a follow-up claim.
 
-  @{rail "
+  @{rail \<open>
     @@{command note} (@{syntax thmdef}? @{syntax thmrefs} + @'and')
     ;
     (@@{command from} | @@{command with} | @@{command using} | @@{command unfolding})
       (@{syntax thmrefs} + @'and')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -429,7 +429,7 @@
   contexts of (essentially a big disjunction of eliminated parameters
   and assumptions, cf.\ \secref{sec:obtain}).
 
-  @{rail "
+  @{rail \<open>
     (@@{command lemma} | @@{command theorem} | @@{command corollary} |
      @@{command schematic_lemma} | @@{command schematic_theorem} |
      @@{command schematic_corollary}) @{syntax target}? (goal | longgoal)
@@ -441,12 +441,12 @@
   
     goal: (@{syntax props} + @'and')
     ;
-    longgoal: @{syntax thmdecl}? (@{syntax_ref \"includes\"}?) (@{syntax context_elem} * ) conclusion
+    longgoal: @{syntax thmdecl}? (@{syntax_ref "includes"}?) (@{syntax context_elem} * ) conclusion
     ;
     conclusion: @'shows' goal | @'obtains' (@{syntax parname}? case + '|')
     ;
     case: (@{syntax vars} + @'and') @'where' (@{syntax props} + @'and')
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -541,12 +541,12 @@
   nameref}~@{syntax args} specifications.  Note that parentheses may
   be dropped for single method specifications (with no arguments).
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def method}:
       (@{syntax nameref} | '(' methods ')') (() | '?' | '+' | '[' @{syntax nat}? ']')
     ;
     methods: (@{syntax nameref} @{syntax args} | @{syntax method}) + (',' | '|')
-  "}
+  \<close>}
 
   Proper Isar proof methods do \emph{not} admit arbitrary goal
   addressing, but refer either to the first sub-goal or all sub-goals
@@ -564,10 +564,10 @@
   all goals, and ``@{text "[n-]"}'' to all goals starting from @{text
   "n"}.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def goal_spec}:
       '[' (@{syntax nat} '-' @{syntax nat} | @{syntax nat} '-' | @{syntax nat} | '!' ) ']'
-  "}
+  \<close>}
 *}
 
 
@@ -621,15 +621,15 @@
   default terminal method.  Any remaining goals are always solved by
   assumption in the very last step.
 
-  @{rail "
+  @{rail \<open>
     @@{command proof} method?
     ;
     @@{command qed} method?
     ;
-    @@{command \"by\"} method method?
+    @@{command "by"} method method?
     ;
-    (@@{command \".\"} | @@{command \"..\"} | @@{command sorry})
-  "}
+    (@@{command "."} | @@{command ".."} | @@{command sorry})
+  \<close>}
 
   \begin{description}
   
@@ -706,7 +706,7 @@
     @{attribute_def "where"} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{method fact} @{syntax thmrefs}?
     ;
     @@{method (Pure) rule} @{syntax thmrefs}?
@@ -723,10 +723,10 @@
     ;
     @@{attribute of} @{syntax insts} ('concl' ':' @{syntax insts})?
     ;
-    @@{attribute \"where\"}
+    @@{attribute "where"}
       ((@{syntax name} | @{syntax var} | @{syntax typefree} | @{syntax typevar}) '='
       (@{syntax type} | @{syntax term}) * @'and')
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -846,13 +846,13 @@
     @{command_def "back"}@{text "\<^sup>*"} & : & @{text "proof \<rightarrow> proof"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     ( @@{command apply} | @@{command apply_end} ) @{syntax method}
     ;
     @@{command defer} @{syntax nat}?
     ;
     @@{command prefer} @{syntax nat}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -907,10 +907,9 @@
     @{command_def "method_setup"} & : & @{text "theory \<rightarrow> theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command method_setup} @{syntax name} '=' @{syntax text} @{syntax text}?
-    ;
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -963,12 +962,12 @@
   later, provided that the corresponding parameters do \emph{not}
   occur in the conclusion.
 
-  @{rail "
+  @{rail \<open>
     @@{command obtain} @{syntax parname}? (@{syntax vars} + @'and')
       @'where' (@{syntax props} + @'and')
     ;
     @@{command guess} (@{syntax vars} + @'and')
-  "}
+  \<close>}
 
   The derived Isar command @{command "obtain"} is defined as follows
   (where @{text "b\<^sub>1, \<dots>, b\<^sub>k"} shall refer to (optional)
@@ -1078,11 +1077,11 @@
     @{command "ultimately"} & \equiv & @{command "moreover"}~@{command "from"}~@{text calculation} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{command also} | @@{command finally}) ('(' @{syntax thmrefs} ')')?
     ;
     @@{attribute trans} (() | 'add' | 'del')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1195,7 +1194,7 @@
   derived manually become ready to use in advanced case analysis
   later.
 
-  @{rail "
+  @{rail \<open>
     @@{command case} (caseref | '(' caseref (('_' | @{syntax name}) *) ')')
     ;
     caseref: nameref attributes?
@@ -1208,7 +1207,7 @@
     @@{attribute params} ((@{syntax name} * ) + @'and')
     ;
     @@{attribute consumes} @{syntax int}?
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -1303,7 +1302,7 @@
   the names of the facts in the local context invoked by the @{command "case"}
   command.
 
-  @{rail "
+  @{rail \<open>
     @@{method cases} ('(' 'no_simp' ')')? \<newline>
       (@{syntax insts} * @'and') rule?
     ;
@@ -1322,7 +1321,7 @@
     arbitrary: 'arbitrary' ':' ((@{syntax term} * ) @'and' +)
     ;
     taking: 'taking' ':' @{syntax insts}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1499,7 +1498,7 @@
     @{attribute_def coinduct} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{attribute cases} spec
     ;
     @@{attribute induct} spec
@@ -1508,7 +1507,7 @@
     ;
 
     spec: (('type' | 'pred' | 'set') ':' @{syntax nameref}) | 'del'
-  "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/IsarRef/Spec.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/IsarRef/Spec.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -50,7 +50,7 @@
   although some user-interfaces might pretend that trailing input is
   admissible.
 
-  @{rail "
+  @{rail \<open>
     @@{command theory} @{syntax name} imports keywords? \<newline> @'begin'
     ;
     imports: @'imports' (@{syntax name} +)
@@ -59,7 +59,7 @@
     ;
     keyword_decls: (@{syntax string} +)
       ('::' @{syntax name} @{syntax tags})? ('==' @{syntax name})?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -121,13 +121,13 @@
   targets, like @{command "locale"}, @{command "class"}, @{command
   "instantiation"}, @{command "overloading"}.
 
-  @{rail "
+  @{rail \<open>
     @@{command context} @{syntax nameref} @'begin'
     ;
-    @@{command context} @{syntax_ref \"includes\"}? (@{syntax context_elem} * ) @'begin'
+    @@{command context} @{syntax_ref "includes"}? (@{syntax context_elem} * ) @'begin'
     ;
     @{syntax_def target}: '(' @'in' @{syntax nameref} ')'
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -210,14 +210,14 @@
   without logical dependencies, which is in contrast to locales and
   locale interpretation (\secref{sec:locale}).
 
-  @{rail "
+  @{rail \<open>
     @@{command bundle} @{syntax target}? \<newline>
     @{syntax name} '=' @{syntax thmrefs} (@'for' (@{syntax vars} + @'and'))?
     ;
     (@@{command include} | @@{command including}) (@{syntax nameref}+)
     ;
-    @{syntax_def \"includes\"}: @'includes' (@{syntax nameref}+)
-  "}
+    @{syntax_def "includes"}: @'includes' (@{syntax nameref}+)
+  \<close>}
 
   \begin{description}
 
@@ -274,8 +274,8 @@
   "defs"} (see \secref{sec:consts}), and raw axioms.  In particular,
   type-inference covers the whole specification as usual.
 
-  @{rail "
-    @@{command axiomatization} @{syntax \"fixes\"}? (@'where' specs)?
+  @{rail \<open>
+    @@{command axiomatization} @{syntax "fixes"}? (@'where' specs)?
     ;
     @@{command definition} @{syntax target}? \<newline>
       (decl @'where')? @{syntax thmdecl}? @{syntax prop}
@@ -284,13 +284,13 @@
       (decl @'where')? @{syntax prop}
     ;
 
-    @{syntax_def \"fixes\"}: ((@{syntax name} ('::' @{syntax type})?
+    @{syntax_def "fixes"}: ((@{syntax name} ('::' @{syntax type})?
       @{syntax mixfix}? | @{syntax vars}) + @'and')
     ;
     specs: (@{syntax thmdecl}? @{syntax props} + @'and')
     ;
     decl: @{syntax name} ('::' @{syntax type})? @{syntax mixfix}?
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -364,12 +364,12 @@
   case: it consists of a theorem which is applied to the context by
   means of an attribute.
 
-  @{rail "
+  @{rail \<open>
     (@@{command declaration} | @@{command syntax_declaration})
       ('(' @'pervasive' ')')? \<newline> @{syntax target}? @{syntax text}
     ;
     @@{command declare} @{syntax target}? (@{syntax thmrefs} + @'and')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -431,8 +431,8 @@
   elements from the locale instances.  Redundant locale instances are
   omitted according to roundup.
 
-  @{rail "
-    @{syntax_def locale_expr}: (instance + '+') (@'for' (@{syntax \"fixes\"} + @'and'))?
+  @{rail \<open>
+    @{syntax_def locale_expr}: (instance + '+') (@'for' (@{syntax "fixes"} + @'and'))?
     ;
     instance: (qualifier ':')? @{syntax nameref} (pos_insts | named_insts)
     ;
@@ -441,7 +441,7 @@
     pos_insts: ('_' | @{syntax term})*
     ;
     named_insts: @'where' (@{syntax name} '=' @{syntax term} + @'and')
-  "}
+  \<close>}
 
   A locale instance consists of a reference to a locale and either
   positional or named parameter instantiations.  Identical
@@ -483,7 +483,7 @@
 
   \indexisarelem{fixes}\indexisarelem{constrains}\indexisarelem{assumes}
   \indexisarelem{defines}\indexisarelem{notes}
-  @{rail "
+  @{rail \<open>
     @@{command locale} @{syntax name} ('=' @{syntax locale})? @'begin'?
     ;
     @@{command print_locale} '!'? @{syntax nameref}
@@ -492,12 +492,12 @@
       @{syntax locale_expr} ('+' (@{syntax context_elem}+))?
     ;
     @{syntax_def context_elem}:
-      @'fixes' (@{syntax \"fixes\"} + @'and') |
+      @'fixes' (@{syntax "fixes"} + @'and') |
       @'constrains' (@{syntax name} '::' @{syntax type} + @'and') |
       @'assumes' (@{syntax props} + @'and') |
       @'defines' (@{syntax thmdecl}? @{syntax prop} @{syntax prop_pat}? + @'and') |
       @'notes' (@{syntax thmdef}? @{syntax thmrefs} + @'and')
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -630,7 +630,7 @@
   "interpretation"}) and also within proof bodies (@{command
   "interpret"}).
 
-  @{rail "
+  @{rail \<open>
     @@{command interpretation} @{syntax locale_expr} equations?
     ;
     @@{command interpret} @{syntax locale_expr} equations?
@@ -644,7 +644,7 @@
     ;
 
     equations: @'where' (@{syntax thmdecl}? @{syntax prop} + @'and')
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -796,7 +796,7 @@
   (notably type-inference).  See \cite{isabelle-classes} for a short
   tutorial.
 
-  @{rail "
+  @{rail \<open>
     @@{command class} class_spec @'begin'?
     ;
     class_spec: @{syntax name} '='
@@ -809,7 +809,7 @@
       @{syntax nameref} ('<' | '\<subseteq>') @{syntax nameref} )
     ;
     @@{command subclass} @{syntax target}? @{syntax nameref}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -968,11 +968,11 @@
   The @{command "overloading"} target provides a convenient view for
   end-users.
 
-  @{rail "
+  @{rail \<open>
     @@{command overloading} ( spec + ) @'begin'
     ;
     spec: @{syntax name} ( '==' | '\<equiv>' ) @{syntax term} ( '(' @'unchecked' ')' )?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1010,14 +1010,14 @@
     @{command_def "attribute_setup"} & : & @{text "theory \<rightarrow> theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command ML_file} @{syntax name}
     ;
     (@@{command ML} | @@{command ML_prf} | @@{command ML_val} |
       @@{command ML_command} | @@{command setup} | @@{command local_setup}) @{syntax text}
     ;
     @@{command attribute_setup} @{syntax name} '=' @{syntax text} @{syntax text}?
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1094,13 +1094,13 @@
     @{command_def "default_sort"} & : & @{text "local_theory \<rightarrow> local_theory"}
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command classes} (@{syntax classdecl} +)
     ;
     @@{command classrel} (@{syntax nameref} ('<' | '\<subseteq>') @{syntax nameref} + @'and')
     ;
     @@{command default_sort} @{syntax sort}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1141,13 +1141,13 @@
     @{command_def "arities"} & : & @{text "theory \<rightarrow> theory"} & (axiomatic!) \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command type_synonym} (@{syntax typespec} '=' @{syntax type} @{syntax mixfix}?)
     ;
     @@{command typedecl} @{syntax typespec} @{syntax mixfix}?
     ;
     @@{command arities} (@{syntax nameref} '::' @{syntax arity} +)
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1219,13 +1219,13 @@
   corresponding occurrences on some right-hand side need to be an
   instance of this, general @{text "d :: \<alpha> \<times> \<beta>"} will be disallowed.
 
-  @{rail "
+  @{rail \<open>
     @@{command consts} ((@{syntax name} '::' @{syntax type} @{syntax mixfix}?) +)
     ;
     @@{command defs} opt? (@{syntax axmdecl} @{syntax prop} +)
     ;
     opt: '(' @'unchecked'? @'overloaded'? ')'
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1259,11 +1259,11 @@
     @{command_def "theorems"} & : & @{text "local_theory \<rightarrow> local_theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{command lemmas} | @@{command theorems}) @{syntax target}? \<newline>
       (@{syntax thmdef}? @{syntax thmrefs} + @'and')
       (@'for' (@{syntax vars} + @'and'))?
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -1302,9 +1302,9 @@
   asserted, and records within the internal derivation object how
   presumed theorems depend on unproven suppositions.
 
-  @{rail "
+  @{rail \<open>
     @@{command oracle} @{syntax name} '=' @{syntax text}
-  "}
+  \<close>}
 
   \begin{description}
 
@@ -1333,10 +1333,10 @@
     @{command_def "hide_fact"} & : & @{text "theory \<rightarrow> theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     ( @{command hide_class} | @{command hide_type} |
       @{command hide_const} | @{command hide_fact} ) ('(' @'open' ')')? (@{syntax nameref} + )
-  "}
+  \<close>}
 
   Isabelle organizes any kind of name declarations (of types,
   constants, theorems etc.) by separate hierarchically structured name
--- a/src/Doc/ProgProve/LaTeXsugar.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/ProgProve/LaTeXsugar.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -52,7 +52,7 @@
       end
   in
     Thy_Output.antiquotation @{binding "const_typ"}
-     (Scan.lift Args.name_source)
+     (Scan.lift Args.name_inner_syntax)
        (fn {source = src, context = ctxt, ...} => fn arg =>
           Thy_Output.output ctxt
             (Thy_Output.maybe_pretty_source pretty ctxt src [arg]))
--- a/src/Doc/System/Sessions.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/System/Sessions.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -48,7 +48,7 @@
   mode @{verbatim "isabelle-root"} for session ROOT files, which is
   enabled by default for any file of that name.
 
-  @{rail "
+  @{rail \<open>
     @{syntax_def session_chapter}: @'chapter' @{syntax name}
     ;
 
@@ -73,7 +73,7 @@
     theories: @'theories' opts? ( @{syntax name} * )
     ;
     files: @'files' ( @{syntax name} + )
-    "}
+  \<close>}
 
   \begin{description}
 
--- a/src/Doc/ZF/ZF_Isar.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Doc/ZF/ZF_Isar.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -24,9 +24,9 @@
     @{attribute_def (ZF) TC} & : & @{text attribute} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{attribute (ZF) TC} (() | 'add' | 'del')
-  "}
+  \<close>}
 
   \begin{description}
   
@@ -59,7 +59,7 @@
     @{command_def (ZF) "codatatype"} & : & @{text "theory \<rightarrow> theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{command (ZF) inductive} | @@{command (ZF) coinductive}) domains intros hints
     ;
 
@@ -67,22 +67,22 @@
     ;
     intros: @'intros' (@{syntax thmdecl}? @{syntax prop} +)
     ;
-    hints: @{syntax (ZF) \"monos\"}? condefs? \<newline>
+    hints: @{syntax (ZF) "monos"}? condefs? \<newline>
       @{syntax (ZF) typeintros}? @{syntax (ZF) typeelims}?
     ;
-    @{syntax_def (ZF) \"monos\"}: @'monos' @{syntax thmrefs}
+    @{syntax_def (ZF) "monos"}: @'monos' @{syntax thmrefs}
     ;
     condefs: @'con_defs' @{syntax thmrefs}
     ;
     @{syntax_def (ZF) typeintros}: @'type_intros' @{syntax thmrefs}
     ;
     @{syntax_def (ZF) typeelims}: @'type_elims' @{syntax thmrefs}
-  "}
+  \<close>}
 
   In the following syntax specification @{text "monos"}, @{text
   typeintros}, and @{text typeelims} are the same as above.
 
-  @{rail "
+  @{rail \<open>
     (@@{command (ZF) datatype} | @@{command (ZF) codatatype}) domain? (dtspec + @'and') hints
     ;
 
@@ -92,8 +92,8 @@
     ;
     con: @{syntax name} ('(' (@{syntax term} ',' +) ')')?
     ;
-    hints: @{syntax (ZF) \"monos\"}? @{syntax (ZF) typeintros}? @{syntax (ZF) typeelims}?
-  "}
+    hints: @{syntax (ZF) "monos"}? @{syntax (ZF) typeintros}? @{syntax (ZF) typeelims}?
+  \<close>}
 
   See \cite{isabelle-ZF} for further information on inductive
   definitions in ZF, but note that this covers the old-style theory
@@ -108,9 +108,9 @@
     @{command_def (ZF) "primrec"} & : & @{text "theory \<rightarrow> theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     @@{command (ZF) primrec} (@{syntax thmdecl}? @{syntax prop} +)
-  "}
+  \<close>}
 *}
 
 
@@ -127,14 +127,13 @@
     @{command_def (ZF) "inductive_cases"} & : & @{text "theory \<rightarrow> theory"} \\
   \end{matharray}
 
-  @{rail "
+  @{rail \<open>
     (@@{method (ZF) case_tac} | @@{method (ZF) induct_tac}) @{syntax goal_spec}? @{syntax name}
     ;
     @@{method (ZF) ind_cases} (@{syntax prop} +)
     ;
     @@{command (ZF) inductive_cases} (@{syntax thmdecl}? (@{syntax prop} +) + @'and')
-    ;
-  "}
+  \<close>}
 *}
 
 end
--- a/src/FOL/FOL.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/FOL/FOL.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -72,7 +72,7 @@
 *}
 
 method_setup case_tac = {*
-  Args.goal_spec -- Scan.lift Args.name_source >>
+  Args.goal_spec -- Scan.lift Args.name_inner_syntax >>
     (fn (quant, s) => fn ctxt => SIMPLE_METHOD'' quant (case_tac ctxt s))
 *} "case_tac emulation (dynamic instantiation!)"
 
--- a/src/HOL/Library/LaTeXsugar.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/HOL/Library/LaTeXsugar.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -106,7 +106,7 @@
       end
   in
     Thy_Output.antiquotation @{binding "const_typ"}
-     (Scan.lift Args.name_source)
+     (Scan.lift Args.name_inner_syntax)
        (fn {source = src, context = ctxt, ...} => fn arg =>
           Thy_Output.output ctxt
             (Thy_Output.maybe_pretty_source pretty ctxt src [arg]))
--- a/src/HOL/SPARK/Tools/fdl_lexer.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/HOL/SPARK/Tools/fdl_lexer.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -205,7 +205,7 @@
 val any_line = whitespace' |-- any_line' --|
   newline >> (implode o map symbol);
 
-fun gen_comment a b = $$$ a |-- !!! "missing end of comment"
+fun gen_comment a b = $$$ a |-- !!! "unclosed comment"
   (Scan.repeat (Scan.unless ($$$ b) any) --| $$$ b) >> make_token Comment;
 
 val c_comment = gen_comment "/*" "*/";
--- a/src/HOL/Tools/inductive.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/HOL/Tools/inductive.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -591,7 +591,7 @@
 
 val ind_cases_setup =
   Method.setup @{binding ind_cases}
-    (Scan.lift (Scan.repeat1 Args.name_source --
+    (Scan.lift (Scan.repeat1 Args.name_inner_syntax --
       Scan.optional (Args.$$$ "for" |-- Scan.repeat1 Args.binding) []) >>
       (fn (raw_props, fixes) => fn ctxt =>
         let
--- a/src/HOL/Tools/string_syntax.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/HOL/Tools/string_syntax.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -38,24 +38,24 @@
 val specials = raw_explode "\\\"`'";
 
 fun dest_chr c1 c2 =
-  let val c = chr (dest_nib c1 * 16 + dest_nib c2) in
-    if not (member (op =) specials c) andalso Symbol.is_ascii c andalso Symbol.is_printable c
-    then c
-    else if c = "\n" then "\<newline>"
+  let val s = chr (dest_nib c1 * 16 + dest_nib c2) in
+    if not (member (op =) specials s) andalso Symbol.is_ascii s andalso Symbol.is_printable s
+    then s
+    else if s = "\n" then "\<newline>"
     else raise Match
   end;
 
 fun dest_char (Ast.Appl [Ast.Constant @{const_syntax Char}, c1, c2]) = dest_chr c1 c2
   | dest_char _ = raise Match;
 
-fun syntax_string cs =
+fun syntax_string ss =
   Ast.Appl [Ast.Constant @{syntax_const "_inner_string"},
-    Ast.Variable (Lexicon.implode_str cs)];
+    Ast.Variable (Lexicon.implode_str ss)];
 
 
 fun char_ast_tr [Ast.Variable str] =
-      (case Lexicon.explode_str str of
-        [c] => mk_char c
+      (case Lexicon.explode_str (str, Position.none) of
+        [(s, _)] => mk_char s
       | _ => error ("Single character expected: " ^ str))
   | char_ast_tr [Ast.Appl [Ast.Constant @{syntax_const "_constrain"}, ast1, ast2]] =
       Ast.Appl [Ast.Constant @{syntax_const "_constrain"}, char_ast_tr [ast1], ast2]
@@ -69,16 +69,16 @@
 (* string *)
 
 fun mk_string [] = Ast.Constant @{const_syntax Nil}
-  | mk_string (c :: cs) =
-      Ast.Appl [Ast.Constant @{const_syntax Cons}, mk_char c, mk_string cs];
+  | mk_string (s :: ss) =
+      Ast.Appl [Ast.Constant @{const_syntax Cons}, mk_char s, mk_string ss];
 
 fun string_ast_tr [Ast.Variable str] =
-      (case Lexicon.explode_str str of
+      (case Lexicon.explode_str (str, Position.none) of
         [] =>
           Ast.Appl
             [Ast.Constant @{syntax_const "_constrain"},
               Ast.Constant @{const_syntax Nil}, Ast.Constant @{type_syntax string}]
-      | cs => mk_string cs)
+      | ss => mk_string (map Symbol_Pos.symbol ss))
   | string_ast_tr [Ast.Appl [Ast.Constant @{syntax_const "_constrain"}, ast1, ast2]] =
       Ast.Appl [Ast.Constant @{syntax_const "_constrain"}, string_ast_tr [ast1], ast2]
   | string_ast_tr asts = raise Ast.AST ("string_tr", asts);
--- a/src/HOL/UNITY/UNITY_Main.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/HOL/UNITY/UNITY_Main.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -16,7 +16,7 @@
     "for proving safety properties"
 
 method_setup ensures_tac = {*
-  Args.goal_spec -- Scan.lift Args.name_source >>
+  Args.goal_spec -- Scan.lift Args.name_inner_syntax >>
   (fn (quant, s) => fn ctxt => SIMPLE_METHOD'' quant (ensures_tac ctxt s))
 *} "for proving progress properties"
 
--- a/src/HOL/ex/Cartouche_Examples.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/HOL/ex/Cartouche_Examples.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -2,10 +2,12 @@
 
 theory Cartouche_Examples
 imports Main
-keywords "cartouche" :: diag
+keywords
+  "cartouche" "term_cartouche" :: diag and
+  "text_cartouche" :: thy_decl
 begin
 
-subsection {* Outer syntax *}
+subsection {* Outer syntax: cartouche within command syntax *}
 
 ML {*
   Outer_Syntax.improper_command @{command_spec "cartouche"} ""
@@ -17,12 +19,10 @@
 cartouche \<open>abc \<open>\<alpha>\<beta>\<gamma>\<close> xzy\<close>
 
 
-subsection {* Inner syntax *}
+subsection {* Inner syntax: string literals via cartouche *}
 
-syntax "_string_cartouche" :: "cartouche_position \<Rightarrow> string"  ("STR _")
-
-parse_translation {*
-  let
+ML {*
+  local
     val mk_nib =
       Syntax.const o Lexicon.mark_const o
         fst o Term.dest_Const o HOLogic.mk_nibble;
@@ -39,27 +39,139 @@
       | mk_string (s :: ss) =
           Syntax.const @{const_syntax Cons} $ mk_char s $ mk_string ss;
 
-    fun string_tr ctxt args =
-      let fun err () = raise TERM ("string_tr", []) in
+  in
+    fun string_tr content args =
+      let fun err () = raise TERM ("string_tr", args) in
         (case args of
           [(c as Const (@{syntax_const "_constrain"}, _)) $ Free (s, _) $ p] =>
             (case Term_Position.decode_position p of
-              SOME (pos, _) =>
-                c $ mk_string (Symbol_Pos.cartouche_content (Symbol_Pos.explode (s, pos))) $ p
+              SOME (pos, _) => c $ mk_string (content (s, pos)) $ p
             | NONE => err ())
         | _ => err ())
       end;
-  in
-    [(@{syntax_const "_string_cartouche"}, string_tr)]
-  end
+  end;
+*}
+
+syntax "_STR_cartouche" :: "cartouche_position \<Rightarrow> string"  ("STR _")
+
+parse_translation {*
+  [(@{syntax_const "_STR_cartouche"},
+    K (string_tr (Symbol_Pos.cartouche_content o Symbol_Pos.explode)))]
 *}
 
 term "STR \<open>\<close>"
 term "STR \<open>abc\<close>"
-lemma "STR \<open>abc\<close> @ STR \<open>xyz\<close> = STR \<open>abcxyz\<close>" by simp
+term "STR \<open>abc\<close> @ STR \<open>xyz\<close>"
+term "STR \<open>\<newline>\<close>"
+term "STR \<open>\001\010\100\<close>"
+
+
+subsection {* Alternate outer and inner syntax: string literals *}
+
+subsubsection {* Nested quotes *}
+
+syntax "_STR_string" :: "string_position \<Rightarrow> string"  ("STR _")
+
+parse_translation {*
+  [(@{syntax_const "_STR_string"}, K (string_tr Lexicon.explode_string))]
+*}
+
+term "STR \"\""
+term "STR \"abc\""
+term "STR \"abc\" @ STR \"xyz\""
+term "STR \"\<newline>\""
+term "STR \"\001\010\100\""
+
+
+subsubsection {* Term cartouche and regular quotes *}
+
+ML {*
+  Outer_Syntax.improper_command @{command_spec "term_cartouche"} ""
+    (Parse.inner_syntax Parse.cartouche >> (fn s =>
+      Toplevel.keep (fn state =>
+        let
+          val ctxt = Toplevel.context_of state;
+          val t = Syntax.read_term ctxt s;
+        in writeln (Syntax.string_of_term ctxt t) end)))
+*}
+
+term_cartouche \<open>STR ""\<close>
+term_cartouche \<open>STR "abc"\<close>
+term_cartouche \<open>STR "abc" @ STR "xyz"\<close>
+term_cartouche \<open>STR "\<newline>"\<close>
+term_cartouche \<open>STR "\001\010\100"\<close>
 
 
-subsection {* Proof method syntax *}
+subsubsection {* Further nesting: antiquotations *}
+
+setup -- "ML antiquotation"
+{*
+  ML_Antiquote.inline @{binding term_cartouche}
+    (Args.context -- Scan.lift (Parse.inner_syntax Parse.cartouche) >>
+      (fn (ctxt, s) => ML_Syntax.atomic (ML_Syntax.print_term (Syntax.read_term ctxt s))))
+*}
+
+setup -- "document antiquotation"
+{*
+  Thy_Output.antiquotation @{binding ML_cartouche}
+    (Scan.lift Args.cartouche_source_position) (fn {context, ...} => fn (txt, pos) =>
+      let
+        val toks =
+          ML_Lex.read Position.none "fn _ => (" @
+          ML_Lex.read pos txt @
+          ML_Lex.read Position.none ");";
+        val _ = ML_Context.eval_in (SOME context) false pos toks;
+      in "" end);
+*}
+
+ML {*
+  @{term_cartouche \<open>STR ""\<close>};
+  @{term_cartouche \<open>STR "abc"\<close>};
+  @{term_cartouche \<open>STR "abc" @ STR "xyz"\<close>};
+  @{term_cartouche \<open>STR "\<newline>"\<close>};
+  @{term_cartouche \<open>STR "\001\010\100"\<close>};
+*}
+
+text {*
+  @{ML_cartouche
+    \<open>
+      (
+        @{term_cartouche \<open>STR ""\<close>};
+        @{term_cartouche \<open>STR "abc"\<close>};
+        @{term_cartouche \<open>STR "abc" @ STR "xyz"\<close>};
+        @{term_cartouche \<open>STR "\<newline>"\<close>};
+        @{term_cartouche \<open>STR "\001\010\100"\<close>}
+      )
+    \<close>
+  }
+*}
+
+
+subsubsection {* Uniform nesting of sub-languages: document source, ML, term, string literals *}
+
+ML {*
+  Outer_Syntax.command
+    @{command_spec "text_cartouche"} ""
+    (Parse.opt_target -- Parse.source_position Parse.cartouche >> Isar_Cmd.local_theory_markup)
+*}
+
+text_cartouche
+\<open>
+  @{ML_cartouche
+    \<open>
+      (
+        @{term_cartouche \<open>STR ""\<close>};
+        @{term_cartouche \<open>STR "abc"\<close>};
+        @{term_cartouche \<open>STR "abc" @ STR "xyz"\<close>};
+        @{term_cartouche \<open>STR "\<newline>"\<close>};
+        @{term_cartouche \<open>STR "\001\010\100"\<close>}
+      )
+    \<close>
+  }
+\<close>
+
+
+subsection {* Proof method syntax: ML tactic expression *}
 
 ML {*
 structure ML_Tactic:
@@ -83,7 +195,7 @@
 *}
 
 
-subsection {* Explicit version: method with cartouche argument *}
+subsubsection {* Explicit version: method with cartouche argument *}
 
 method_setup ml_tactic = {*
   Scan.lift Args.cartouche_source_position
@@ -104,7 +216,7 @@
 text {* @{ML "@{lemma \"A \<and> B \<longrightarrow> B \<and> A\" by (ml_tactic \<open>blast_tac ctxt 1\<close>)}"} *}
 
 
-subsection {* Implicit version: method with special name "cartouche" (dynamic!) *}
+subsubsection {* Implicit version: method with special name "cartouche" (dynamic!) *}
 
 method_setup "cartouche" = {*
   Scan.lift Args.cartouche_source_position
--- a/src/Pure/General/antiquote.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/General/antiquote.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -45,20 +45,20 @@
 val err_prefix = "Antiquotation lexical error: ";
 
 val scan_txt =
-  $$$ "@" --| Scan.ahead (~$$$ "{") ||
+  $$$ "@" --| Scan.ahead (~$$ "{") ||
   Scan.one (fn (s, _) => s <> "@" andalso Symbol.is_regular s) >> single;
 
 val scan_ant =
   Scan.trace (Symbol_Pos.scan_string_qq err_prefix || Symbol_Pos.scan_string_bq err_prefix) >> #2 ||
-  Scan.trace (Symbol_Pos.scan_cartouche (fn s => Symbol_Pos.!!! (fn () => err_prefix ^ s))) >> #2 ||
+  Scan.trace (Symbol_Pos.scan_cartouche err_prefix) >> #2 ||
   Scan.one (fn (s, _) => s <> "}" andalso Symbol.is_regular s) >> single;
 
 in
 
 val scan_antiq =
-  Symbol_Pos.scan_pos -- ($$$ "@" |-- $$$ "{" |--
+  Symbol_Pos.scan_pos -- ($$ "@" |-- $$ "{" |--
     Symbol_Pos.!!! (fn () => err_prefix ^ "missing closing brace")
-      (Scan.repeat scan_ant -- ($$$ "}" |-- Symbol_Pos.scan_pos)))
+      (Scan.repeat scan_ant -- ($$ "}" |-- Symbol_Pos.scan_pos)))
   >> (fn (pos1, (body, pos2)) => (flat body, Position.range pos1 pos2));
 
 val scan_text = scan_antiq >> Antiq || Scan.repeat1 scan_txt >> (Text o flat);
--- a/src/Pure/General/scan.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/General/scan.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -64,6 +64,7 @@
   val state: 'a * 'b -> 'a * ('a * 'b)
   val depend: ('a -> 'b -> ('c * 'd) * 'e) -> 'a * 'b -> 'd * ('c * 'e)
   val peek: ('a -> 'b -> 'c * 'd) -> 'a * 'b -> 'c * ('a * 'd)
+  val provide: ('a -> bool) -> 'b -> ('b * 'c -> 'd * ('a * 'e)) -> 'c -> 'd * 'e
   val pass: 'a -> ('a * 'b -> 'c * ('d * 'e)) -> 'b -> 'c * 'e
   val lift: ('a -> 'b * 'c) -> 'd * 'a -> 'b * ('d * 'c)
   val unlift: (unit * 'a -> 'b * ('c * 'd)) -> 'a -> 'b * 'd
@@ -210,9 +211,11 @@
 
 fun peek scan = depend (fn st => scan st >> pair st);
 
-fun pass st scan xs =
-  let val (y, (_, xs')) = scan (st, xs)
-  in (y, xs') end;
+fun provide pred st scan xs =
+  let val (y, (st', xs')) = scan (st, xs)
+  in if pred st' then (y, xs') else fail () end;
+
+fun pass st = provide (K true) st;
 
 fun lift scan (st, xs) =
   let val (y, xs') = scan xs
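
The new Scan.provide combinator generalises Scan.pass: it runs a stateful
scanner from an initial state and succeeds only if the final state satisfies
the given predicate, while Scan.pass accepts any final state.  A minimal
sketch of the intended usage, anticipating the depth-counting cartouche
scanner introduced in symbol_pos.ML below (the bindings here are illustrative
only):

  (* succeed only if every \<open> has a matching \<close>, i.e. final depth 0 *)
  val scan_balanced = Scan.provide (fn d => d = 0) 0 scan_cartouche_depth;

  (* error recovery does not care about the final depth *)
  val scan_prefix = Scan.pass 0 scan_cartouche_depth;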
--- a/src/Pure/General/symbol_pos.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/General/symbol_pos.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -8,6 +8,8 @@
 sig
   type T = Symbol.symbol * Position.T
   val symbol: T -> Symbol.symbol
+  val $$ : Symbol.symbol -> T list -> T * T list
+  val ~$$ : Symbol.symbol -> T list -> T * T list
   val $$$ : Symbol.symbol -> T list -> T list * T list
   val ~$$$ : Symbol.symbol -> T list -> T list * T list
   val content: T list -> string
@@ -26,16 +28,11 @@
   val quote_string_q: string -> string
   val quote_string_qq: string -> string
   val quote_string_bq: string -> string
-  val scan_cartouche: (string -> (T list -> T list * T list) -> T list -> T list * T list) ->
-    T list -> T list * T list
-  val scan_cartouche_body: (string -> (T list -> T list * T list) -> T list -> T list * T list) ->
-    T list -> T list * T list
+  val scan_cartouche: string -> T list -> T list * T list
   val recover_cartouche: T list -> T list * T list
   val cartouche_content: T list -> T list
-  val scan_comment: (string -> (T list -> T list * T list) -> T list -> T list * T list) ->
-    T list -> T list * T list
-  val scan_comment_body: (string -> (T list -> T list * T list) -> T list -> T list * T list) ->
-    T list -> T list * T list
+  val scan_comment: string -> T list -> T list * T list
+  val scan_comment_body: string -> T list -> T list * T list
   val recover_comment: T list -> T list * T list
   val source: Position.T -> (Symbol.symbol, 'a) Source.source ->
     (T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source
@@ -91,6 +88,8 @@
 fun change_prompt scan = Scan.prompt "# " scan;
 
 fun $$ s = Scan.one (fn x => symbol x = s);
+fun ~$$ s = Scan.one (fn x => symbol x <> s);
+
 fun $$$ s = Scan.one (fn x => symbol x = s) >> single;
 fun ~$$$ s = Scan.one (fn x => symbol x <> s) >> single;
 
@@ -115,8 +114,10 @@
   Scan.one (fn (s, _) => s <> q andalso s <> "\\" andalso Symbol.is_regular s) >> single;
 
 fun scan_strs q err_prefix =
-  (scan_pos --| $$$ q) -- !!! (fn () => err_prefix ^ "missing quote at end of string")
-    (change_prompt ((Scan.repeat (scan_str q err_prefix) >> flat) -- ($$$ q |-- scan_pos)));
+  Scan.ahead ($$ q) |--
+    !!! (fn () => err_prefix ^ "unclosed string literal")
+      ((scan_pos --| $$$ q) --
+        (change_prompt ((Scan.repeat (scan_str q err_prefix) >> flat) -- ($$$ q |-- scan_pos))));
 
 fun recover_strs q =
   $$$ q @@@ (Scan.repeat (Scan.permissive (scan_str q "")) >> flat);
@@ -159,29 +160,20 @@
 
 (* nested text cartouches *)
 
-local
-
-val scan_cart =
-  Scan.depend (fn (d: int) => $$ "\\<open>" >> pair (d + 1)) ||
-  Scan.depend (fn 0 => Scan.fail | d => $$ "\\<close>" >> pair (d - 1)) ||
-  Scan.lift (Scan.one (fn (s, _) => s <> "\\<close>" andalso Symbol.is_regular s));
-
-val scan_carts = Scan.pass 0 (Scan.repeat scan_cart);
-
-val scan_body = change_prompt scan_carts;
+val scan_cartouche_depth =
+  Scan.repeat1 (Scan.depend (fn (d: int) =>
+    $$ "\\<open>" >> pair (d + 1) ||
+      (if d > 0 then
+        Scan.one (fn (s, _) => s <> "\\<close>" andalso Symbol.is_regular s) >> pair d ||
+        $$ "\\<close>" >> pair (d - 1)
+      else Scan.fail)));
 
-in
-
-fun scan_cartouche cut =
-  $$ "\\<open>" ::: cut "unclosed text cartouche" (scan_body @@@ $$$ "\\<close>");
+fun scan_cartouche err_prefix =
+  Scan.ahead ($$ "\\<open>") |--
+    !!! (fn () => err_prefix ^ "unclosed text cartouche")
+      (change_prompt (Scan.provide (fn d => d = 0) 0 scan_cartouche_depth));
 
-fun scan_cartouche_body cut =
-  $$ "\\<open>" |-- cut "unclosed text cartouche" (scan_body --| $$ "\\<close>");
-
-val recover_cartouche =
-  $$$ "\\<open>" @@@ scan_carts;
-
-end;
+val recover_cartouche = Scan.pass 0 scan_cartouche_depth;
 
 fun cartouche_content syms =
   let
@@ -214,11 +206,15 @@
 
 in
 
-fun scan_comment cut =
-  $$$ "(" @@@ $$$ "*" @@@ cut "missing end of comment" (scan_body @@@ $$$ "*" @@@ $$$ ")");
+fun scan_comment err_prefix =
+  Scan.ahead ($$ "(" -- $$ "*") |--
+    !!! (fn () => err_prefix ^ "unclosed comment")
+      ($$$ "(" @@@ $$$ "*" @@@ scan_body @@@ $$$ "*" @@@ $$$ ")");
 
-fun scan_comment_body cut =
-  $$$ "(" |-- $$$ "*" |-- cut "missing end of comment" (scan_body --| $$$ "*" --| $$$ ")");
+fun scan_comment_body err_prefix =
+  Scan.ahead ($$ "(" -- $$ "*") |--
+    !!! (fn () => err_prefix ^ "unclosed comment")
+      ($$ "(" |-- $$ "*" |-- scan_body --| $$ "*" --| $$ ")");
 
 val recover_comment =
   $$$ "(" @@@ $$$ "*" @@@ scan_cmts;
@@ -284,6 +280,8 @@
 
 structure Basic_Symbol_Pos =   (*not open by default*)
 struct
+  val $$ = Symbol_Pos.$$;
+  val ~$$ = Symbol_Pos.~$$;
   val $$$ = Symbol_Pos.$$$;
   val ~$$$ = Symbol_Pos.~$$$;
 end;
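
The single-symbol scanners $$ and ~$$ added here return one positioned symbol,
whereas the older $$$ and ~$$$ wrap the result in a singleton list; this is
what lets the later hunks combine them with -- and |-- instead of the list
concatenation @@@.  A hedged sketch of the difference (the bindings are
illustrative only):

  (* $$ "("  : Symbol_Pos.T list -> Symbol_Pos.T * Symbol_Pos.T list *)
  (* $$$ "(" : Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list *)
  val scan_comment_open  = $$ "(" -- $$ "*";      (* pair of single symbols, cf. scan_comment *)
  val scan_comment_open' = $$$ "(" @@@ $$$ "*";   (* concatenated singleton lists *)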
--- a/src/Pure/Isar/args.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/Isar/args.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -29,9 +29,9 @@
   val bracks: 'a parser -> 'a parser
   val mode: string -> bool parser
   val maybe: 'a parser -> 'a option parser
-  val cartouche_source: string parser
+  val cartouche_inner_syntax: string parser
   val cartouche_source_position: (Symbol_Pos.text * Position.T) parser
-  val name_source: string parser
+  val name_inner_syntax: string parser
   val name_source_position: (Symbol_Pos.text * Position.T) parser
   val name: string parser
   val binding: binding parser
@@ -151,10 +151,10 @@
 fun maybe scan = $$$ "_" >> K NONE || scan >> SOME;
 
 val cartouche = token Parse.cartouche;
-val cartouche_source = cartouche >> Token.source_of;
+val cartouche_inner_syntax = cartouche >> Token.inner_syntax_of;
 val cartouche_source_position = cartouche >> Token.source_position_of;
 
-val name_source = named >> Token.source_of;
+val name_inner_syntax = named >> Token.inner_syntax_of;
 val name_source_position = named >> Token.source_position_of;
 
 val name = named >> Token.content_of;
@@ -182,11 +182,11 @@
 val internal_attribute = value (fn Token.Attribute att => att);
 
 fun named_text intern = internal_text || named >> evaluate Token.Text (intern o Token.content_of);
-fun named_typ readT = internal_typ || named >> evaluate Token.Typ (readT o Token.source_of);
-fun named_term read = internal_term || named >> evaluate Token.Term (read o Token.source_of);
+fun named_typ readT = internal_typ || named >> evaluate Token.Typ (readT o Token.inner_syntax_of);
+fun named_term read = internal_term || named >> evaluate Token.Term (read o Token.inner_syntax_of);
 
 fun named_fact get = internal_fact || named >> evaluate Token.Fact (get o Token.content_of) ||
-  alt_string >> evaluate Token.Fact (get o Token.source_of);
+  alt_string >> evaluate Token.Fact (get o Token.inner_syntax_of);
 
 fun named_attribute att =
   internal_attribute ||
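
The renaming Args.name_source ~> Args.name_inner_syntax (and cartouche_source
~> cartouche_inner_syntax) is applied uniformly to all callers below.  As a
hedged sketch of the resulting idiom, an ML antiquotation that reads a term
from inner syntax might look as follows; the binding "my_term" is hypothetical
and only the parser name matters here:

  val _ =
    Theory.setup
      (ML_Antiquote.inline (Binding.name "my_term")   (* hypothetical binding *)
        (Args.context -- Scan.lift Args.name_inner_syntax >>
          (fn (ctxt, s) =>
            ML_Syntax.atomic (ML_Syntax.print_term (Syntax.read_term ctxt s)))));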
--- a/src/Pure/Isar/parse.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/Isar/parse.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -167,7 +167,7 @@
 
 fun position scan = (Scan.ahead not_eof >> Token.position_of) -- scan >> Library.swap;
 fun source_position atom = Scan.ahead atom |-- not_eof >> Token.source_position_of;
-fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.source_of;
+fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.inner_syntax_of;
 
 fun kind k =
   group (fn () => Token.str_of_kind k)
--- a/src/Pure/Isar/token.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/Isar/token.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -41,7 +41,7 @@
   val is_space: T -> bool
   val is_blank: T -> bool
   val is_newline: T -> bool
-  val source_of: T -> string
+  val inner_syntax_of: T -> string
   val source_position_of: T -> Symbol_Pos.text * Position.T
   val content_of: T -> string
   val unparse: T -> string
@@ -116,10 +116,10 @@
   | TypeVar => "schematic type variable"
   | Nat => "natural number"
   | Float => "floating-point number"
-  | String => "string"
+  | String => "quoted string"
   | AltString => "back-quoted string"
   | Verbatim => "verbatim text"
-  | Cartouche => "cartouche"
+  | Cartouche => "text cartouche"
   | Space => "white space"
   | Comment => "comment text"
   | InternalValue => "internal value"
@@ -206,7 +206,7 @@
 
 (* token content *)
 
-fun source_of (Token ((source, (pos, _)), (_, x), _)) =
+fun inner_syntax_of (Token ((source, (pos, _)), (_, x), _)) =
   if YXML.detect x then x
   else YXML.string_of (XML.Elem (Markup.token (Position.properties_of pos), [XML.Text source]));
 
@@ -321,13 +321,15 @@
 (* scan verbatim text *)
 
 val scan_verb =
-  $$$ "*" --| Scan.ahead (~$$$ "}") ||
+  $$$ "*" --| Scan.ahead (~$$ "}") ||
   Scan.one (fn (s, _) => s <> "*" andalso Symbol.is_regular s) >> single;
 
 val scan_verbatim =
-  (Symbol_Pos.scan_pos --| $$$ "{" --| $$$ "*") -- !!! "missing end of verbatim text"
-    (Symbol_Pos.change_prompt
-      ((Scan.repeat scan_verb >> flat) -- ($$$ "*" |-- $$$ "}" |-- Symbol_Pos.scan_pos)));
+  Scan.ahead ($$ "{" -- $$ "*") |--
+    !!! "unclosed verbatim text"
+      ((Symbol_Pos.scan_pos --| $$ "{" --| $$ "*") --
+        Symbol_Pos.change_prompt
+          ((Scan.repeat scan_verb >> flat) -- ($$ "*" |-- $$ "}" |-- Symbol_Pos.scan_pos)));
 
 val recover_verbatim =
   $$$ "{" @@@ $$$ "*" @@@ (Scan.repeat scan_verb >> flat);
@@ -336,7 +338,8 @@
 (* scan cartouche *)
 
 val scan_cartouche =
-  Symbol_Pos.scan_pos -- (Symbol_Pos.scan_cartouche_body !!! -- Symbol_Pos.scan_pos);
+  Symbol_Pos.scan_pos --
+    ((Symbol_Pos.scan_cartouche err_prefix >> Symbol_Pos.cartouche_content) -- Symbol_Pos.scan_pos);
 
 
 (* scan space *)
@@ -351,7 +354,7 @@
 (* scan comment *)
 
 val scan_comment =
-  Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body !!! -- Symbol_Pos.scan_pos);
+  Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body err_prefix -- Symbol_Pos.scan_pos);
 
 
 
--- a/src/Pure/ML/ml_antiquote.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/ML/ml_antiquote.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -99,14 +99,14 @@
 
   value (Binding.name "cpat")
     (Args.context --
-      Scan.lift Args.name_source >> uncurry Proof_Context.read_term_pattern >> (fn t =>
+      Scan.lift Args.name_inner_syntax >> uncurry Proof_Context.read_term_pattern >> (fn t =>
         "Thm.cterm_of (Proof_Context.theory_of ML_context) " ^
           ML_Syntax.atomic (ML_Syntax.print_term t))));
 
 
 (* type classes *)
 
-fun class syn = Args.context -- Scan.lift Args.name_source >> (fn (ctxt, s) =>
+fun class syn = Args.context -- Scan.lift Args.name_inner_syntax >> (fn (ctxt, s) =>
   Proof_Context.read_class ctxt s
   |> syn ? Lexicon.mark_class
   |> ML_Syntax.print_string);
@@ -116,13 +116,13 @@
   inline (Binding.name "class_syntax") (class true) #>
 
   inline (Binding.name "sort")
-    (Args.context -- Scan.lift Args.name_source >> (fn (ctxt, s) =>
+    (Args.context -- Scan.lift Args.name_inner_syntax >> (fn (ctxt, s) =>
       ML_Syntax.atomic (ML_Syntax.print_sort (Syntax.read_sort ctxt s)))));
 
 
 (* type constructors *)
 
-fun type_name kind check = Args.context -- Scan.lift (Parse.position Args.name_source)
+fun type_name kind check = Args.context -- Scan.lift (Parse.position Args.name_inner_syntax)
   >> (fn (ctxt, (s, pos)) =>
     let
       val Type (c, _) = Proof_Context.read_type_name_proper ctxt false s;
@@ -146,7 +146,7 @@
 
 (* constants *)
 
-fun const_name check = Args.context -- Scan.lift (Parse.position Args.name_source)
+fun const_name check = Args.context -- Scan.lift (Parse.position Args.name_inner_syntax)
   >> (fn (ctxt, (s, pos)) =>
     let
       val Const (c, _) = Proof_Context.read_const_proper ctxt false s;
@@ -169,7 +169,7 @@
       else error ("Unknown syntax const: " ^ quote c ^ Position.here pos))) #>
 
   inline (Binding.name "const")
-    (Args.context -- Scan.lift Args.name_source -- Scan.optional
+    (Args.context -- Scan.lift Args.name_inner_syntax -- Scan.optional
         (Scan.lift (Args.$$$ "(") |-- Parse.enum1' "," Args.typ --| Scan.lift (Args.$$$ ")")) []
       >> (fn ((ctxt, raw_c), Ts) =>
         let
--- a/src/Pure/ML/ml_lex.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/ML/ml_lex.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -137,7 +137,9 @@
 
 open Basic_Symbol_Pos;
 
-fun !!! msg = Symbol_Pos.!!! (fn () => "SML lexical error: " ^ msg);
+val err_prefix = "SML lexical error: ";
+
+fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg);
 
 
 (* blanks *)
@@ -241,8 +243,9 @@
   $$$ "#" @@@ $$$ "\"" @@@ scan_gaps @@@ Scan.optional (Scan.permissive scan_str @@@ scan_gaps) [];
 
 val scan_string =
-  $$$ "\"" @@@ !!! "missing quote at end of string"
-    ((Scan.repeat (scan_gap || scan_str) >> flat) @@@ $$$ "\"");
+  Scan.ahead ($$ "\"") |--
+    !!! "unclosed string literal"
+      ($$$ "\"" @@@ (Scan.repeat (scan_gap || scan_str) >> flat) @@@ $$$ "\"");
 
 val recover_string =
   $$$ "\"" @@@ (Scan.repeat (scan_gap || Scan.permissive scan_str) >> flat);
@@ -260,7 +263,7 @@
  (scan_char >> token Char ||
   scan_string >> token String ||
   scan_blanks1 >> token Space ||
-  Symbol_Pos.scan_comment !!! >> token Comment ||
+  Symbol_Pos.scan_comment err_prefix >> token Comment ||
   Scan.max token_leq
    (scan_keyword >> token Keyword)
    (scan_word >> token Word ||
@@ -304,9 +307,7 @@
           (SOME (false, fn msg => recover msg >> map Antiquote.Text))
         |> Source.exhaust
         |> tap (Position.reports_text o Antiquote.reports_of (single o report_of_token))
-        |> tap (List.app (fn Antiquote.Text tok => (check_content_of tok; warn tok) | _ => ())))
-      handle ERROR msg =>
-        cat_error msg ("The error(s) above occurred in ML source" ^ Position.here pos);
+        |> tap (List.app (fn Antiquote.Text tok => (check_content_of tok; warn tok) | _ => ())));
   in input @ termination end;
 
 end;
--- a/src/Pure/ML/ml_thms.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/ML/ml_thms.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -85,7 +85,7 @@
 
 val and_ = Args.$$$ "and";
 val by = Args.$$$ "by";
-val goal = Scan.unless (by || and_) Args.name_source;
+val goal = Scan.unless (by || and_) Args.name_inner_syntax;
 
 val _ = Theory.setup
   (ML_Context.add_antiq (Binding.name "lemma")
--- a/src/Pure/Syntax/lexicon.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/Syntax/lexicon.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -24,8 +24,8 @@
   val scan_tvar: Symbol_Pos.T list -> Symbol_Pos.T list * Symbol_Pos.T list
   val is_tid: string -> bool
   datatype token_kind =
-    Literal | IdentSy | LongIdentSy | VarSy | TFreeSy | TVarSy |
-    NumSy | FloatSy | XNumSy | StrSy | Cartouche | Space | Comment | EOF
+    Literal | IdentSy | LongIdentSy | VarSy | TFreeSy | TVarSy | NumSy |
+    FloatSy | XNumSy | StrSy | StringSy | Cartouche | Space | Comment | EOF
   datatype token = Token of token_kind * string * Position.range
   val str_of_token: token -> string
   val pos_of_token: token -> Position.T
@@ -42,8 +42,10 @@
   val matching_tokens: token * token -> bool
   val valued_token: token -> bool
   val predef_term: string -> token option
-  val implode_str: string list -> string
-  val explode_str: string -> string list
+  val implode_string: Symbol.symbol list -> string
+  val explode_string: string * Position.T -> Symbol_Pos.T list
+  val implode_str: Symbol.symbol list -> string
+  val explode_str: string * Position.T -> Symbol_Pos.T list
   val tokenize: Scan.lexicon -> bool -> Symbol_Pos.T list -> token list
   val read_indexname: string -> indexname
   val read_var: string -> term
@@ -87,7 +89,9 @@
 
 open Basic_Symbol_Pos;
 
-fun !!! msg = Symbol_Pos.!!! (fn () => "Inner lexical error: " ^ msg);
+val err_prefix = "Inner lexical error: ";
+
+fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg);
 
 val scan_id = Symbol_Pos.scan_ident;
 val scan_longid = scan_id @@@ (Scan.repeat1 ($$$ "." @@@ scan_id) >> flat);
@@ -114,8 +118,8 @@
 (** datatype token **)
 
 datatype token_kind =
-  Literal | IdentSy | LongIdentSy | VarSy | TFreeSy | TVarSy |
-  NumSy | FloatSy | XNumSy | StrSy | Cartouche | Space | Comment | EOF;
+  Literal | IdentSy | LongIdentSy | VarSy | TFreeSy | TVarSy | NumSy |
+  FloatSy | XNumSy | StrSy | StringSy | Cartouche | Space | Comment | EOF;
 
 datatype token = Token of token_kind * string * Position.range;
 
@@ -150,6 +154,7 @@
   ("float_token", FloatSy),
   ("xnum_token", XNumSy),
   ("str_token", StrSy),
+  ("string_token", StringSy),
   ("cartouche", Cartouche)];
 
 val terminals = map #1 terminal_kinds;
@@ -170,6 +175,7 @@
   | FloatSy => Markup.numeral
   | XNumSy => Markup.numeral
   | StrSy => Markup.inner_string
+  | StringSy => Markup.inner_string
   | Cartouche => Markup.inner_cartouche
   | Comment => Markup.inner_comment
   | _ => Markup.empty;
@@ -205,30 +211,45 @@
   | NONE => NONE);
 
 
-(* str tokens *)
+
+(** string literals **)
+
+fun explode_literal scan_body (str, pos) =
+  (case Scan.read Symbol_Pos.stopper scan_body (Symbol_Pos.explode (str, pos)) of
+    SOME ss => ss
+  | _ => error (err_prefix ^ "malformed string literal " ^ quote str ^ Position.here pos));
+
+
+(* string *)
+
+val scan_string = Scan.trace (Symbol_Pos.scan_string_qq err_prefix) >> #2;
+val scan_string_body = Symbol_Pos.scan_string_qq err_prefix >> (#1 o #2);
+
+fun implode_string ss = quote (implode (map (fn "\"" => "\\\"" | s => s) ss));
+val explode_string = explode_literal scan_string_body;
+
+
+(* str *)
 
 val scan_chr =
-  $$$ "\\" |-- $$$ "'" ||
+  $$ "\\" |-- $$$ "'" ||
   Scan.one
     ((fn s => s <> "\\" andalso s <> "'" andalso Symbol.is_regular s) o
       Symbol_Pos.symbol) >> single ||
-  $$$ "'" --| Scan.ahead (~$$$ "'");
+  $$$ "'" --| Scan.ahead (~$$ "'");
 
 val scan_str =
-  $$$ "'" @@@ $$$ "'" @@@ !!! "missing end of string"
-    ((Scan.repeat scan_chr >> flat) @@@ $$$ "'" @@@ $$$ "'");
+  Scan.ahead ($$ "'" -- $$ "'") |--
+    !!! "unclosed string literal"
+      ($$$ "'" @@@ $$$ "'" @@@ (Scan.repeat scan_chr >> flat) @@@ $$$ "'" @@@ $$$ "'");
 
 val scan_str_body =
-  $$$ "'" |-- $$$ "'" |-- !!! "missing end of string"
-    ((Scan.repeat scan_chr >> flat) --| $$$ "'" --| $$$ "'");
-
+  Scan.ahead ($$ "'" |-- $$ "'") |--
+    !!! "unclosed string literal"
+      ($$ "'" |-- $$ "'" |-- (Scan.repeat scan_chr >> flat) --| $$ "'" --| $$ "'");
 
-fun implode_str cs = enclose "''" "''" (implode (map (fn "'" => "\\'" | c => c) cs));
-
-fun explode_str str =
-  (case Scan.read Symbol_Pos.stopper scan_str_body (Symbol_Pos.explode (str, Position.none)) of
-    SOME cs => map Symbol_Pos.symbol cs
-  | _ => error ("Inner lexical error: literal string expected at " ^ quote str));
+fun implode_str ss = enclose "''" "''" (implode (map (fn "'" => "\\'" | s => s) ss));
+val explode_str = explode_literal scan_str_body;
 
 
 
@@ -258,17 +279,18 @@
     val scan_lit = Scan.literal lex >> token Literal;
 
     val scan_token =
-      Symbol_Pos.scan_cartouche !!! >> token Cartouche ||
-      Symbol_Pos.scan_comment !!! >> token Comment ||
+      Symbol_Pos.scan_cartouche err_prefix >> token Cartouche ||
+      Symbol_Pos.scan_comment err_prefix >> token Comment ||
       Scan.max token_leq scan_lit scan_val ||
+      scan_string >> token StringSy ||
       scan_str >> token StrSy ||
       Scan.many1 (Symbol.is_blank o Symbol_Pos.symbol) >> token Space;
   in
     (case Scan.error
         (Scan.finite Symbol_Pos.stopper (Scan.repeat scan_token)) syms of
       (toks, []) => toks
-    | (_, ss) => error ("Inner lexical error at: " ^ Symbol_Pos.content ss ^
-        Position.here (#1 (Symbol_Pos.range ss))))
+    | (_, ss) =>
+        error (err_prefix ^ Symbol_Pos.content ss ^ Position.here (#1 (Symbol_Pos.range ss))))
   end;
 
 
@@ -293,7 +315,7 @@
 
     val scan =
       (scan_id >> map Symbol_Pos.symbol) --
-      Scan.optional ($$$ "." |-- scan_nat >> (nat 0 o map Symbol_Pos.symbol)) ~1;
+      Scan.optional ($$ "." |-- scan_nat >> (nat 0 o map Symbol_Pos.symbol)) ~1;
   in
     scan >>
       (fn (cs, ~1) => chop_idx (rev cs) []
@@ -302,7 +324,7 @@
 
 in
 
-val scan_indexname = $$$ "'" |-- scan_vname >> (fn (x, i) => ("'" ^ x, i)) || scan_vname;
+val scan_indexname = $$ "'" |-- scan_vname >> (fn (x, i) => ("'" ^ x, i)) || scan_vname;
 
 end;
 
@@ -320,7 +342,7 @@
 fun read_var str =
   let
     val scan =
-      $$$ "?" |-- scan_indexname --| Scan.ahead (Scan.one Symbol_Pos.is_eof)
+      $$ "?" |-- scan_indexname --| Scan.ahead (Scan.one Symbol_Pos.is_eof)
         >> Syntax.var ||
       Scan.many (Symbol.is_regular o Symbol_Pos.symbol)
         >> (Syntax.free o implode o map Symbol_Pos.symbol);
@@ -330,7 +352,7 @@
 (* read_variable *)
 
 fun read_variable str =
-  let val scan = $$$ "?" |-- scan_indexname || scan_indexname
+  let val scan = $$ "?" |-- scan_indexname || scan_indexname
   in Scan.read Symbol_Pos.stopper scan (Symbol_Pos.explode (str, Position.none)) end;
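
Lexicon now provides two implode/explode pairs: implode_string/explode_string
for regular quoted string literals ("...") and implode_str/explode_str for the
traditional ''...'' tokens; both implode functions add the surrounding
delimiters and both explode functions expect them.  A hedged sketch of the
intended round-trips (values are illustrative, positions discarded via
Symbol_Pos.symbol):

  val s1 = Lexicon.implode_string ["a", "b", "c"];   (* yields "abc", quotes included *)
  val s2 = Lexicon.implode_str ["a", "b", "c"];      (* yields ''abc'', quotes included *)
  val ss1 = map Symbol_Pos.symbol (Lexicon.explode_string (s1, Position.none));
  val ss2 = map Symbol_Pos.symbol (Lexicon.explode_str (s2, Position.none));
  (* ss1 = ss2 = ["a", "b", "c"], assuming the scanners accept their own output *)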
 
 
--- a/src/Pure/Thy/thy_output.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/Thy/thy_output.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -570,9 +570,9 @@
   basic_entity (Binding.name "term_type") (Term_Style.parse -- Args.term) pretty_term_typ #>
   basic_entity (Binding.name "typeof") (Term_Style.parse -- Args.term) pretty_term_typeof #>
   basic_entity (Binding.name "const") (Args.const_proper false) pretty_const #>
-  basic_entity (Binding.name "abbrev") (Scan.lift Args.name_source) pretty_abbrev #>
+  basic_entity (Binding.name "abbrev") (Scan.lift Args.name_inner_syntax) pretty_abbrev #>
   basic_entity (Binding.name "typ") Args.typ_abbrev Syntax.pretty_typ #>
-  basic_entity (Binding.name "class") (Scan.lift Args.name_source) pretty_class #>
+  basic_entity (Binding.name "class") (Scan.lift Args.name_inner_syntax) pretty_class #>
   basic_entity (Binding.name "type") (Scan.lift Args.name) pretty_type #>
   basic_entity (Binding.name "text") (Scan.lift Args.name) pretty_text #>
   basic_entities (Binding.name "prf") Attrib.thms (pretty_prf false) #>
--- a/src/Pure/Tools/rail.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/Tools/rail.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -266,7 +266,7 @@
 
 val _ = Theory.setup
   (Thy_Output.antiquotation @{binding rail}
-    (Scan.lift (Parse.source_position Parse.string))
+    (Scan.lift (Parse.source_position (Parse.string || Parse.cartouche)))
     (fn {state, ...} => output_rules state o read));
 
 end;
--- a/src/Pure/Tools/rule_insts.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/Tools/rule_insts.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -165,7 +165,7 @@
 
 val _ = Theory.setup
   (Attrib.setup @{binding "where"}
-    (Scan.lift (Parse.and_list (Args.var -- (Args.$$$ "=" |-- Args.name_source))) >>
+    (Scan.lift (Parse.and_list (Args.var -- (Args.$$$ "=" |-- Args.name_inner_syntax))) >>
       (fn args =>
         Thm.rule_attribute (fn context => read_instantiate_mixed (Context.proof_of context) args)))
     "named instantiation of theorem");
@@ -175,7 +175,7 @@
 
 local
 
-val inst = Args.maybe Args.name_source;
+val inst = Args.maybe Args.name_inner_syntax;
 val concl = Args.$$$ "concl" -- Args.colon;
 
 val insts =
@@ -320,7 +320,7 @@
 fun method inst_tac tac =
   Args.goal_spec --
   Scan.optional (Scan.lift
-    (Parse.and_list1 (Args.var -- (Args.$$$ "=" |-- Parse.!!! Args.name_source)) --|
+    (Parse.and_list1 (Args.var -- (Args.$$$ "=" |-- Parse.!!! Args.name_inner_syntax)) --|
       Args.$$$ "in")) [] --
   Attrib.thms >>
   (fn ((quant, insts), thms) => fn ctxt => METHOD (fn facts =>
@@ -347,12 +347,12 @@
   Method.setup @{binding cut_tac} (method cut_inst_tac (K cut_rules_tac))
     "cut rule (dynamic instantiation)" #>
   Method.setup @{binding subgoal_tac}
-    (Args.goal_spec -- Scan.lift (Scan.repeat1 Args.name_source) >>
+    (Args.goal_spec -- Scan.lift (Scan.repeat1 Args.name_inner_syntax) >>
       (fn (quant, props) => fn ctxt =>
         SIMPLE_METHOD'' quant (EVERY' (map (subgoal_tac ctxt) props))))
     "insert subgoal (dynamic instantiation)" #>
   Method.setup @{binding thin_tac}
-    (Args.goal_spec -- Scan.lift Args.name_source >>
+    (Args.goal_spec -- Scan.lift Args.name_inner_syntax >>
       (fn (quant, prop) => fn ctxt => SIMPLE_METHOD'' quant (thin_tac ctxt prop)))
       "remove premise (dynamic instantiation)");
 
--- a/src/Pure/pure_thy.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Pure/pure_thy.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -70,8 +70,9 @@
         "args", "cargs", "pttrn", "pttrns", "idt", "idts", "aprop", "asms",
         "any", "prop'", "num_const", "float_const", "xnum_const", "num_position",
         "float_position", "xnum_position", "index", "struct", "tid_position",
-        "tvar_position", "id_position", "longid_position", "var_position", "str_position",
-        "cartouche_position", "type_name", "class_name"]))
+        "tvar_position", "id_position", "longid_position", "var_position",
+        "str_position", "string_position", "cartouche_position", "type_name",
+        "class_name"]))
   #> Sign.add_syntax_i (map (fn x => (x, typ "'a", NoSyn)) token_markers)
   #> Sign.add_syntax_i
    [("",            typ "prop' => prop",               Delimfix "_"),
@@ -155,6 +156,7 @@
     ("_position",   typ "longid => longid_position",   Delimfix "_"),
     ("_position",   typ "var => var_position",         Delimfix "_"),
     ("_position",   typ "str_token => str_position",   Delimfix "_"),
+    ("_position",   typ "string_token => string_position", Delimfix "_"),
     ("_position",   typ "cartouche => cartouche_position", Delimfix "_"),
     ("_type_constraint_", typ "'a",                    NoSyn),
     ("_context_const", typ "id_position => logic",     Delimfix "CONST _"),
--- a/src/Tools/induct_tacs.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/Tools/induct_tacs.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -122,13 +122,14 @@
 val opt_rules = Scan.option (rule_spec |-- Attrib.thms);
 
 val varss =
-  Parse.and_list' (Scan.repeat (Scan.unless rule_spec (Scan.lift (Args.maybe Args.name_source))));
+  Parse.and_list'
+    (Scan.repeat (Scan.unless rule_spec (Scan.lift (Args.maybe Args.name_inner_syntax))));
 
 in
 
 val setup =
   Method.setup @{binding case_tac}
-    (Args.goal_spec -- Scan.lift Args.name_source -- opt_rule >>
+    (Args.goal_spec -- Scan.lift Args.name_inner_syntax -- opt_rule >>
       (fn ((quant, s), r) => fn ctxt => SIMPLE_METHOD'' quant (gen_case_tac ctxt s r)))
     "unstructured case analysis on types" #>
   Method.setup @{binding induct_tac}
--- a/src/ZF/Tools/ind_cases.ML	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/ZF/Tools/ind_cases.ML	Wed Jan 22 17:14:27 2014 +0100
@@ -57,7 +57,7 @@
 
 val setup =
   Method.setup @{binding "ind_cases"}
-    (Scan.lift (Scan.repeat1 Args.name_source) >>
+    (Scan.lift (Scan.repeat1 Args.name_inner_syntax) >>
       (fn props => fn ctxt => Method.erule ctxt 0 (map (smart_cases ctxt) props)))
     "dynamic case analysis on sets";
 
--- a/src/ZF/UNITY/SubstAx.thy	Wed Jan 22 10:13:40 2014 +0100
+++ b/src/ZF/UNITY/SubstAx.thy	Wed Jan 22 17:14:27 2014 +0100
@@ -375,7 +375,7 @@
 *}
 
 method_setup ensures = {*
-    Args.goal_spec -- Scan.lift Args.name_source >>
+    Args.goal_spec -- Scan.lift Args.name_inner_syntax >>
     (fn (quant, s) => fn ctxt => SIMPLE_METHOD'' quant (ensures_tac ctxt s))
 *} "for proving progress properties"