author wenzelm
date Wed, 03 Dec 2014 14:04:38 +0100
changeset 59083 88b0b1f28adc
parent 59082 25501ba956ac
child 59084 f982f3072d79
tuned signature;
src/Doc/antiquote_setup.ML
src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML
src/HOL/Tools/Lifting/lifting_setup.ML
src/HOL/Tools/try0.ML
src/Pure/Isar/outer_syntax.ML
src/Pure/Isar/outer_syntax.scala
src/Pure/Isar/token.ML
src/Pure/Isar/token.scala
src/Pure/PIDE/document.ML
src/Pure/PIDE/resources.ML
src/Pure/System/options.scala
src/Pure/Tools/build.scala
src/Pure/Tools/find_consts.ML
src/Pure/Tools/find_theorems.ML
src/Pure/Tools/update_cartouches.scala
src/Pure/Tools/update_header.scala
src/Pure/Tools/update_semicolons.scala
src/Tools/Code/code_target.ML
src/Tools/jEdit/src/token_markup.scala
--- a/src/Doc/antiquote_setup.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Doc/antiquote_setup.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -204,7 +204,7 @@
     Keyword.is_command keywords name andalso
       let
         val markup =
-          Outer_Syntax.scan keywords Position.none name
+          Token.explode keywords Position.none name
           |> maps (Outer_Syntax.command_reports thy)
           |> map (snd o fst);
         val _ = Context_Position.reports ctxt (map (pair pos) markup);
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -534,8 +534,8 @@
   let
     fun do_method named_thms ctxt =
       let
-        val ref_of_str =
-          suffix ";" #> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.none
+        val ref_of_str = (* FIXME proper wrapper for parser combinators *)
+          suffix ";" #> Token.explode (Thy_Header.get_keywords' ctxt) Position.none
           #> Parse.xthm #> fst
         val thms = named_thms |> maps snd
         val facts = named_thms |> map (ref_of_str o fst o fst)
@@ -561,7 +561,7 @@
           let
             val (type_encs, lam_trans) =
               !meth
-              |> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.start
+              |> Token.explode (Thy_Header.get_keywords' ctxt) Position.start
               |> filter Token.is_proper |> tl
               |> Metis_Tactic.parse_metis_options |> fst
               |>> the_default [ATP_Proof_Reconstruct.partial_typesN]
--- a/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -221,7 +221,7 @@
 
     val thy = Proof_Context.theory_of lthy
     val dummy_thm = Thm.transfer thy Drule.dummy_thm
-    val pointer = Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none bundle_name
+    val pointer = Token.explode (Thy_Header.get_keywords thy) Position.none bundle_name
     val restore_lifting_att = 
       ([dummy_thm], [Token.src ("Lifting.lifting_restore_internal", Position.none) pointer])
   in
--- a/src/HOL/Tools/try0.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/HOL/Tools/try0.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -43,7 +43,7 @@
 
 fun parse_method keywords s =
   enclose "(" ")" s
-  |> Outer_Syntax.scan keywords Position.start
+  |> Token.explode keywords Position.start
   |> filter Token.is_proper
   |> Scan.read Token.stopper Method.parse
   |> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source");
--- a/src/Pure/Isar/outer_syntax.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -19,7 +19,6 @@
     (bool -> local_theory -> Proof.state) parser -> unit
   val local_theory_to_proof: command_spec -> string ->
     (local_theory -> Proof.state) parser -> unit
-  val scan: Keyword.keywords -> Position.T -> string -> Token.T list
   val parse: theory -> Position.T -> string -> Toplevel.transition list
   val parse_tokens: theory -> Token.T list -> Toplevel.transition list
   val parse_spans: Token.T list -> Command_Span.span list
@@ -148,15 +147,6 @@
 
 (** toplevel parsing **)
 
-(* scan tokens *)
-
-fun scan keywords pos =
-  Source.of_string #>
-  Symbol.source #>
-  Token.source keywords pos #>
-  Source.exhaust;
-
-
 (* parse commands *)
 
 local
--- a/src/Pure/Isar/outer_syntax.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -7,7 +7,6 @@
 package isabelle
 
 
-import scala.util.parsing.input.{Reader, CharSequenceReader}
 import scala.collection.mutable
 import scala.annotation.tailrec
 
@@ -184,33 +183,6 @@
   }
 
 
-  /* token language */
-
-  def scan(input: CharSequence): List[Token] =
-  {
-    val in: Reader[Char] = new CharSequenceReader(input)
-    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(keywords)), in) match {
-      case Token.Parsers.Success(tokens, _) => tokens
-      case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
-    }
-  }
-
-  def scan_line(input: CharSequence, context: Scan.Line_Context): (List[Token], Scan.Line_Context) =
-  {
-    var in: Reader[Char] = new CharSequenceReader(input)
-    val toks = new mutable.ListBuffer[Token]
-    var ctxt = context
-    while (!in.atEnd) {
-      Token.Parsers.parse(Token.Parsers.token_line(keywords, ctxt), in) match {
-        case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
-        case Token.Parsers.NoSuccess(_, rest) =>
-          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
-      }
-    }
-    (toks.toList, ctxt)
-  }
-
-
   /* command spans */
 
   def parse_spans(toks: List[Token]): List[Command_Span.Span] =
@@ -249,7 +221,7 @@
   }
 
   def parse_spans(input: CharSequence): List[Command_Span.Span] =
-    parse_spans(scan(input))
+    parse_spans(Token.explode(keywords, input))
 
 
   /* overall document structure */
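
For reference, callers outside Pure now tokenize without going through an Outer_Syntax instance. A minimal sketch of the adjusted call site, assuming Isabelle's Pure classes (isabelle.Token, isabelle.Outer_Syntax) on the classpath; the helper name tokenize is illustrative:

    // Sketch only: the token language now lives in Token itself.
    import isabelle._

    def tokenize(syntax: Outer_Syntax, input: CharSequence): List[Token] =
    {
      // formerly: syntax.scan(input)
      Token.explode(syntax.keywords, input)
    }
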
--- a/src/Pure/Isar/token.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/token.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -89,6 +89,7 @@
   val source_strict: Keyword.keywords ->
     Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
       (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
+  val explode: Keyword.keywords -> Position.T -> string -> T list
   type 'a parser = T list -> 'a * T list
   type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list)
   val read_no_commands: Keyword.keywords -> 'a parser -> Symbol_Pos.T list -> 'a list
@@ -538,7 +539,7 @@
 fun token_range k (pos1, (ss, pos2)) =
   Token (Symbol_Pos.implode_range pos1 pos2 ss, (k, Symbol_Pos.content ss), Slot);
 
-fun scan keywords = !!! "bad input"
+fun scan_token keywords = !!! "bad input"
   (Symbol_Pos.scan_string_qq err_prefix >> token_range String ||
     Symbol_Pos.scan_string_bq err_prefix >> token_range Alt_String ||
     scan_verbatim >> token_range Verbatim ||
@@ -571,7 +572,7 @@
 
 fun source' strict keywords =
   let
-    val scan_strict = Scan.bulk (scan keywords);
+    val scan_strict = Scan.bulk (scan_token keywords);
     val scan = if strict then scan_strict else Scan.recover scan_strict recover;
   in Source.source Symbol_Pos.stopper scan end;
 
@@ -581,6 +582,15 @@
 end;
 
 
+(* explode *)
+
+fun explode keywords pos =
+  Source.of_string #>
+  Symbol.source #>
+  source keywords pos #>
+  Source.exhaust;
+
+
 
 (** parsers **)
 
--- a/src/Pure/Isar/token.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/token.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -7,6 +7,10 @@
 package isabelle
 
 
+import scala.collection.mutable
+import scala.util.parsing.input
+
+
 object Token
 {
   /* tokens */
@@ -121,6 +125,34 @@
   }
 
 
+  /* explode */
+
+  def explode(keywords: Keyword.Keywords, inp: CharSequence): List[Token] =
+  {
+    val in: input.Reader[Char] = new input.CharSequenceReader(inp)
+    Parsers.parseAll(Parsers.rep(Parsers.token(keywords)), in) match {
+      case Parsers.Success(tokens, _) => tokens
+      case _ => error("Unexpected failure of tokenizing input:\n" + inp.toString)
+    }
+  }
+
+  def explode_line(keywords: Keyword.Keywords, inp: CharSequence, context: Scan.Line_Context)
+    : (List[Token], Scan.Line_Context) =
+  {
+    var in: input.Reader[Char] = new input.CharSequenceReader(inp)
+    val toks = new mutable.ListBuffer[Token]
+    var ctxt = context
+    while (!in.atEnd) {
+      Parsers.parse(Parsers.token_line(keywords, ctxt), in) match {
+        case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+        case Parsers.NoSuccess(_, rest) =>
+          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
+      }
+    }
+    (toks.toList, ctxt)
+  }
+
+
   /* token reader */
 
   object Pos
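
The line-oriented variant threads a Scan.Line_Context (e.g. an unclosed comment or string) across consecutive lines. A hedged sketch of how an incremental consumer would drive it, assuming Scan.Finished as the initial context; the helper name explode_lines is illustrative:

    // Sketch only: tokenize a buffer line by line, carrying the scanner
    // context from each line into the next.
    import isabelle._

    def explode_lines(keywords: Keyword.Keywords, lines: List[CharSequence])
      : List[List[Token]] =
    {
      var ctxt: Scan.Line_Context = Scan.Finished  // assumed initial context
      for (line <- lines) yield {
        val (toks, ctxt1) = Token.explode_line(keywords, line, ctxt)
        ctxt = ctxt1
        toks
      }
    }
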
--- a/src/Pure/PIDE/document.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/PIDE/document.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -333,7 +333,7 @@
       val span =
         Lazy.lazy (fn () =>
           Position.setmp_thread_data (Position.id_only id)
-            (fn () => Outer_Syntax.scan (get_keywords ()) (Position.id id) text) ());
+            (fn () => Token.explode (get_keywords ()) (Position.id id) text) ());
       val _ =
         Position.setmp_thread_data (Position.id_only id)
           (fn () => Output.status (Markup.markup_only Markup.accepted)) ();
--- a/src/Pure/PIDE/resources.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/PIDE/resources.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -151,7 +151,7 @@
       fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents
         (Keyword.add_keywords (#keywords header) Keyword.empty_keywords);
 
-    val toks = Outer_Syntax.scan keywords text_pos text;
+    val toks = Token.explode keywords text_pos text;
     val spans = Outer_Syntax.parse_spans toks;
     val elements = Thy_Syntax.parse_elements keywords spans;
 
--- a/src/Pure/System/options.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/System/options.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -108,7 +108,7 @@
     def parse_file(syntax: Outer_Syntax, parser: Parser[Options => Options],
       options: Options, file: Path): Options =
     {
-      val toks = syntax.scan(File.read(file))
+      val toks = Token.explode(syntax.keywords, File.read(file))
       val ops =
         parse_all(rep(parser), Token.reader(toks, file.implode)) match {
           case Success(result, _) => result
--- a/src/Pure/Tools/build.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/build.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -259,7 +259,7 @@
 
     def parse_entries(root: Path): List[(String, Session_Entry)] =
     {
-      val toks = root_syntax.scan(File.read(root))
+      val toks = Token.explode(root_syntax.keywords, File.read(root))
 
       parse_all(rep(chapter | session_entry), Token.reader(toks, root.implode)) match {
         case Success(result, _) =>
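
Both call sites above follow the same recipe: explode the file content into tokens, then hand a Token.reader to the parser combinators. A condensed sketch of that pipeline; parse_all, rep, Parser, and Success come from the surrounding Parse.Parser infrastructure, and the helper name parse_tokens is illustrative:

    // Sketch only: the explode/reader/parse pipeline shared by
    // Options.parse_file and Build.parse_entries.
    def parse_tokens[A](syntax: Outer_Syntax, parser: Parser[A], path: Path): List[A] =
    {
      val toks = Token.explode(syntax.keywords, File.read(path))
      parse_all(rep(parser), Token.reader(toks, path.implode)) match {
        case Success(result, _) => result
        case bad => error(bad.toString)
      }
    }
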
--- a/src/Pure/Tools/find_consts.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/find_consts.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -145,7 +145,7 @@
 in
 
 fun read_query pos str =
-  Outer_Syntax.scan query_keywords pos str
+  Token.explode query_keywords pos str
   |> filter Token.is_proper
   |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   |> #1;
--- a/src/Pure/Tools/find_theorems.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/find_theorems.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -531,7 +531,7 @@
 in
 
 fun read_query pos str =
-  Outer_Syntax.scan query_keywords pos str
+  Token.explode query_keywords pos str
   |> filter Token.is_proper
   |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   |> #1;
--- a/src/Pure/Tools/update_cartouches.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/update_cartouches.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -20,7 +20,7 @@
   {
     val text0 = File.read(path)
     val text1 =
-      (for (tok <- Outer_Syntax.empty.scan(text0).iterator)
+      (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator)
         yield {
           if (tok.kind == Token.Kind.ALT_STRING) cartouche(tok.content)
           else if (tok.kind == Token.Kind.VERBATIM) {
--- a/src/Pure/Tools/update_header.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/update_header.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -13,7 +13,7 @@
   {
     val text0 = File.read(path)
     val text1 =
-      (for (tok <- Outer_Syntax.empty.scan(text0).iterator)
+      (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator)
         yield { if (tok.source == "header") section else tok.source }).mkString
 
     if (text0 != text1) {
--- a/src/Pure/Tools/update_semicolons.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/update_semicolons.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -13,7 +13,7 @@
   {
     val text0 = File.read(path)
     val text1 =
-      (for (tok <- Outer_Syntax.empty.scan(text0).iterator if tok.source != ";")
+      (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator if tok.source != ";")
         yield tok.source).mkString
 
     if (text0 != text1) {
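
All three update tools tokenize with an empty keyword table, so nothing is classified as a command and concatenating each token's source reproduces the input text. A minimal sketch of that rewrite loop; the helper name rewrite and the token-to-string function f are illustrative:

    // Sketch only: keyword-free tokenization for source-to-source rewrites.
    def rewrite(path: Path, f: Token => String): String =
      (for (tok <- Token.explode(Keyword.Keywords.empty, File.read(path)).iterator)
        yield f(tok)).mkString

For update_semicolons, f would map ";" tokens to "" and return every other token's source unchanged.
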
--- a/src/Tools/Code/code_target.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Tools/Code/code_target.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -696,7 +696,7 @@
     val thy = Thy_Info.get_theory thyname;
     val ctxt = Proof_Context.init_global thy;
     val parse = Scan.read Token.stopper (Parse.!!! code_exprP) o
-      (filter Token.is_proper o Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none);
+      (filter Token.is_proper o Token.explode (Thy_Header.get_keywords thy) Position.none);
   in case parse cmd_expr
    of SOME f => (writeln "Now generating code..."; f ctxt)
     | NONE => error ("Bad directive " ^ quote cmd_expr)
--- a/src/Tools/jEdit/src/token_markup.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Tools/jEdit/src/token_markup.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -218,7 +218,7 @@
     for {
       ctxt <- line_context.context
       text <- JEdit_Lib.try_get_text(buffer, JEdit_Lib.line_range(buffer, line))
-    } yield syntax.scan_line(text, ctxt)._1
+    } yield Token.explode_line(syntax.keywords, text, ctxt)._1
   }
 
   def line_token_iterator(
@@ -346,7 +346,7 @@
               (styled_tokens, new Line_Context(Some(ctxt1), structure))
 
             case (Some(ctxt), Some(syntax)) if syntax.has_tokens =>
-              val (tokens, ctxt1) = syntax.scan_line(line, ctxt)
+              val (tokens, ctxt1) = Token.explode_line(syntax.keywords, line, ctxt)
               val structure1 = syntax.line_structure(tokens, structure)
               val styled_tokens =
                 tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source))