tuned signature;
author      wenzelm
date        Wed Dec 03 14:04:38 2014 +0100 (2014-12-03)
changeset   59083  88b0b1f28adc
parent      59082  25501ba956ac
child       59084  f982f3072d79
src/Doc/antiquote_setup.ML
src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML
src/HOL/Tools/Lifting/lifting_setup.ML
src/HOL/Tools/try0.ML
src/Pure/Isar/outer_syntax.ML
src/Pure/Isar/outer_syntax.scala
src/Pure/Isar/token.ML
src/Pure/Isar/token.scala
src/Pure/PIDE/document.ML
src/Pure/PIDE/resources.ML
src/Pure/System/options.scala
src/Pure/Tools/build.scala
src/Pure/Tools/find_consts.ML
src/Pure/Tools/find_theorems.ML
src/Pure/Tools/update_cartouches.scala
src/Pure/Tools/update_header.scala
src/Pure/Tools/update_semicolons.scala
src/Tools/Code/code_target.ML
src/Tools/jEdit/src/token_markup.scala
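
Across all of these files the change is the same mechanical rename: the tokenizer entry points move out of Outer_Syntax into Token (Isabelle/ML: Outer_Syntax.scan becomes Token.explode; Isabelle/Scala: syntax.scan and syntax.scan_line become Token.explode and Token.explode_line, with the keyword table passed explicitly). A minimal Isabelle/Scala sketch of the call-site migration, assuming the isabelle package from this repository is on the classpath; the tokens_of wrapper is hypothetical, not part of the changeset:

  import isabelle._

  object Explode_Example
  {
    // Before this changeset: syntax.scan(input)
    // After it: the keyword table is passed explicitly to Token.explode.
    def tokens_of(syntax: Outer_Syntax, input: CharSequence): List[Token] =
      Token.explode(syntax.keywords, input)
  }
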
--- a/src/Doc/antiquote_setup.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Doc/antiquote_setup.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -204,7 +204,7 @@
     Keyword.is_command keywords name andalso
       let
         val markup =
-          Outer_Syntax.scan keywords Position.none name
+          Token.explode keywords Position.none name
           |> maps (Outer_Syntax.command_reports thy)
           |> map (snd o fst);
         val _ = Context_Position.reports ctxt (map (pair pos) markup);
--- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -534,8 +534,8 @@
   let
     fun do_method named_thms ctxt =
       let
-        val ref_of_str =
-          suffix ";" #> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.none
+        val ref_of_str = (* FIXME proper wrapper for parser combinators *)
+          suffix ";" #> Token.explode (Thy_Header.get_keywords' ctxt) Position.none
           #> Parse.xthm #> fst
         val thms = named_thms |> maps snd
         val facts = named_thms |> map (ref_of_str o fst o fst)
@@ -561,7 +561,7 @@
           let
             val (type_encs, lam_trans) =
               !meth
-              |> Outer_Syntax.scan (Thy_Header.get_keywords' ctxt) Position.start
+              |> Token.explode (Thy_Header.get_keywords' ctxt) Position.start
               |> filter Token.is_proper |> tl
               |> Metis_Tactic.parse_metis_options |> fst
               |>> the_default [ATP_Proof_Reconstruct.partial_typesN]
--- a/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -221,7 +221,7 @@
 
     val thy = Proof_Context.theory_of lthy
     val dummy_thm = Thm.transfer thy Drule.dummy_thm
-    val pointer = Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none bundle_name
+    val pointer = Token.explode (Thy_Header.get_keywords thy) Position.none bundle_name
     val restore_lifting_att = 
       ([dummy_thm], [Token.src ("Lifting.lifting_restore_internal", Position.none) pointer])
   in
--- a/src/HOL/Tools/try0.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/HOL/Tools/try0.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -43,7 +43,7 @@
 
 fun parse_method keywords s =
   enclose "(" ")" s
-  |> Outer_Syntax.scan keywords Position.start
+  |> Token.explode keywords Position.start
   |> filter Token.is_proper
   |> Scan.read Token.stopper Method.parse
   |> (fn SOME (Method.Source src, _) => src | _ => raise Fail "expected Source");
--- a/src/Pure/Isar/outer_syntax.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -19,7 +19,6 @@
     (bool -> local_theory -> Proof.state) parser -> unit
   val local_theory_to_proof: command_spec -> string ->
     (local_theory -> Proof.state) parser -> unit
-  val scan: Keyword.keywords -> Position.T -> string -> Token.T list
   val parse: theory -> Position.T -> string -> Toplevel.transition list
   val parse_tokens: theory -> Token.T list -> Toplevel.transition list
   val parse_spans: Token.T list -> Command_Span.span list
@@ -148,15 +147,6 @@
 
 (** toplevel parsing **)
 
-(* scan tokens *)
-
-fun scan keywords pos =
-  Source.of_string #>
-  Symbol.source #>
-  Token.source keywords pos #>
-  Source.exhaust;
-
-
 (* parse commands *)
 
 local
--- a/src/Pure/Isar/outer_syntax.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/outer_syntax.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -7,7 +7,6 @@
 package isabelle
 
 
-import scala.util.parsing.input.{Reader, CharSequenceReader}
 import scala.collection.mutable
 import scala.annotation.tailrec
 
@@ -184,33 +183,6 @@
   }
 
 
-  /* token language */
-
-  def scan(input: CharSequence): List[Token] =
-  {
-    val in: Reader[Char] = new CharSequenceReader(input)
-    Token.Parsers.parseAll(Token.Parsers.rep(Token.Parsers.token(keywords)), in) match {
-      case Token.Parsers.Success(tokens, _) => tokens
-      case _ => error("Unexpected failure of tokenizing input:\n" + input.toString)
-    }
-  }
-
-  def scan_line(input: CharSequence, context: Scan.Line_Context): (List[Token], Scan.Line_Context) =
-  {
-    var in: Reader[Char] = new CharSequenceReader(input)
-    val toks = new mutable.ListBuffer[Token]
-    var ctxt = context
-    while (!in.atEnd) {
-      Token.Parsers.parse(Token.Parsers.token_line(keywords, ctxt), in) match {
-        case Token.Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
-        case Token.Parsers.NoSuccess(_, rest) =>
-          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
-      }
-    }
-    (toks.toList, ctxt)
-  }
-
-
   /* command spans */
 
   def parse_spans(toks: List[Token]): List[Command_Span.Span] =
@@ -249,7 +221,7 @@
   }
 
   def parse_spans(input: CharSequence): List[Command_Span.Span] =
-    parse_spans(scan(input))
+    parse_spans(Token.explode(keywords, input))
 
 
   /* overall document structure */
--- a/src/Pure/Isar/token.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/token.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -89,6 +89,7 @@
   val source_strict: Keyword.keywords ->
     Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
       (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
+  val explode: Keyword.keywords -> Position.T -> string -> T list
   type 'a parser = T list -> 'a * T list
   type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list)
   val read_no_commands: Keyword.keywords -> 'a parser -> Symbol_Pos.T list -> 'a list
@@ -538,7 +539,7 @@
 fun token_range k (pos1, (ss, pos2)) =
   Token (Symbol_Pos.implode_range pos1 pos2 ss, (k, Symbol_Pos.content ss), Slot);
 
-fun scan keywords = !!! "bad input"
+fun scan_token keywords = !!! "bad input"
   (Symbol_Pos.scan_string_qq err_prefix >> token_range String ||
     Symbol_Pos.scan_string_bq err_prefix >> token_range Alt_String ||
     scan_verbatim >> token_range Verbatim ||
@@ -571,7 +572,7 @@
 
 fun source' strict keywords =
   let
-    val scan_strict = Scan.bulk (scan keywords);
+    val scan_strict = Scan.bulk (scan_token keywords);
     val scan = if strict then scan_strict else Scan.recover scan_strict recover;
   in Source.source Symbol_Pos.stopper scan end;
 
@@ -581,6 +582,15 @@
 end;
 
 
+(* explode *)
+
+fun explode keywords pos =
+  Source.of_string #>
+  Symbol.source #>
+  source keywords pos #>
+  Source.exhaust;
+
+
 
 (** parsers **)
 
--- a/src/Pure/Isar/token.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Isar/token.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -7,6 +7,10 @@
 package isabelle
 
 
+import scala.collection.mutable
+import scala.util.parsing.input
+
+
 object Token
 {
   /* tokens */
@@ -121,6 +125,34 @@
   }
 
 
+  /* explode */
+
+  def explode(keywords: Keyword.Keywords, inp: CharSequence): List[Token] =
+  {
+    val in: input.Reader[Char] = new input.CharSequenceReader(inp)
+    Parsers.parseAll(Parsers.rep(Parsers.token(keywords)), in) match {
+      case Parsers.Success(tokens, _) => tokens
+      case _ => error("Unexpected failure of tokenizing input:\n" + inp.toString)
+    }
+  }
+
+  def explode_line(keywords: Keyword.Keywords, inp: CharSequence, context: Scan.Line_Context)
+    : (List[Token], Scan.Line_Context) =
+  {
+    var in: input.Reader[Char] = new input.CharSequenceReader(inp)
+    val toks = new mutable.ListBuffer[Token]
+    var ctxt = context
+    while (!in.atEnd) {
+      Parsers.parse(Parsers.token_line(keywords, ctxt), in) match {
+        case Parsers.Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
+        case Parsers.NoSuccess(_, rest) =>
+          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
+      }
+    }
+    (toks.toList, ctxt)
+  }
+
+
   /* token reader */
 
   object Pos
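
The new Token.explode_line keeps the behaviour of the former Outer_Syntax.scan_line: it threads a Scan.Line_Context through consecutive lines, so tokens that span line breaks (comments, cartouches) are scanned correctly; the jEdit token markup at the end of this changeset depends on this. A hedged usage sketch: the tokenize_lines helper is hypothetical, and Scan.Finished is assumed to be the neutral initial line context, as used elsewhere in Isabelle/Scala.

  import isabelle._

  object Explode_Line_Example
  {
    // Tokenize a buffer line by line, carrying the scanner context across
    // line breaks (assumption: Scan.Finished is the neutral initial context).
    def tokenize_lines(keywords: Keyword.Keywords, lines: List[String])
      : List[List[Token]] =
    {
      var ctxt: Scan.Line_Context = Scan.Finished
      for (line <- lines) yield {
        val (toks, ctxt1) = Token.explode_line(keywords, line, ctxt)
        ctxt = ctxt1
        toks
      }
    }
  }
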
--- a/src/Pure/PIDE/document.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/PIDE/document.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -333,7 +333,7 @@
       val span =
         Lazy.lazy (fn () =>
           Position.setmp_thread_data (Position.id_only id)
-            (fn () => Outer_Syntax.scan (get_keywords ()) (Position.id id) text) ());
+            (fn () => Token.explode (get_keywords ()) (Position.id id) text) ());
       val _ =
         Position.setmp_thread_data (Position.id_only id)
           (fn () => Output.status (Markup.markup_only Markup.accepted)) ();
--- a/src/Pure/PIDE/resources.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/PIDE/resources.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -151,7 +151,7 @@
       fold (curry Keyword.merge_keywords o Thy_Header.get_keywords) parents
         (Keyword.add_keywords (#keywords header) Keyword.empty_keywords);
 
-    val toks = Outer_Syntax.scan keywords text_pos text;
+    val toks = Token.explode keywords text_pos text;
     val spans = Outer_Syntax.parse_spans toks;
     val elements = Thy_Syntax.parse_elements keywords spans;
 
--- a/src/Pure/System/options.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/System/options.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -108,7 +108,7 @@
     def parse_file(syntax: Outer_Syntax, parser: Parser[Options => Options],
       options: Options, file: Path): Options =
     {
-      val toks = syntax.scan(File.read(file))
+      val toks = Token.explode(syntax.keywords, File.read(file))
       val ops =
         parse_all(rep(parser), Token.reader(toks, file.implode)) match {
           case Success(result, _) => result
--- a/src/Pure/Tools/build.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/build.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -259,7 +259,7 @@
 
     def parse_entries(root: Path): List[(String, Session_Entry)] =
     {
-      val toks = root_syntax.scan(File.read(root))
+      val toks = Token.explode(root_syntax.keywords, File.read(root))
 
       parse_all(rep(chapter | session_entry), Token.reader(toks, root.implode)) match {
        case Success(result, _) =>
--- a/src/Pure/Tools/find_consts.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/find_consts.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -145,7 +145,7 @@
 in
 
 fun read_query pos str =
-  Outer_Syntax.scan query_keywords pos str
+  Token.explode query_keywords pos str
   |> filter Token.is_proper
   |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   |> #1;
--- a/src/Pure/Tools/find_theorems.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/find_theorems.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -531,7 +531,7 @@
 in
 
 fun read_query pos str =
-  Outer_Syntax.scan query_keywords pos str
+  Token.explode query_keywords pos str
   |> filter Token.is_proper
   |> Scan.error (Scan.finite Token.stopper (Parse.!!! (query --| Scan.ahead Parse.eof)))
   |> #1;
--- a/src/Pure/Tools/update_cartouches.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/update_cartouches.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -20,7 +20,7 @@
   {
     val text0 = File.read(path)
     val text1 =
-      (for (tok <- Outer_Syntax.empty.scan(text0).iterator)
+      (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator)
         yield {
           if (tok.kind == Token.Kind.ALT_STRING) cartouche(tok.content)
           else if (tok.kind == Token.Kind.VERBATIM) {
--- a/src/Pure/Tools/update_header.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/update_header.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -13,7 +13,7 @@
   {
     val text0 = File.read(path)
     val text1 =
-      (for (tok <- Outer_Syntax.empty.scan(text0).iterator)
+      (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator)
         yield { if (tok.source == "header") section else tok.source }).mkString
 
     if (text0 != text1) {
--- a/src/Pure/Tools/update_semicolons.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Pure/Tools/update_semicolons.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -13,7 +13,7 @@
   {
     val text0 = File.read(path)
    val text1 =
-      (for (tok <- Outer_Syntax.empty.scan(text0).iterator if tok.source != ";")
+      (for (tok <- Token.explode(Keyword.Keywords.empty, text0).iterator if tok.source != ";")
        yield tok.source).mkString
 
    if (text0 != text1) {
--- a/src/Tools/Code/code_target.ML	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Tools/Code/code_target.ML	Wed Dec 03 14:04:38 2014 +0100
@@ -696,7 +696,7 @@
     val thy = Thy_Info.get_theory thyname;
     val ctxt = Proof_Context.init_global thy;
     val parse = Scan.read Token.stopper (Parse.!!! code_exprP) o
-      (filter Token.is_proper o Outer_Syntax.scan (Thy_Header.get_keywords thy) Position.none);
+      (filter Token.is_proper o Token.explode (Thy_Header.get_keywords thy) Position.none);
   in case parse cmd_expr
    of SOME f => (writeln "Now generating code..."; f ctxt)
     | NONE => error ("Bad directive " ^ quote cmd_expr)
--- a/src/Tools/jEdit/src/token_markup.scala	Wed Dec 03 11:50:58 2014 +0100
+++ b/src/Tools/jEdit/src/token_markup.scala	Wed Dec 03 14:04:38 2014 +0100
@@ -218,7 +218,7 @@
     for {
       ctxt <- line_context.context
      text <- JEdit_Lib.try_get_text(buffer, JEdit_Lib.line_range(buffer, line))
-    } yield syntax.scan_line(text, ctxt)._1
+    } yield Token.explode_line(syntax.keywords, text, ctxt)._1
   }
 
   def line_token_iterator(
@@ -346,7 +346,7 @@
               (styled_tokens, new Line_Context(Some(ctxt1), structure))
 
             case (Some(ctxt), Some(syntax)) if syntax.has_tokens =>
-              val (tokens, ctxt1) = syntax.scan_line(line, ctxt)
+              val (tokens, ctxt1) = Token.explode_line(syntax.keywords, line, ctxt)
               val structure1 = syntax.line_structure(tokens, structure)
               val styled_tokens =
                 tokens.map(tok => (Rendering.token_markup(syntax, tok), tok.source))