# HG changeset patch
# User wenzelm
# Date 1310131108 -7200
# Node ID 8a61f2441b62e452f249be428e55cc38fe181c21
# Parent  4068e95f1e43ad9d2dab974fcad8b628518cc11c
# Parent  c37a1f29bbc07aff7adbf9dfa88599fa1d925e1f
merged

diff -r 4068e95f1e43 -r 8a61f2441b62 src/HOL/Boogie/Tools/boogie_commands.ML
--- a/src/HOL/Boogie/Tools/boogie_commands.ML Fri Jul 08 12:18:46 2011 +0200
+++ b/src/HOL/Boogie/Tools/boogie_commands.ML Fri Jul 08 15:18:28 2011 +0200
@@ -22,15 +22,14 @@
         "expected file ending " ^ quote ext)
     val base_path = Path.explode base_name |> tap check_ext
-    val full_path = Thy_Load.check_file (Thy_Load.master_directory thy) base_path
+    val (text, thy') = Thy_Load.use_file base_path thy
-    val _ = Boogie_VCs.is_closed thy orelse
+    val _ = Boogie_VCs.is_closed thy' orelse
       error ("Found the beginning of a new Boogie environment, " ^
         "but another Boogie environment is still open.")
   in
-    thy
-    |> Boogie_Loader.load_b2i (not quiet) offsets full_path
-    |> Thy_Load.provide_file base_path
+    thy'
+    |> Boogie_Loader.parse_b2i (not quiet) offsets text
   end
diff -r 4068e95f1e43 -r 8a61f2441b62 src/HOL/Boogie/Tools/boogie_loader.ML
--- a/src/HOL/Boogie/Tools/boogie_loader.ML Fri Jul 08 12:18:46 2011 +0200
+++ b/src/HOL/Boogie/Tools/boogie_loader.ML Fri Jul 08 15:18:28 2011 +0200
@@ -7,6 +7,7 @@
 signature BOOGIE_LOADER =
 sig
   val load_b2i: bool -> (string * int) list -> Path.T -> theory -> theory
+  val parse_b2i: bool -> (string * int) list -> string -> theory -> theory
 end
 structure Boogie_Loader: BOOGIE_LOADER =
@@ -321,12 +322,12 @@
 datatype token = Token of string | Newline | EOF
-fun tokenize path =
+fun tokenize fold_lines input =
   let
     fun blank c = (c = #" " orelse c = #"\t" orelse c = #"\n" orelse c = #"\r")
     fun split line (i, tss) =
       (i + 1, map (pair i) (map Token (String.tokens blank line) @ [Newline]) :: tss)
-  in apsnd (flat o rev) (File.fold_lines split path (1, [])) end
+  in apsnd (flat o rev) (fold_lines split input (1, [])) end
 fun stopper i = Scan.stopper (K (i, EOF)) (fn (_, EOF) => true | _ => false)
@@ -336,8 +337,8 @@
 fun scan_fail msg = Scan.fail_with (scan_err msg)
-fun finite scan path =
-  let val (i, ts) = tokenize path
+fun finite scan fold_lines input =
+  let val (i, ts) = tokenize fold_lines input
   in
     (case Scan.error (Scan.finite (stopper i) scan) ts of
       (x, []) => x
@@ -591,6 +592,9 @@
       var_decls tds fds |-- vcs verbose offsets tds fds)))
-fun load_b2i verbose offsets path thy = finite (parse verbose offsets thy) path
+fun load_b2i verbose offsets path thy = finite (parse verbose offsets thy) File.fold_lines path
+
+fun parse_b2i verbose offsets text thy =
+  finite (parse verbose offsets thy) (fn f => fold f o String.tokens (fn c => c = #"\n")) text
 end
diff -r 4068e95f1e43 -r 8a61f2441b62 src/HOL/Quickcheck_Narrowing.thy
--- a/src/HOL/Quickcheck_Narrowing.thy Fri Jul 08 12:18:46 2011 +0200
+++ b/src/HOL/Quickcheck_Narrowing.thy Fri Jul 08 15:18:28 2011 +0200
@@ -5,9 +5,9 @@
 theory Quickcheck_Narrowing
 imports Quickcheck_Exhaustive
 uses
-  ("~~/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs")
-  ("~~/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs")
-  ("~~/src/HOL/Tools/Quickcheck/narrowing_generators.ML")
+  ("Tools/Quickcheck/PNF_Narrowing_Engine.hs")
+  ("Tools/Quickcheck/Narrowing_Engine.hs")
+  ("Tools/Quickcheck/narrowing_generators.ML")
 begin
 subsection {* Counterexample generator *}
@@ -352,9 +352,7 @@
 subsubsection {* Setting up the counterexample generator *}
-setup {* Thy_Load.provide_file (Path.explode ("~~/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs")) *}
-setup {*
Thy_Load.provide_file (Path.explode ("~~/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs")) *}
-use "~~/src/HOL/Tools/Quickcheck/narrowing_generators.ML"
+use "Tools/Quickcheck/narrowing_generators.ML"
 setup {* Narrowing_Generators.setup *}
diff -r 4068e95f1e43 -r 8a61f2441b62 src/HOL/SPARK/Tools/spark_commands.ML
--- a/src/HOL/SPARK/Tools/spark_commands.ML Fri Jul 08 12:18:46 2011 +0200
+++ b/src/HOL/SPARK/Tools/spark_commands.ML Fri Jul 08 15:18:28 2011 +0200
@@ -13,9 +13,10 @@
 structure SPARK_Commands: SPARK_COMMANDS =
 struct
+(* FIXME proper Thy_Load.use_file, also for fdl_path and rls_path (!?) *)
 fun spark_open (vc_name, prfx) thy =
   let
-    val vc_path = Thy_Load.check_file (Thy_Load.master_directory thy) (Path.explode vc_name);
+    val ((vc_path, vc_id), vc_text) = Thy_Load.load_file thy (Path.explode vc_name);
     val (base, header) =
       (case Path.split_ext vc_path of
         (base, "vcg") => (base, Fdl_Lexer.vcg_header >> K ())
@@ -27,7 +28,7 @@
     SPARK_VCs.set_vcs
       (snd (Fdl_Parser.parse_declarations (Path.position fdl_path) (File.read fdl_path)))
       (Fdl_Parser.parse_rules (Path.position rls_path) (File.read rls_path))
-      (snd (Fdl_Parser.parse_vcs header (Path.position vc_path) (File.read vc_path)))
+      (snd (Fdl_Parser.parse_vcs header (Path.position vc_path) vc_text))
       base prfx thy
   end;
diff -r 4068e95f1e43 -r 8a61f2441b62 src/HOL/SPARK/Tools/spark_vcs.ML
--- a/src/HOL/SPARK/Tools/spark_vcs.ML Fri Jul 08 12:18:46 2011 +0200
+++ b/src/HOL/SPARK/Tools/spark_vcs.ML Fri Jul 08 15:18:28 2011 +0200
@@ -886,7 +886,7 @@
     (proved, []) =>
       (Thm.join_proofs (maps (the o #2 o snd) proved);
        File.write (Path.ext "prv" path)
-        (concat (map (fn (s, _) => snd (strip_number s) ^
+        (implode (map (fn (s, _) => snd (strip_number s) ^
          " -- proved by " ^ Distribution.version ^ "\n") proved));
       {pfuns = pfuns, type_map = type_map, env = NONE})
   | (_, unproved) => err_vcs (map fst unproved))
diff -r 4068e95f1e43 -r 8a61f2441b62 src/HOL/Tools/Quickcheck/narrowing_generators.ML
--- a/src/HOL/Tools/Quickcheck/narrowing_generators.ML Fri Jul 08 12:18:46 2011 +0200
+++ b/src/HOL/Tools/Quickcheck/narrowing_generators.ML Fri Jul 08 15:18:28 2011 +0200
@@ -198,8 +198,13 @@
 (** invocation of Haskell interpreter **)
-val narrowing_engine = File.read (Path.explode "~~/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs")
-val pnf_narrowing_engine = File.read (Path.explode "~~/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs")
+val narrowing_engine =
+  Context.>>> (Context.map_theory_result
+    (Thy_Load.use_file (Path.explode "Tools/Quickcheck/Narrowing_Engine.hs")))
+
+val pnf_narrowing_engine =
+  Context.>>> (Context.map_theory_result
+    (Thy_Load.use_file (Path.explode "Tools/Quickcheck/PNF_Narrowing_Engine.hs")))
 fun exec verbose code =
   ML_Context.exec (fn () => Secure.use_text ML_Env.local_context (0, "generated code") verbose code)
diff -r 4068e95f1e43 -r 8a61f2441b62 src/HOL/Tools/sat_solver.ML
--- a/src/HOL/Tools/sat_solver.ML Fri Jul 08 12:18:46 2011 +0200
+++ b/src/HOL/Tools/sat_solver.ML Fri Jul 08 15:18:28 2011 +0200
@@ -797,6 +797,7 @@
   SatSolver.UNSATISFIABLE NONE =>
     (let
       (* string list *)
+      (* FIXME File.tmp_path (!?)
*) val proof_lines = ((split_lines o File.read) (Path.explode "resolve_trace")) handle IO.Io _ => raise INVALID_PROOF "Could not read file \"resolve_trace\"" fun cnf_number_of_clauses (Prop_Logic.And (fm1, fm2)) = diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/General/path.scala --- a/src/Pure/General/path.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/General/path.scala Fri Jul 08 15:18:28 2011 +0200 @@ -8,6 +8,9 @@ package isabelle +import scala.util.matching.Regex + + object Path { /* path elements */ @@ -139,6 +142,17 @@ prfx + Path.basic(s + "." + e) } + private val Ext = new Regex("(.*)\\.([^.]*)") + + def split_ext: (Path, String) = + { + val (prefix, base) = split_path + base match { + case Ext(b, e) => (prefix + Path.basic(b), e) + case _ => (Path.basic(base), "") + } + } + /* expand */ diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/General/scan.scala --- a/src/Pure/General/scan.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/General/scan.scala Fri Jul 08 15:18:28 2011 +0200 @@ -161,7 +161,7 @@ /* symbol range */ - def symbol_range(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] = + def symbol_range(pred: Symbol.Symbol => Boolean, min_count: Int, max_count: Int): Parser[String] = new Parser[String] { def apply(in: Input) = @@ -187,25 +187,30 @@ } }.named("symbol_range") - def one(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, 1) - def many(pred: String => Boolean): Parser[String] = symbol_range(pred, 0, Integer.MAX_VALUE) - def many1(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, Integer.MAX_VALUE) + def one(pred: Symbol.Symbol => Boolean): Parser[String] = + symbol_range(pred, 1, 1) + + def many(pred: Symbol.Symbol => Boolean): Parser[String] = + symbol_range(pred, 0, Integer.MAX_VALUE) + + def many1(pred: Symbol.Symbol => Boolean): Parser[String] = + symbol_range(pred, 1, Integer.MAX_VALUE) /* quoted strings */ - private def quoted_body(quote: String): Parser[String] = + private def quoted_body(quote: Symbol.Symbol): Parser[String] = { rep(many1(sym => sym != quote && sym != "\\") | "\\" + quote | "\\\\" | (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString) } - private def quoted(quote: String): Parser[String] = + private def quoted(quote: Symbol.Symbol): Parser[String] = { quote ~ quoted_body(quote) ~ quote ^^ { case x ~ y ~ z => x + y + z } }.named("quoted") - def quoted_content(quote: String, source: String): String = + def quoted_content(quote: Symbol.Symbol, source: String): String = { require(parseAll(quoted(quote), source).successful) val body = source.substring(1, source.length - 1) @@ -218,7 +223,7 @@ else body } - def quoted_context(quote: String, ctxt: Context): Parser[(String, Context)] = + def quoted_context(quote: Symbol.Symbol, ctxt: Context): Parser[(String, Context)] = { ctxt match { case Finished => @@ -335,11 +340,11 @@ string | (alt_string | (verb | cmt)) } - private def other_token(symbols: Symbol.Interpretation, is_command: String => Boolean) + private def other_token(is_command: String => Boolean) : Parser[Token] = { - val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y } - val nat = many1(symbols.is_digit) + val id = one(Symbol.is_letter) ~ many(Symbol.is_letdig) ^^ { case x ~ y => x + y } + val nat = many1(Symbol.is_digit) val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z } val id_nat = id ~ opt("." 
~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x } @@ -355,14 +360,14 @@ ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x)) val sym_ident = - (many1(symbols.is_symbolic_char) | one(sym => symbols.is_symbolic(sym))) ^^ + (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^ (x => Token(Token.Kind.SYM_IDENT, x)) - val space = many1(symbols.is_blank) ^^ (x => Token(Token.Kind.SPACE, x)) + val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x)) // FIXME check - val junk = many(sym => !(symbols.is_blank(sym))) - val junk1 = many1(sym => !(symbols.is_blank(sym))) + val junk = many(sym => !(Symbol.is_blank(sym))) + val junk1 = many1(sym => !(Symbol.is_blank(sym))) val bad_delimiter = ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.UNPARSED, x + y) } @@ -376,11 +381,10 @@ command_keyword) | bad)) } - def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] = - delimited_token | other_token(symbols, is_command) + def token(is_command: String => Boolean): Parser[Token] = + delimited_token | other_token(is_command) - def token_context(symbols: Symbol.Interpretation, is_command: String => Boolean, ctxt: Context) - : Parser[(Token, Context)] = + def token_context(is_command: String => Boolean, ctxt: Context): Parser[(Token, Context)] = { val string = quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) } @@ -388,7 +392,7 @@ quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) } val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) } val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) } - val other = other_token(symbols, is_command) ^^ { case x => (x, Finished) } + val other = other_token(is_command) ^^ { case x => (x, Finished) } string | (alt_string | (verb | (cmt | other))) } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/General/symbol.scala --- a/src/Pure/General/symbol.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/General/symbol.scala Fri Jul 08 15:18:28 2011 +0200 @@ -13,6 +13,9 @@ object Symbol { + type Symbol = String + + /* spaces */ val spc = ' ' @@ -64,10 +67,10 @@ def is_plain(c: Char): Boolean = !(c == '\r' || c == '\\' || '\ud800' <= c && c <= '\udfff') - def is_physical_newline(s: String): Boolean = + def is_physical_newline(s: Symbol): Boolean = s == "\n" || s == "\r" || s == "\r\n" - def is_malformed(s: String): Boolean = + def is_malformed(s: Symbol): Boolean = !(s.length == 1 && is_plain(s(0))) && malformed_symbol.pattern.matcher(s).matches class Matcher(text: CharSequence) @@ -85,13 +88,13 @@ } - /* efficient iterators */ + /* iterator */ - private val char_symbols: Array[String] = + private val char_symbols: Array[Symbol] = (0 until 256).iterator.map(i => new String(Array(i.toChar))).toArray - def iterator(text: CharSequence): Iterator[String] = - new Iterator[String] + def iterator(text: CharSequence): Iterator[Symbol] = + new Iterator[Symbol] { private val matcher = new Matcher(text) private var i = 0 @@ -203,16 +206,20 @@ - /** Symbol interpretation **/ + /** symbol interpretation **/ - class Interpretation(symbol_decls: List[String]) + private lazy val symbols = + new Interpretation( + Isabelle_System.try_read(Path.split(Isabelle_System.getenv_strict("ISABELLE_SYMBOLS")))) + + private class Interpretation(symbols_spec: String) { /* read symbols */ private val empty = new Regex("""(?xs) ^\s* (?: \#.* )? 
$ """) private val key = new Regex("""(?xs) (.+): """) - private def read_decl(decl: String): (String, Map[String, String]) = + private def read_decl(decl: String): (Symbol, Map[String, String]) = { def err() = error("Bad symbol declaration: " + decl) @@ -231,21 +238,21 @@ } } - private val symbols: List[(String, Map[String, String])] = + private val symbols: List[(Symbol, Map[String, String])] = Map(( - for (decl <- symbol_decls if !empty.pattern.matcher(decl).matches) + for (decl <- split_lines(symbols_spec) if !empty.pattern.matcher(decl).matches) yield read_decl(decl)): _*) toList /* misc properties */ - val names: Map[String, String] = + val names: Map[Symbol, String] = { val name = new Regex("""\\<\^?([A-Za-z][A-Za-z0-9_']*)>""") Map((for ((sym @ name(a), _) <- symbols) yield (sym -> a)): _*) } - val abbrevs: Map[String, String] = + val abbrevs: Map[Symbol, String] = Map(( for ((sym, props) <- symbols if props.isDefinedAt("abbrev")) yield (sym -> props("abbrev"))): _*) @@ -291,7 +298,7 @@ /* user fonts */ - val fonts: Map[String, String] = + val fonts: Map[Symbol, String] = recode_map(( for ((sym, props) <- symbols if props.isDefinedAt("font")) yield (sym -> props("font"))): _*) @@ -299,12 +306,10 @@ val font_names: List[String] = Set(fonts.toList.map(_._2): _*).toList val font_index: Map[String, Int] = Map((font_names zip (0 until font_names.length).toList): _*) - def lookup_font(sym: String): Option[Int] = fonts.get(sym).map(font_index(_)) - /* classification */ - private val letters = recode_set( + val letters = recode_set( "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", @@ -339,37 +344,20 @@ "\\<^isub>", "\\<^isup>") - private val blanks = + val blanks = recode_set(space, "\t", "\n", "\u000B", "\f", "\r", "\\", "\\<^newline>") - private val sym_chars = + val sym_chars = Set("!", "#", "$", "%", "&", "*", "+", "-", "/", "<", "=", ">", "?", "@", "^", "_", "|", "~") - def is_letter(sym: String): Boolean = letters.contains(sym) - def is_digit(sym: String): Boolean = sym.length == 1 && '0' <= sym(0) && sym(0) <= '9' - def is_quasi(sym: String): Boolean = sym == "_" || sym == "'" - def is_letdig(sym: String): Boolean = is_letter(sym) || is_digit(sym) || is_quasi(sym) - def is_blank(sym: String): Boolean = blanks.contains(sym) - def is_symbolic_char(sym: String): Boolean = sym_chars.contains(sym) - def is_symbolic(sym: String): Boolean = - sym.startsWith("\\<") && sym.endsWith(">") && !sym.startsWith("\\<^") - /* control symbols */ - private val ctrl_decoded: Set[String] = + val ctrl_decoded: Set[Symbol] = Set((for ((sym, _) <- symbols if sym.startsWith("\\<^")) yield decode(sym)): _*) - def is_ctrl(sym: String): Boolean = - sym.startsWith("\\<^") || ctrl_decoded.contains(sym) - - def is_controllable(sym: String): Boolean = - !is_blank(sym) && !is_ctrl(sym) && !is_malformed(sym) - - private val subscript_decoded = Set(decode("\\<^sub>"), decode("\\<^isub>")) - private val superscript_decoded = Set(decode("\\<^sup>"), decode("\\<^isup>")) - def is_subscript_decoded(sym: String): Boolean = subscript_decoded.contains(sym) - def is_superscript_decoded(sym: String): Boolean = superscript_decoded.contains(sym) + val subscript_decoded = Set(decode("\\<^sub>"), decode("\\<^isub>")) + val superscript_decoded = Set(decode("\\<^sup>"), decode("\\<^isup>")) val bold_decoded = decode("\\<^bold>") val bsub_decoded = decode("\\<^bsub>") @@ -377,4 +365,47 @@ 
val bsup_decoded = decode("\\<^bsup>") val esup_decoded = decode("\\<^esup>") } + + + /* tables */ + + def names: Map[Symbol, String] = symbols.names + def abbrevs: Map[Symbol, String] = symbols.abbrevs + + def decode(text: String): String = symbols.decode(text) + def encode(text: String): String = symbols.encode(text) + + def fonts: Map[Symbol, String] = symbols.fonts + def font_names: List[String] = symbols.font_names + def font_index: Map[String, Int] = symbols.font_index + def lookup_font(sym: Symbol): Option[Int] = symbols.fonts.get(sym).map(font_index(_)) + + + /* classification */ + + def is_letter(sym: Symbol): Boolean = symbols.letters.contains(sym) + def is_digit(sym: Symbol): Boolean = sym.length == 1 && '0' <= sym(0) && sym(0) <= '9' + def is_quasi(sym: Symbol): Boolean = sym == "_" || sym == "'" + def is_letdig(sym: Symbol): Boolean = is_letter(sym) || is_digit(sym) || is_quasi(sym) + def is_blank(sym: Symbol): Boolean = symbols.blanks.contains(sym) + def is_symbolic_char(sym: Symbol): Boolean = symbols.sym_chars.contains(sym) + def is_symbolic(sym: Symbol): Boolean = + sym.startsWith("\\<") && sym.endsWith(">") && !sym.startsWith("\\<^") + + + /* control symbols */ + + def is_ctrl(sym: Symbol): Boolean = + sym.startsWith("\\<^") || symbols.ctrl_decoded.contains(sym) + + def is_controllable(sym: Symbol): Boolean = + !is_blank(sym) && !is_ctrl(sym) && !is_malformed(sym) + + def is_subscript_decoded(sym: Symbol): Boolean = symbols.subscript_decoded.contains(sym) + def is_superscript_decoded(sym: Symbol): Boolean = symbols.superscript_decoded.contains(sym) + def is_bold_decoded(sym: Symbol): Boolean = sym == symbols.bold_decoded + def is_bsub_decoded(sym: Symbol): Boolean = sym == symbols.bsub_decoded + def is_esub_decoded(sym: Symbol): Boolean = sym == symbols.esub_decoded + def is_bsup_decoded(sym: Symbol): Boolean = sym == symbols.bsup_decoded + def is_esup_decoded(sym: Symbol): Boolean = sym == symbols.esup_decoded } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/General/xml_data.ML --- a/src/Pure/General/xml_data.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/General/xml_data.ML Fri Jul 08 15:18:28 2011 +0200 @@ -28,6 +28,8 @@ val dest_list: (XML.body -> 'a) -> XML.body -> 'a list val make_option: ('a -> XML.body) -> 'a option -> XML.body val dest_option: (XML.body -> 'a) -> XML.body -> 'a option + val make_variant: ('a -> XML.body) list -> 'a -> XML.body + val dest_variant: (XML.body -> 'a) list -> XML.body -> 'a end; structure XML_Data: XML_DATA = @@ -69,6 +71,11 @@ fun dest_node (XML.Elem ((":", []), ts)) = ts | dest_node t = raise XML_BODY [t]; +fun make_tagged (tag, ts) = XML.Elem ((make_int_atom tag, []), ts); + +fun dest_tagged (XML.Elem ((s, []), ts)) = (dest_int_atom s, ts) + | dest_tagged t = raise XML_BODY [t]; + (* representation of standard types *) @@ -115,14 +122,21 @@ fun dest_list dest ts = map (dest o dest_node) ts; -fun make_option make NONE = make_list make [] - | make_option make (SOME x) = make_list make [x]; +fun make_option _ NONE = [] + | make_option make (SOME x) = [make_node (make x)]; + +fun dest_option _ [] = NONE + | dest_option dest [t] = SOME (dest (dest_node t)) + | dest_option _ ts = raise XML_BODY ts; + -fun dest_option dest ts = - (case dest_list dest ts of - [] => NONE - | [x] => SOME x - | _ => raise XML_BODY ts); +fun make_variant makes x = + [make_tagged (the (get_index (fn make => try make x) makes))]; + +fun dest_variant dests [t] = + let val (tag, ts) = dest_tagged t + in nth dests tag ts end + | dest_variant _ ts = raise XML_BODY 
ts; end; diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/General/xml_data.scala --- a/src/Pure/General/xml_data.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/General/xml_data.scala Fri Jul 08 15:18:28 2011 +0200 @@ -55,6 +55,15 @@ case _ => throw new XML_Body(List(t)) } + private def make_tagged(tag: Int, ts: XML.Body): XML.Tree = + XML.Elem(Markup(make_int_atom(tag), Nil), ts) + + private def dest_tagged(t: XML.Tree): (Int, XML.Body) = + t match { + case XML.Elem(Markup(s, Nil), ts) => (dest_int_atom(s), ts) + case _ => throw new XML_Body(List(t)) + } + /* representation of standard types */ @@ -122,14 +131,29 @@ def make_option[A](make: A => XML.Body)(opt: Option[A]): XML.Body = opt match { - case None => make_list(make)(Nil) - case Some(x) => make_list(make)(List(x)) + case None => Nil + case Some(x) => List(make_node(make(x))) } def dest_option[A](dest: XML.Body => A)(ts: XML.Body): Option[A] = - dest_list(dest)(ts) match { + ts match { case Nil => None - case List(x) => Some(x) + case List(t) => Some(dest(dest_node(t))) + case _ => throw new XML_Body(ts) + } + + + def make_variant[A](makes: List[PartialFunction[A, XML.Body]])(x: A): XML.Body = + { + val (make, tag) = makes.iterator.zipWithIndex.find(p => p._1.isDefinedAt(x)).get + List(make_tagged(tag, make(x))) + } + + def dest_variant[A](dests: List[XML.Body => A])(ts: XML.Body): A = + ts match { + case List(t) => + val (tag, ts) = dest_tagged(t) + dests(tag)(ts) case _ => throw new XML_Body(ts) } } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/Isar/outer_syntax.scala --- a/src/Pure/Isar/outer_syntax.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/Isar/outer_syntax.scala Fri Jul 08 15:18:28 2011 +0200 @@ -11,11 +11,11 @@ import scala.collection.mutable -class Outer_Syntax(symbols: Symbol.Interpretation) +class Outer_Syntax { protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG)) protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty - lazy val completion: Completion = new Completion + symbols // FIXME odd initialization + lazy val completion: Completion = (new Completion).add_symbols // FIXME odd initialization def keyword_kind(name: String): Option[String] = keywords.get(name) @@ -24,7 +24,7 @@ val new_keywords = keywords + (name -> kind) val new_lexicon = lexicon + name val new_completion = completion + (name, replace) - new Outer_Syntax(symbols) { + new Outer_Syntax { override val lexicon = new_lexicon override val keywords = new_keywords override lazy val completion = new_completion @@ -66,7 +66,7 @@ { import lexicon._ - parseAll(rep(token(symbols, is_command)), input) match { + parseAll(rep(token(is_command)), input) match { case Success(tokens, _) => tokens case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString) } @@ -83,7 +83,7 @@ val toks = new mutable.ListBuffer[Token] var ctxt = context while (!in.atEnd) { - parse(token_context(symbols, is_command, ctxt), in) match { + parse(token_context(is_command, ctxt), in) match { case Success((x, c), rest) => { toks += x; ctxt = c; in = rest } case NoSuccess(_, rest) => error("Unexpected failure of tokenizing input:\n" + rest.source.toString) diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/ML-Systems/polyml_common.ML --- a/src/Pure/ML-Systems/polyml_common.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/ML-Systems/polyml_common.ML Fri Jul 08 15:18:28 2011 +0200 @@ -30,6 +30,7 @@ val _ = PolyML.Compiler.forgetValue "foldr"; val _ = PolyML.Compiler.forgetValue "print"; val _ = PolyML.Compiler.forgetValue "explode"; +val _ = 
PolyML.Compiler.forgetValue "concat"; (* Compiler options *) diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/ML/ml_context.ML --- a/src/Pure/ML/ml_context.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/ML/ml_context.ML Fri Jul 08 15:18:28 2011 +0200 @@ -33,7 +33,6 @@ Context.generic option -> (ML_Lex.token list * ML_Lex.token list) * Context.generic option val eval: bool -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit val eval_text: bool -> Position.T -> Symbol_Pos.text -> unit - val eval_file: Path.T -> unit val eval_in: Proof.context option -> bool -> Position.T -> ML_Lex.token Antiquote.antiquote list -> unit val eval_text_in: Proof.context option -> bool -> Position.T -> Symbol_Pos.text -> unit @@ -207,7 +206,6 @@ (* derived versions *) fun eval_text verbose pos txt = eval verbose pos (ML_Lex.read pos txt); -fun eval_file path = eval_text true (Path.position path) (File.read path); fun eval_in ctxt verbose pos ants = Context.setmp_thread_data (Option.map Context.Proof ctxt) (fn () => eval verbose pos ants) (); diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/PIDE/document.scala --- a/src/Pure/PIDE/document.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/PIDE/document.scala Fri Jul 08 15:18:28 2011 +0200 @@ -46,7 +46,10 @@ object Node { - val empty: Node = new Node(Linear_Set()) + class Header(val master_dir: Path, val thy_header: Exn.Result[Thy_Header.Header]) + val empty_header = new Header(Path.current, Exn.Exn(ERROR("Bad theory header"))) + + val empty: Node = new Node(empty_header, Linear_Set()) def command_starts(commands: Iterator[Command], offset: Text.Offset = 0) : Iterator[(Command, Text.Offset)] = @@ -62,8 +65,15 @@ private val block_size = 1024 - class Node(val commands: Linear_Set[Command]) + class Node(val header: Node.Header, val commands: Linear_Set[Command]) { + /* header */ + + def set_header(header: Node.Header): Node = new Node(header, commands) + + + /* commands */ + private lazy val full_index: (Array[(Command, Text.Offset)], Text.Range) = { val blocks = new mutable.ListBuffer[(Command, Text.Offset)] diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/System/isabelle_process.scala --- a/src/Pure/System/isabelle_process.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/System/isabelle_process.scala Fri Jul 08 15:18:28 2011 +0200 @@ -92,7 +92,7 @@ private def put_result(kind: String, text: String) { - put_result(kind, Nil, List(XML.Text(Isabelle_System.symbols.decode(text)))) + put_result(kind, Nil, List(XML.Text(Symbol.decode(text)))) } @@ -341,7 +341,7 @@ if (i != n) throw new Protocol_Error("bad message chunk content") - YXML.parse_body_failsafe(YXML.decode_chars(Isabelle_System.symbols.decode, buf, 0, n)) + YXML.parse_body_failsafe(YXML.decode_chars(Symbol.decode, buf, 0, n)) //}}} } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/System/isabelle_system.scala --- a/src/Pure/System/isabelle_system.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/System/isabelle_system.scala Fri Jul 08 15:18:28 2011 +0200 @@ -1,8 +1,8 @@ /* Title: Pure/System/isabelle_system.scala Author: Makarius -Fundamental Isabelle system environment: settings, symbols etc. -Quasi-static module with optional init operation. +Fundamental Isabelle system environment: quasi-static module with +optional init operation. 
*/ package isabelle @@ -24,10 +24,7 @@ { /** implicit state **/ - private case class State( - standard_system: Standard_System, - settings: Map[String, String], - symbols: Symbol.Interpretation) + private case class State(standard_system: Standard_System, settings: Map[String, String]) @volatile private var _state: Option[State] = None @@ -39,7 +36,6 @@ def standard_system: Standard_System = check_state().standard_system def settings: Map[String, String] = check_state().settings - def symbols: Symbol.Interpretation = check_state().symbols /* isabelle_home precedence: @@ -51,8 +47,9 @@ if (_state.isEmpty) { import scala.collection.JavaConversions._ + System.err.println("### Isabelle system initialization") + val standard_system = new Standard_System - val settings = { val env = Map(System.getenv.toList: _*) + @@ -89,17 +86,7 @@ ("PATH" -> System.getenv("PATH")) } } - - val symbols = - { - val isabelle_symbols = settings.getOrElse("ISABELLE_SYMBOLS", "") - if (isabelle_symbols == "") error("Undefined environment variable: ISABELLE_SYMBOLS") - val files = - Path.split(isabelle_symbols).map(p => new File(standard_system.jvm_path(p.implode))) - new Symbol.Interpretation(split_lines(Standard_System.try_read(files))) - } - - _state = Some(State(standard_system, settings, symbols)) + _state = Some(State(standard_system, settings)) } } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/System/session.scala --- a/src/Pure/System/session.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/System/session.scala Fri Jul 08 15:18:28 2011 +0200 @@ -117,7 +117,7 @@ @volatile var loaded_theories: Set[String] = Set() - @volatile private var syntax = new Outer_Syntax(Isabelle_System.symbols) + @volatile private var syntax = new Outer_Syntax def current_syntax(): Outer_Syntax = syntax @volatile private var reverse_syslog = List[XML.Elem]() @@ -159,10 +159,8 @@ /* actor messages */ private case object Interrupt_Prover - private case class Edit_Node(thy_name: String, - header: Exn.Result[Thy_Header.Header], edits: List[Text.Edit]) - private case class Init_Node(thy_name: String, - header: Exn.Result[Thy_Header.Header], text: String) + private case class Edit_Node(name: String, header: Document.Node.Header, edits: List[Text.Edit]) + private case class Init_Node(name: String, header: Document.Node.Header, text: String) private case class Start(timeout: Time, args: List[String]) var verbose: Boolean = false @@ -202,9 +200,7 @@ case Some(command) => if (global_state.peek().lookup_command(command.id).isEmpty) { global_state.change(_.define_command(command)) - prover.get. 
- define_command(command.id, - Isabelle_System.symbols.encode(command.source)) + prover.get.define_command(command.id, Symbol.encode(command.source)) } Some(command.id) } @@ -295,15 +291,17 @@ case Interrupt_Prover => prover.map(_.interrupt) - case Edit_Node(thy_name, header, text_edits) if prover.isDefined => - edit_version(List((thy_name, Some(text_edits)))) + case Edit_Node(name, header, text_edits) if prover.isDefined => + val node_name = (header.master_dir + Path.basic(name)).implode // FIXME + edit_version(List((node_name, Some(text_edits)))) reply(()) - case Init_Node(thy_name, header, text) if prover.isDefined => + case Init_Node(name, header, text) if prover.isDefined => // FIXME compare with existing node + val node_name = (header.master_dir + Path.basic(name)).implode // FIXME edit_version(List( - (thy_name, None), - (thy_name, Some(List(Text.Edit.insert(0, text)))))) + (node_name, None), + (node_name, Some(List(Text.Edit.insert(0, text)))))) reply(()) case change: Document.Change if prover.isDefined => @@ -343,14 +341,14 @@ def interrupt() { session_actor ! Interrupt_Prover } - def edit_node(thy_name: String, header: Exn.Result[Thy_Header.Header], edits: List[Text.Edit]) + def edit_node(name: String, header: Document.Node.Header, edits: List[Text.Edit]) { - session_actor !? Edit_Node(thy_name, header, edits) + session_actor !? Edit_Node(name, header, edits) } - def init_node(thy_name: String, header: Exn.Result[Thy_Header.Header], text: String) + def init_node(name: String, header: Document.Node.Header, text: String) { - session_actor !? Init_Node(thy_name, header, text) + session_actor !? Init_Node(name, header, text) } def snapshot(name: String, pending_edits: List[Text.Edit]): Document.Snapshot = diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/System/standard_system.scala --- a/src/Pure/System/standard_system.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/System/standard_system.scala Fri Jul 08 15:18:28 2011 +0200 @@ -96,16 +96,6 @@ def read_file(file: File): String = slurp(new FileInputStream(file)) - def try_read(files: Seq[File]): String = - { - val buf = new StringBuilder - for { - file <- files if file.isFile - c <- (Source.fromFile(file) ++ Iterator.single('\n')) - } buf.append(c) - buf.toString - } - def write_file(file: File, text: CharSequence) { val writer = diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/Thy/completion.scala --- a/src/Pure/Thy/completion.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/Thy/completion.scala Fri Jul 08 15:18:28 2011 +0200 @@ -62,15 +62,15 @@ def + (keyword: String): Completion = this + (keyword, keyword) - def + (symbols: Symbol.Interpretation): Completion = + def add_symbols: Completion = { val words = - (for ((x, _) <- symbols.names) yield (x, x)).toList ::: - (for ((x, y) <- symbols.names) yield (y, x)).toList ::: - (for ((x, y) <- symbols.abbrevs if Completion.is_word(y)) yield (y, x)).toList + (for ((x, _) <- Symbol.names) yield (x, x)).toList ::: + (for ((x, y) <- Symbol.names) yield (y, x)).toList ::: + (for ((x, y) <- Symbol.abbrevs if Completion.is_word(y)) yield (y, x)).toList val abbrs = - (for ((x, y) <- symbols.abbrevs if !Completion.is_word(y)) + (for ((x, y) <- Symbol.abbrevs if !Completion.is_word(y)) yield (y.reverse.toString, (y, x))).toList val old = this diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/Thy/html.scala --- a/src/Pure/Thy/html.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/Thy/html.scala Fri Jul 08 15:18:28 2011 +0200 @@ -57,8 +57,6 @@ def spans(input: XML.Tree, original_data: Boolean = 
false): XML.Body = { - val symbols = Isabelle_System.symbols - def html_spans(tree: XML.Tree): XML.Body = tree match { case XML.Elem(m @ Markup(name, props), ts) => @@ -85,14 +83,14 @@ val s1 = syms.next def s2() = if (syms.hasNext) syms.next else "" if (s1 == "\n") add(XML.elem(BR)) - else if (s1 == symbols.bsub_decoded) t ++= s1 // FIXME - else if (s1 == symbols.esub_decoded) t ++= s1 // FIXME - else if (s1 == symbols.bsup_decoded) t ++= s1 // FIXME - else if (s1 == symbols.esup_decoded) t ++= s1 // FIXME - else if (symbols.is_subscript_decoded(s1)) { add(hidden(s1)); add(sub(s2())) } - else if (symbols.is_superscript_decoded(s1)) { add(hidden(s1)); add(sup(s2())) } - else if (s1 == symbols.bold_decoded) { add(hidden(s1)); add(bold(s2())) } - else if (symbols.fonts.isDefinedAt(s1)) { add(user_font(symbols.fonts(s1), s1)) } + else if (Symbol.is_bsub_decoded(s1)) t ++= s1 // FIXME + else if (Symbol.is_esub_decoded(s1)) t ++= s1 // FIXME + else if (Symbol.is_bsup_decoded(s1)) t ++= s1 // FIXME + else if (Symbol.is_esup_decoded(s1)) t ++= s1 // FIXME + else if (Symbol.is_subscript_decoded(s1)) { add(hidden(s1)); add(sub(s2())) } + else if (Symbol.is_superscript_decoded(s1)) { add(hidden(s1)); add(sup(s2())) } + else if (Symbol.is_bold_decoded(s1)) { add(hidden(s1)); add(bold(s2())) } + else if (Symbol.fonts.isDefinedAt(s1)) { add(user_font(Symbol.fonts(s1), s1)) } else t ++= s1 } flush() diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/Thy/thy_header.scala --- a/src/Pure/Thy/thy_header.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/Thy/thy_header.scala Fri Jul 08 15:18:28 2011 +0200 @@ -25,12 +25,10 @@ val lexicon = Scan.Lexicon("%", "(", ")", ";", BEGIN, HEADER, IMPORTS, THEORY, USES) - final case class Header(val name: String, val imports: List[String], val uses: List[String]) + sealed case class Header(val name: String, val imports: List[String], val uses: List[String]) { - def decode_permissive_utf8: Header = - Header(Standard_System.decode_permissive_utf8(name), - imports.map(Standard_System.decode_permissive_utf8), - uses.map(Standard_System.decode_permissive_utf8)) + def map(f: String => String): Header = + Header(f(name), imports.map(f), uses.map(f)) } @@ -38,14 +36,10 @@ def thy_path(name: String): Path = Path.basic(name).ext("thy") - private val Thy_Path1 = new Regex("([^/]*)\\.thy") - private val Thy_Path2 = new Regex("(.*)/([^/]*)\\.thy") - - def split_thy_path(path: String): Option[(String, String)] = - path match { - case Thy_Path1(name) => Some(("", name)) - case Thy_Path2(dir, name) => Some((dir, name)) - case _ => None + def split_thy_path(path: Path): (Path, String) = + path.split_ext match { + case (path1, "thy") => (path1.dir, path1.base.implode) + case _ => error("Bad theory file specification: " + path) } @@ -75,7 +69,7 @@ def read(reader: Reader[Char]): Header = { - val token = lexicon.token(Isabelle_System.symbols, _ => false) + val token = lexicon.token(_ => false) val toks = new mutable.ListBuffer[Token] @tailrec def scan_to_begin(in: Reader[Char]) @@ -101,7 +95,7 @@ def read(file: File): Header = { val reader = Scan.byte_reader(file) - try { read(reader).decode_permissive_utf8 } + try { read(reader).map(Standard_System.decode_permissive_utf8) } finally { reader.close } } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/Thy/thy_load.ML --- a/src/Pure/Thy/thy_load.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/Thy/thy_load.ML Fri Jul 08 15:18:28 2011 +0200 @@ -12,9 +12,10 @@ val check_file: Path.T -> Path.T -> Path.T val check_thy: Path.T -> string -> {master: 
Path.T * SHA1.digest, text: string, imports: string list, uses: (Path.T * bool) list} + val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string + val use_file: Path.T -> theory -> string * theory val loaded_files: theory -> Path.T list val all_current: theory -> bool - val provide_file: Path.T -> theory -> theory val use_ml: Path.T -> unit val exec_ml: Path.T -> generic_theory -> generic_theory val begin_theory: Path.T -> string -> string list -> theory list -> (Path.T * bool) list -> theory @@ -71,12 +72,6 @@ fun check_file dir file = File.check_file (File.full_path dir file); -fun digest_file dir file = - let - val full_path = check_file dir file; - val id = SHA1.digest (File.read full_path); - in (full_path, id) end; - fun check_thy dir name = let val master_file = check_file dir (Thy_Header.thy_path name); @@ -88,7 +83,20 @@ in {master = (master_file, SHA1.digest text), text = text, imports = imports, uses = uses} end; -(* loaded files *) +(* load files *) + +fun load_file thy src_path = + let + val full_path = check_file (master_directory thy) src_path; + val text = File.read full_path; + val id = SHA1.digest text; + in ((full_path, id), text) end; + +fun use_file src_path thy = + let + val (path_id, text) = load_file thy src_path; + val thy' = provide (src_path, path_id) thy; + in (text, thy') end; val loaded_files = map (#1 o #2) o #provided o Files.get; @@ -108,11 +116,11 @@ fun all_current thy = let - val {master_dir, provided, ...} = Files.get thy; + val {provided, ...} = Files.get thy; fun current (src_path, (_, id)) = - (case try (digest_file master_dir) src_path of + (case try (load_file thy) src_path of NONE => false - | SOME (_, id') => id = id'); + | SOME ((_, id'), _) => id = id'); in can check_loaded thy andalso forall current provided end; val _ = Context.>> (Context.map_theory (Theory.at_end (fn thy => (check_loaded thy; NONE)))); @@ -120,18 +128,18 @@ (* provide files *) -fun provide_file src_path thy = - provide (src_path, digest_file (master_directory thy) src_path) thy; +fun eval_file path text = ML_Context.eval_text true (Path.position path) text; fun use_ml src_path = if is_none (Context.thread_data ()) then - ML_Context.eval_file (check_file Path.current src_path) + let val path = check_file Path.current src_path + in eval_file path (File.read path) end else let val thy = ML_Context.the_global_context (); - val (path, id) = digest_file (master_directory thy) src_path; - val _ = ML_Context.eval_file path; + val ((path, id), text) = load_file thy src_path; + val _ = eval_file path text; val _ = Context.>> Local_Theory.propagate_ml_env; val provide = provide (src_path, (path, id)); diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/Thy/thy_syntax.scala --- a/src/Pure/Thy/thy_syntax.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Pure/Thy/thy_syntax.scala Fri Jul 08 15:18:28 2011 +0200 @@ -181,7 +181,8 @@ nodes -= name case (name, Some(text_edits)) => - val commands0 = nodes(name).commands + val node = nodes(name) + val commands0 = node.commands val commands1 = edit_text(text_edits, commands0) val commands2 = recover_spans(commands1) // FIXME somewhat slow @@ -193,7 +194,7 @@ inserted_commands.map(cmd => (commands2.prev(cmd), Some(cmd))) doc_edits += (name -> Some(cmd_edits)) - nodes += (name -> new Document.Node(commands2)) + nodes += (name -> new Document.Node(node.header, commands2)) } (doc_edits.toList, new Document.Version(Document.new_id(), nodes)) } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Pure/build-jars --- a/src/Pure/build-jars Fri Jul 08 12:18:46 2011 
+0200 +++ b/src/Pure/build-jars Fri Jul 08 15:18:28 2011 +0200 @@ -138,9 +138,7 @@ if [ "$OUTDATED" = true ] then - echo "###" echo "### Building Isabelle/Scala layer ..." - echo "###" [ "${#UPDATED[@]}" -gt 0 ] && { echo "Changed files:" diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/WWW_Find/http_util.ML --- a/src/Tools/WWW_Find/http_util.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/WWW_Find/http_util.ML Fri Jul 08 15:18:28 2011 +0200 @@ -17,7 +17,7 @@ val crlf = "\r\n"; -fun make_header_field (name, value) = concat [name, ": ", value, crlf]; +fun make_header_field (name, value) = implode [name, ": ", value, crlf]; fun reply_header (status, content_type, extra_fields) = let @@ -25,9 +25,9 @@ val reason = HttpStatus.to_reason status; val show_content_type = pair "Content-Type" o Mime.show_type; in - concat + implode (map make_header_field - (("Status", concat [code, " ", reason]) + (("Status", implode [code, " ", reason]) :: (the_list o Option.map show_content_type) content_type @ extra_fields) @ [crlf]) @@ -89,7 +89,7 @@ val make_query_string = Symtab.dest #> map join_pairs - #> String.concatWith "&"; + #> space_implode "&"; end; diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/WWW_Find/mime.ML --- a/src/Tools/WWW_Find/mime.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/WWW_Find/mime.ML Fri Jul 08 15:18:28 2011 +0200 @@ -37,10 +37,10 @@ #> apsnd (Substring.triml 1) #> pairself (Substring.string o strip); -fun show_param (n, v) = concat ["; ", n, "=", v]; +fun show_param (n, v) = implode ["; ", n, "=", v]; fun show_type (Type {main, sub, params}) = - concat ([main, "/", sub] @ map show_param params); + implode ([main, "/", sub] @ map show_param params); fun parse_type s = (case Substring.fields (Char.contains "/;") (Substring.full s) of diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/WWW_Find/scgi_req.ML --- a/src/Tools/WWW_Find/scgi_req.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/WWW_Find/scgi_req.ML Fri Jul 08 15:18:28 2011 +0200 @@ -129,7 +129,7 @@ fun show (n, v) r = ["\t", n, " = \"", to_string v, "\"\n"] @ r; in Symtab.fold show table [] end; in - concat + implode (["path_info: \"", path_info, "\"\n", "path_translated: \"", path_translated, "\"\n", "script_name: \"", script_name, "\"\n", diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/WWW_Find/socket_util.ML --- a/src/Tools/WWW_Find/socket_util.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/WWW_Find/socket_util.ML Fri Jul 08 15:18:28 2011 +0200 @@ -41,7 +41,7 @@ val (haddr, port) = INetSock.fromAddr (Socket.Ctl.getSockName sock); val sock_name = - String.concat [ NetHostDB.toString haddr, ":", string_of_int port ]; + implode [ NetHostDB.toString haddr, ":", string_of_int port ]; val rd = BinPrimIO.RD { diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/WWW_Find/xhtml.ML --- a/src/Tools/WWW_Find/xhtml.ML Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/WWW_Find/xhtml.ML Fri Jul 08 15:18:28 2011 +0200 @@ -169,7 +169,7 @@ fun append (Tag (nm, atts, inner)) inner' = Tag (nm, atts, inner @ inner') | append _ _ = raise Fail "cannot append to a text element"; -fun show_att (n, v) = concat [" ", n, "=\"", XML.text v, "\""]; +fun show_att (n, v) = implode [" ", n, "=\"", XML.text v, "\""]; fun show_text (Text t) = XML.text t | show_text (RawText t) = t diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/jEdit/lib/Tools/jedit --- a/src/Tools/jEdit/lib/Tools/jedit Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/jEdit/lib/Tools/jedit Fri Jul 08 15:18:28 2011 +0200 @@ -202,9 +202,7 @@ if [ "$OUTDATED" = true ] then - echo "###" echo "### 
Building Isabelle/jEdit ..." - echo "###" [ "${#UPDATED[@]}" -gt 0 ] && { echo "Changed files:" diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/jEdit/src/document_model.scala --- a/src/Tools/jEdit/src/document_model.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/jEdit/src/document_model.scala Fri Jul 08 15:18:28 2011 +0200 @@ -45,7 +45,7 @@ } } - def init(session: Session, buffer: Buffer, master_dir: String, thy_name: String): Document_Model = + def init(session: Session, buffer: Buffer, master_dir: Path, thy_name: String): Document_Model = { exit(buffer) val model = new Document_Model(session, buffer, master_dir, thy_name) @@ -57,14 +57,13 @@ class Document_Model(val session: Session, - val buffer: Buffer, val master_dir: String, val thy_name: String) + val buffer: Buffer, val master_dir: Path, val thy_name: String) { /* pending text edits */ - private def capture_header(): Exn.Result[Thy_Header.Header] = - Exn.capture { - Thy_Header.check(thy_name, buffer.getSegment(0, buffer.getLength)) - } + private def node_header(): Document.Node.Header = + new Document.Node.Header(master_dir, + Exn.capture { Thy_Header.check(thy_name, buffer.getSegment(0, buffer.getLength)) }) private object pending_edits // owned by Swing thread { @@ -78,14 +77,14 @@ case Nil => case edits => pending.clear - session.edit_node(master_dir + "/" + thy_name, capture_header(), edits) + session.edit_node(thy_name, node_header(), edits) } } def init() { flush() - session.init_node(master_dir + "/" + thy_name, capture_header(), Isabelle.buffer_text(buffer)) + session.init_node(thy_name, node_header(), Isabelle.buffer_text(buffer)) } private val delay_flush = @@ -105,7 +104,8 @@ def snapshot(): Document.Snapshot = { Swing_Thread.require() - session.snapshot(master_dir + "/" + thy_name, pending_edits.snapshot()) + val node_name = (master_dir + Path.basic(thy_name)).implode // FIXME + session.snapshot(node_name, pending_edits.snapshot()) } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/jEdit/src/isabelle_encoding.scala --- a/src/Tools/jEdit/src/isabelle_encoding.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/jEdit/src/isabelle_encoding.scala Fri Jul 08 15:18:28 2011 +0200 @@ -34,7 +34,7 @@ private def text_reader(in: InputStream, codec: Codec): Reader = { val source = new BufferedSource(in)(codec) - new CharArrayReader(Isabelle_System.symbols.decode(source.mkString).toArray) + new CharArrayReader(Symbol.decode(source.mkString).toArray) } override def getTextReader(in: InputStream): Reader = @@ -53,7 +53,7 @@ val buffer = new ByteArrayOutputStream(BUFSIZE) { override def flush() { - val text = Isabelle_System.symbols.encode(toString(Standard_System.charset_name)) + val text = Symbol.encode(toString(Standard_System.charset_name)) out.write(text.getBytes(Standard_System.charset)) out.flush() } diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/jEdit/src/isabelle_sidekick.scala --- a/src/Tools/jEdit/src/isabelle_sidekick.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/jEdit/src/isabelle_sidekick.scala Fri Jul 08 15:18:28 2011 +0200 @@ -96,7 +96,7 @@ case Some((word, cs)) => val ds = (if (Isabelle_Encoding.is_active(buffer)) - cs.map(Isabelle_System.symbols.decode(_)).sortWith(_ < _) + cs.map(Symbol.decode(_)).sortWith(_ < _) else cs).filter(_ != word) if (ds.isEmpty) null else new SideKickCompletion( diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/jEdit/src/plugin.scala --- a/src/Tools/jEdit/src/plugin.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/jEdit/src/plugin.scala Fri Jul 08 15:18:28 2011 +0200 @@ -199,10 
+199,15 @@ case Some(model) => Some(model) case None => // FIXME strip protocol prefix of URL - Thy_Header.split_thy_path(Isabelle_System.posix_path(buffer.getPath)) match { - case Some((master_dir, thy_name)) => - Some(Document_Model.init(session, buffer, master_dir, thy_name)) - case None => None + { + try { + Some(Thy_Header.split_thy_path( + Path.explode(Isabelle_System.posix_path(buffer.getPath)))) + } + catch { case _ => None } + } map { + case (master_dir, thy_name) => + Document_Model.init(session, buffer, master_dir, thy_name) } } if (opt_model.isDefined) { diff -r 4068e95f1e43 -r 8a61f2441b62 src/Tools/jEdit/src/token_markup.scala --- a/src/Tools/jEdit/src/token_markup.scala Fri Jul 08 12:18:46 2011 +0200 +++ b/src/Tools/jEdit/src/token_markup.scala Fri Jul 08 15:18:28 2011 +0200 @@ -81,9 +81,8 @@ class Style_Extender extends SyntaxUtilities.StyleExtender { - val symbols = Isabelle_System.symbols - if (symbols.font_names.length > 2) - error("Too many user symbol fonts (max 2 permitted): " + symbols.font_names.mkString(", ")) + if (Symbol.font_names.length > 2) + error("Too many user symbol fonts (max 2 permitted): " + Symbol.font_names.mkString(", ")) override def extendStyles(styles: Array[SyntaxStyle]): Array[SyntaxStyle] = { @@ -94,7 +93,7 @@ new_styles(subscript(i.toByte)) = script_style(style, -1) new_styles(superscript(i.toByte)) = script_style(style, 1) new_styles(bold(i.toByte)) = bold_style(style) - for ((family, idx) <- symbols.font_index) + for ((family, idx) <- Symbol.font_index) new_styles(user_font(idx, i.toByte)) = font_style(style, imitate_font(family, _)) } new_styles(hidden) = @@ -108,13 +107,11 @@ def extended_styles(text: CharSequence): Map[Text.Offset, Byte => Byte] = { - val symbols = Isabelle_System.symbols - - // FIXME symbols.bsub_decoded etc. + // FIXME Symbol.is_bsub_decoded etc. def ctrl_style(sym: String): Option[Byte => Byte] = - if (symbols.is_subscript_decoded(sym)) Some(subscript(_)) - else if (symbols.is_superscript_decoded(sym)) Some(superscript(_)) - else if (sym == symbols.bold_decoded) Some(bold(_)) + if (Symbol.is_subscript_decoded(sym)) Some(subscript(_)) + else if (Symbol.is_superscript_decoded(sym)) Some(superscript(_)) + else if (Symbol.is_bold_decoded(sym)) Some(bold(_)) else None var result = Map[Text.Offset, Byte => Byte]() @@ -127,13 +124,13 @@ for (sym <- Symbol.iterator(text)) { if (ctrl_style(sym).isDefined) ctrl = sym else if (ctrl != "") { - if (symbols.is_controllable(sym) && sym != "\"" && !symbols.fonts.isDefinedAt(sym)) { + if (Symbol.is_controllable(sym) && sym != "\"" && !Symbol.fonts.isDefinedAt(sym)) { mark(offset - ctrl.length, offset, _ => hidden) mark(offset, offset + sym.length, ctrl_style(ctrl).get) } ctrl = "" } - symbols.lookup_font(sym) match { + Symbol.lookup_font(sym) match { case Some(idx) => mark(offset, offset + sym.length, user_font(idx, _)) case _ => }