author      wenzelm
date        Mon, 21 Dec 2009 10:40:14 +0100
changeset   34154:763559e5356b
parent      34153:5da0f7abbe29 (current diff)
parent      34144:bd7b3b91abab (diff)
child       34155:14aaccb399b3
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/General/exn.ML	Mon Dec 21 10:40:14 2009 +0100
@@ -0,0 +1,61 @@
+(*  Title:      Pure/General/exn.ML
+    Author:     Makarius
+
+Extra support for exceptions.
+*)
+
+signature EXN =
+sig
+  datatype 'a result = Result of 'a | Exn of exn
+  val get_result: 'a result -> 'a option
+  val get_exn: 'a result -> exn option
+  val capture: ('a -> 'b) -> 'a -> 'b result
+  val release: 'a result -> 'a
+  exception Interrupt
+  exception EXCEPTIONS of exn list
+  val flatten: exn -> exn list
+  val flatten_list: exn list -> exn list
+  val release_all: 'a result list -> 'a list
+  val release_first: 'a result list -> 'a list
+end;
+
+structure Exn: EXN =
+struct
+
+(* runtime exceptions as values *)
+
+datatype 'a result =
+  Result of 'a |
+  Exn of exn;
+
+fun get_result (Result x) = SOME x
+  | get_result _ = NONE;
+
+fun get_exn (Exn exn) = SOME exn
+  | get_exn _ = NONE;
+
+fun capture f x = Result (f x) handle e => Exn e;
+
+fun release (Result y) = y
+  | release (Exn e) = reraise e;
+
+
+(* interrupt and nested exceptions *)
+
+exception Interrupt = Interrupt;
+exception EXCEPTIONS of exn list;
+
+fun flatten Interrupt = []
+  | flatten (EXCEPTIONS exns) = flatten_list exns
+  | flatten exn = [exn]
+and flatten_list exns = List.concat (map flatten exns);
+
+fun release_all results =
+  if List.all (fn Result _ => true | _ => false) results
+  then map (fn Result x => x) results
+  else raise EXCEPTIONS (flatten_list (List.mapPartial get_exn results));
+
+fun release_first results = release_all results
+  handle EXCEPTIONS (exn :: _) => reraise exn;
+
+end;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/General/exn.scala	Mon Dec 21 10:40:14 2009 +0100
@@ -0,0 +1,28 @@
+/*  Title:      Pure/General/exn.scala
+    Author:     Makarius
+
+Extra support for exceptions.
+*/
+
+package isabelle
+
+
+object Exn
+{
+  /* runtime exceptions as values */
+
+  sealed abstract class Result[A]
+  case class Res[A](val result: A) extends Result[A]
+  case class Exn[A](val exn: Exception) extends Result[A]
+
+  def capture[A](e: => A): Result[A] =
+    try { Res(e) }
+    catch { case exn: RuntimeException => Exn[A](exn) }
+
+  def release[A](result: Result[A]): A =
+    result match {
+      case Res(x) => x
+      case Exn(exn) => throw exn
+    }
+}
+
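The Scala object mirrors the ML Exn.result type, so an exceptional outcome can be passed around as an ordinary value and re-raised only when the result is demanded. A minimal usage sketch, e.g. in a Scala console with Pure.jar on the classpath; the eval_later helper and the literals are illustrative, not part of this changeset:

  import isabelle._

  // evaluate a thunk, keeping any RuntimeException as a value
  def eval_later[A](compute: => A): Exn.Result[A] = Exn.capture(compute)

  val ok = eval_later(1 + 2)                                        // Res(3)
  val bad = eval_later[Int] { throw new RuntimeException("boom") }  // Exn(...)

  Exn.release(ok)     // yields 3
  // Exn.release(bad) would re-raise the captured RuntimeException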
--- a/src/Pure/General/scan.scala Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/General/scan.scala Mon Dec 21 10:40:14 2009 +0100 @@ -91,7 +91,8 @@ Tree(tree.branches + (c -> (if (end) elem else "", extend(Lexicon.empty_tree, i + 1)))) } - } else tree + } + else tree val old = this new Lexicon { override val main_tree = extend(old.main_tree, 0) } } @@ -102,32 +103,182 @@ def ++ (elems: Iterator[String]): Lexicon = (this /: elems) ((s, elem) => s + elem) - /* RegexParsers methods */ + /** RegexParsers methods **/ override val whiteSpace = "".r - def keyword: Parser[String] = new Parser[String] { + + /* keywords from lexicon */ + + def keyword: Parser[String] = new Parser[String] + { def apply(in: Input) = { val source = in.source val offset = in.offset val len = source.length - offset - def scan(tree: Tree, text: String, i: Int): String = + def scan(tree: Tree, result: String, i: Int): String = { if (i < len) { tree.branches.get(source.charAt(offset + i)) match { - case Some((s, tr)) => scan(tr, if (s.isEmpty) text else s, i + 1) - case None => text + case Some((s, tr)) => scan(tr, if (s.isEmpty) result else s, i + 1) + case None => result } - } else text + } + else result } - val text = scan(main_tree, "", 0) - if (text.isEmpty) Failure("keyword expected", in) - else Success(text, in.drop(text.length)) + val result = scan(main_tree, "", 0) + if (result.isEmpty) Failure("keyword expected", in) + else Success(result, in.drop(result.length)) } }.named("keyword") + + /* symbols */ + + def symbols(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] = + new Parser[String] + { + def apply(in: Input) = + { + val start = in.offset + val end = in.source.length + val matcher = new Symbol.Matcher(in.source) + + var i = start + var count = 0 + var finished = false + while (!finished) { + if (i < end && count < max_count) { + val n = matcher(i, end) + val sym = in.source.subSequence(i, i + n).toString + if (pred(sym)) { i += n; count += 1 } + else finished = true + } + else finished = true + } + if (count < min_count) Failure("bad input", in) + else Success(in.source.subSequence(start, i).toString, in.drop(i - start)) + } + }.named("symbols") + + def one(pred: String => Boolean): Parser[String] = symbols(pred, 1, 1) + def many(pred: String => Boolean): Parser[String] = symbols(pred, 0, Integer.MAX_VALUE) + def many1(pred: String => Boolean): Parser[String] = symbols(pred, 1, Integer.MAX_VALUE) + + + /* quoted strings */ + + private def quoted(quote: String): Parser[String] = + { + quote ~ + rep(many1(sym => sym != quote && sym != "\\" && Symbol.is_closed(sym)) | + "\\" + quote | "\\\\" | + (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ~ + quote ^^ { case x ~ ys ~ z => x + ys.mkString + z } + }.named("quoted") + + def quoted_content(quote: String, source: String): String = + { + require(parseAll(quoted(quote), source).successful) + source.substring(1, source.length - 1) // FIXME proper escapes, recode utf8 (!?) 
+ } + + + /* verbatim text */ + + private def verbatim: Parser[String] = + { + "{*" ~ rep(many1(sym => sym != "*" && Symbol.is_closed(sym)) | """\*(?!\})""".r) ~ "*}" ^^ + { case x ~ ys ~ z => x + ys.mkString + z } + }.named("verbatim") + + def verbatim_content(source: String): String = + { + require(parseAll(verbatim, source).successful) + source.substring(2, source.length - 2) + } + + + /* nested comments */ + + def comment: Parser[String] = new Parser[String] + { + val comment_text = + rep(many1(sym => sym != "*" && sym != "(" && Symbol.is_closed(sym)) | + """\*(?!\))|\((?!\*)""".r) + val comment_open = "(*" ~ comment_text ^^ (_ => ()) + val comment_close = comment_text ~ "*)" ^^ (_ => ()) + + def apply(in: Input) = + { + var rest = in + def try_parse(p: Parser[Unit]): Boolean = + { + parse(p, rest) match { + case Success(_, next) => { rest = next; true } + case _ => false + } + } + var depth = 0 + var finished = false + while (!finished) { + if (try_parse(comment_open)) depth += 1 + else if (depth > 0 && try_parse(comment_close)) depth -= 1 + else finished = true + } + if (in.offset < rest.offset && depth == 0) + Success(in.source.subSequence(in.offset, rest.offset).toString, rest) + else Failure("comment expected", in) + } + }.named("comment") + + def comment_content(source: String): String = + { + require(parseAll(comment, source).successful) + source.substring(2, source.length - 2) + } + + + /* outer syntax tokens */ + + def token(symbols: Symbol.Interpretation, is_command: String => Boolean): + Parser[Outer_Lex.Token] = + { + import Outer_Lex._ + + val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y } + val nat = many1(symbols.is_digit) + val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x } + + val ident = id ~ rep("." ~> id) ^^ + { case x ~ Nil => Ident(x) + case x ~ ys => Long_Ident((x :: ys).mkString(".")) } + + val var_ = "?" ~ id_nat ^^ { case x ~ y => Var(x + y) } + val type_ident = "'" ~ id ^^ { case x ~ y => Type_Ident(x + y) } + val type_var = "?'" ~ id_nat ^^ { case x ~ y => Type_Var(x + y) } + val nat_ = nat ^^ Nat + + val sym_ident = + (many1(symbols.is_symbolic_char) | + one(sym => symbols.is_symbolic(sym) & Symbol.is_closed(sym))) ^^ Sym_Ident + + val space = many1(symbols.is_blank) ^^ Space + + val string = quoted("\"") ^^ String_Token + val alt_string = quoted("`") ^^ Alt_String_Token + + val bad_input = many1(sym => !(symbols.is_blank(sym))) ^^ Bad_Input + + + /* tokens */ + + (space | (string | (alt_string | (verbatim ^^ Verbatim | comment ^^ Comment)))) | + ((ident | (var_ | (type_ident | (type_var | (nat_ | sym_ident))))) ||| + keyword ^^ (x => if (is_command(x)) Command(x) else Keyword(x))) | + bad_input + } } } -
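The Lexicon is itself a RegexParsers instance, so the keyword parser (longest match against the lexicon contents) and the *_content helpers can be used directly. A hedged sketch under assumed inputs; the lexicon entries and sample strings are made up for illustration:

  import isabelle._

  val lex = Scan.Lexicon.empty + "theory" + "theorem"

  // longest keyword wins: "theorem", not "theory"
  lex.parse(lex.keyword, "theorem foo") match {
    case lex.Success(kw, _) => println(kw)    // theorem
    case _ => println("no keyword")
  }

  // strip delimiters from tokens accepted by the quoted/verbatim/comment parsers
  lex.quoted_content("\"", "\"abc\"")         // abc
  lex.verbatim_content("{*abc*}")             // abc
  lex.comment_content("(*abc*)")              // abc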
--- a/src/Pure/General/symbol.scala Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/General/symbol.scala Mon Dec 21 10:40:14 2009 +0100 @@ -29,36 +29,45 @@ // total pattern val regex = new Regex(plain + "|" + symbol + "|" + bad_symbol + "| .") - // prefix of another symbol - def is_open(s: CharSequence): Boolean = + + /* basic matching */ + + def is_closed(c: Char): Boolean = + !(c == '\\' || Character.isHighSurrogate(c)) + + def is_closed(s: CharSequence): Boolean = { - val len = s.length - len == 1 && Character.isHighSurrogate(s.charAt(0)) || - s == "\\" || - s == "\\<" || - len > 2 && s.charAt(len - 1) != '>' + if (s.length == 1) is_closed(s.charAt(0)) + else !bad_symbol.pattern.matcher(s).matches + } + + class Matcher(text: CharSequence) + { + private val matcher = regex.pattern.matcher(text) + def apply(start: Int, end: Int): Int = + { + require(0 <= start && start < end && end <= text.length) + if (is_closed(text.charAt(start))) 1 + else { + matcher.region(start, end).lookingAt + matcher.group.length + } + } } /* elements */ - private def could_open(c: Char): Boolean = - c == '\\' || Character.isHighSurrogate(c) - - def elements(text: CharSequence) = new Iterator[String] { - private val matcher = regex.pattern.matcher(text) + def elements(text: CharSequence) = new Iterator[CharSequence] + { + private val matcher = new Matcher(text) private var i = 0 def hasNext = i < text.length def next = { - val len = - if (could_open(text.charAt(i))) { - matcher.region(i, text.length).lookingAt - matcher.group.length - } - else 1 - val s = text.subSequence(i, i + len) - i += len - s.toString + val n = matcher(i, text.length) + val s = text.subSequence(i, i + n) + i += n + s } } @@ -70,20 +79,15 @@ case class Entry(chr: Int, sym: Int) val index: Array[Entry] = { - val matcher = regex.pattern.matcher(text) + val matcher = new Matcher(text) val buf = new mutable.ArrayBuffer[Entry] var chr = 0 var sym = 0 while (chr < text.length) { - val len = - if (could_open(text.charAt(chr))) { - matcher.region(chr, text.length).lookingAt - matcher.group.length - } - else 1 - chr += len + val n = matcher(chr, text.length) + chr += n sym += 1 - if (len > 1) buf += Entry(chr, sym) + if (n > 1) buf += Entry(chr, sym) } buf.toArray } @@ -152,7 +156,7 @@ /** Symbol interpretation **/ - class Interpretation(symbol_decls: Iterator[String]) + class Interpretation(symbol_decls: List[String]) { /* read symbols */ @@ -179,13 +183,14 @@ } private val symbols: List[(String, Map[String, String])] = - for (decl <- symbol_decls.toList if !empty.pattern.matcher(decl).matches) + for (decl <- symbol_decls if !empty.pattern.matcher(decl).matches) yield read_decl(decl) /* misc properties */ - val names: Map[String, String] = { + val names: Map[String, String] = + { val name = new Regex("""\\<([A-Za-z][A-Za-z0-9_']*)>""") Map((for ((sym @ name(a), _) <- symbols) yield (sym -> a)): _*) } @@ -216,5 +221,66 @@ def decode(text: String): String = decoder.recode(text) def encode(text: String): String = encoder.recode(text) + + + /* classification */ + + private object Decode_Set + { + def apply(elems: String*): Set[String] = + { + val content = elems.toList + Set((content ::: content.map(decode)): _*) + } + } + + private val letters = Decode_Set( + "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", + "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", + "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", + "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", + + "\\<A>", "\\<B>", "\\<C>", 
"\\<D>", "\\<E>", "\\<F>", "\\<G>", + "\\<H>", "\\<I>", "\\<J>", "\\<K>", "\\<L>", "\\<M>", "\\<N>", + "\\<O>", "\\<P>", "\\<Q>", "\\<R>", "\\<S>", "\\<T>", "\\<U>", + "\\<V>", "\\<W>", "\\<X>", "\\<Y>", "\\<Z>", "\\<a>", "\\<b>", + "\\<c>", "\\<d>", "\\<e>", "\\<f>", "\\<g>", "\\<h>", "\\<i>", + "\\<j>", "\\<k>", "\\<l>", "\\<m>", "\\<n>", "\\<o>", "\\<p>", + "\\<q>", "\\<r>", "\\<s>", "\\<t>", "\\<u>", "\\<v>", "\\<w>", + "\\<x>", "\\<y>", "\\<z>", + + "\\<AA>", "\\<BB>", "\\<CC>", "\\<DD>", "\\<EE>", "\\<FF>", + "\\<GG>", "\\<HH>", "\\<II>", "\\<JJ>", "\\<KK>", "\\<LL>", + "\\<MM>", "\\<NN>", "\\<OO>", "\\<PP>", "\\<QQ>", "\\<RR>", + "\\<SS>", "\\<TT>", "\\<UU>", "\\<VV>", "\\<WW>", "\\<XX>", + "\\<YY>", "\\<ZZ>", "\\<aa>", "\\<bb>", "\\<cc>", "\\<dd>", + "\\<ee>", "\\<ff>", "\\<gg>", "\\<hh>", "\\<ii>", "\\<jj>", + "\\<kk>", "\\<ll>", "\\<mm>", "\\<nn>", "\\<oo>", "\\<pp>", + "\\<qq>", "\\<rr>", "\\<ss>", "\\<tt>", "\\<uu>", "\\<vv>", + "\\<ww>", "\\<xx>", "\\<yy>", "\\<zz>", + + "\\<alpha>", "\\<beta>", "\\<gamma>", "\\<delta>", "\\<epsilon>", + "\\<zeta>", "\\<eta>", "\\<theta>", "\\<iota>", "\\<kappa>", + "\\<mu>", "\\<nu>", "\\<xi>", "\\<pi>", "\\<rho>", "\\<sigma>", + "\\<tau>", "\\<upsilon>", "\\<phi>", "\\<chi>", "\\<psi>", + "\\<omega>", "\\<Gamma>", "\\<Delta>", "\\<Theta>", "\\<Lambda>", + "\\<Xi>", "\\<Pi>", "\\<Sigma>", "\\<Upsilon>", "\\<Phi>", + "\\<Psi>", "\\<Omega>", + + "\\<^isub>", "\\<^isup>") + + private val blanks = + Decode_Set(" ", "\t", "\n", "\u000B", "\f", "\r", "\\<spacespace>", "\\<^newline>") + + private val sym_chars = + Set("!", "#", "$", "%", "&", "*", "+", "-", "/", "<", "=", ">", "?", "@", "^", "_", "|", "~") + + def is_letter(sym: String): Boolean = letters.contains(sym) + def is_digit(sym: String): Boolean = sym.length == 1 && '0' <= sym(0) && sym(0) <= '9' + def is_quasi(sym: String): Boolean = sym == "_" || sym == "'" + def is_letdig(sym: String): Boolean = is_letter(sym) || is_digit(sym) || is_quasi(sym) + def is_blank(sym: String): Boolean = blanks.contains(sym) + def is_symbolic_char(sym: String): Boolean = sym_chars.contains(sym) + def is_symbolic(sym: String): Boolean = sym.startsWith("\\<") && !sym.startsWith("\\<^") } }
--- a/src/Pure/IsaMakefile Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/IsaMakefile Mon Dec 21 10:40:14 2009 +0100 @@ -23,7 +23,7 @@ BOOTSTRAP_FILES = ML-Systems/compiler_polyml-5.0.ML \ ML-Systems/compiler_polyml-5.2.ML ML-Systems/compiler_polyml-5.3.ML \ - ML-Systems/exn.ML ML-Systems/ml_name_space.ML \ + ML-Systems/ml_name_space.ML \ ML-Systems/ml_pretty.ML ML-Systems/mosml.ML \ ML-Systems/multithreading.ML ML-Systems/multithreading_polyml.ML \ ML-Systems/overloading_smlnj.ML ML-Systems/polyml-5.0.ML \ @@ -51,38 +51,38 @@ Concurrent/synchronized_sequential.ML Concurrent/task_queue.ML \ General/alist.ML General/antiquote.ML General/balanced_tree.ML \ General/basics.ML General/binding.ML General/buffer.ML \ - General/file.ML General/graph.ML General/heap.ML General/integer.ML \ - General/long_name.ML General/markup.ML General/name_space.ML \ - General/ord_list.ML General/output.ML General/path.ML \ - General/position.ML General/pretty.ML General/print_mode.ML \ - General/properties.ML General/queue.ML General/same.ML \ - General/scan.ML General/secure.ML General/seq.ML General/source.ML \ - General/stack.ML General/symbol.ML General/symbol_pos.ML \ - General/table.ML General/url.ML General/xml.ML General/yxml.ML \ - Isar/args.ML Isar/attrib.ML Isar/auto_bind.ML Isar/calculation.ML \ - Isar/class.ML Isar/class_target.ML Isar/code.ML Isar/constdefs.ML \ - Isar/context_rules.ML Isar/element.ML Isar/expression.ML \ - Isar/isar_cmd.ML Isar/isar_document.ML Isar/isar_syn.ML \ - Isar/local_defs.ML Isar/local_syntax.ML Isar/local_theory.ML \ - Isar/locale.ML Isar/method.ML Isar/object_logic.ML Isar/obtain.ML \ - Isar/outer_keyword.ML Isar/outer_lex.ML Isar/outer_parse.ML \ - Isar/outer_syntax.ML Isar/overloading.ML Isar/proof.ML \ - Isar/proof_context.ML Isar/proof_display.ML Isar/proof_node.ML \ - Isar/rule_cases.ML Isar/rule_insts.ML Isar/runtime.ML \ - Isar/skip_proof.ML Isar/spec_parse.ML Isar/spec_rules.ML \ - Isar/specification.ML Isar/theory_target.ML Isar/toplevel.ML \ - Isar/value_parse.ML ML/ml_antiquote.ML ML/ml_compiler.ML \ - ML/ml_compiler_polyml-5.3.ML ML/ml_context.ML ML/ml_env.ML \ - ML/ml_lex.ML ML/ml_parse.ML ML/ml_syntax.ML ML/ml_thms.ML \ - ML-Systems/install_pp_polyml.ML ML-Systems/install_pp_polyml-5.3.ML \ - ML-Systems/use_context.ML Proof/extraction.ML \ - Proof/proof_rewrite_rules.ML Proof/proof_syntax.ML \ - Proof/proofchecker.ML Proof/reconstruct.ML ProofGeneral/pgip.ML \ - ProofGeneral/pgip_input.ML ProofGeneral/pgip_isabelle.ML \ - ProofGeneral/pgip_markup.ML ProofGeneral/pgip_output.ML \ - ProofGeneral/pgip_parser.ML ProofGeneral/pgip_tests.ML \ - ProofGeneral/pgip_types.ML ProofGeneral/preferences.ML \ - ProofGeneral/proof_general_emacs.ML \ + General/exn.ML General/file.ML General/graph.ML General/heap.ML \ + General/integer.ML General/long_name.ML General/markup.ML \ + General/name_space.ML General/ord_list.ML General/output.ML \ + General/path.ML General/position.ML General/pretty.ML \ + General/print_mode.ML General/properties.ML General/queue.ML \ + General/same.ML General/scan.ML General/secure.ML General/seq.ML \ + General/source.ML General/stack.ML General/symbol.ML \ + General/symbol_pos.ML General/table.ML General/url.ML General/xml.ML \ + General/yxml.ML Isar/args.ML Isar/attrib.ML Isar/auto_bind.ML \ + Isar/calculation.ML Isar/class.ML Isar/class_target.ML Isar/code.ML \ + Isar/constdefs.ML Isar/context_rules.ML Isar/element.ML \ + Isar/expression.ML Isar/isar_cmd.ML Isar/isar_document.ML \ + Isar/isar_syn.ML Isar/local_defs.ML Isar/local_syntax.ML \ + 
Isar/local_theory.ML Isar/locale.ML Isar/method.ML \ + Isar/object_logic.ML Isar/obtain.ML Isar/outer_keyword.ML \ + Isar/outer_lex.ML Isar/outer_parse.ML Isar/outer_syntax.ML \ + Isar/overloading.ML Isar/proof.ML Isar/proof_context.ML \ + Isar/proof_display.ML Isar/proof_node.ML Isar/rule_cases.ML \ + Isar/rule_insts.ML Isar/runtime.ML Isar/skip_proof.ML \ + Isar/spec_parse.ML Isar/spec_rules.ML Isar/specification.ML \ + Isar/theory_target.ML Isar/toplevel.ML Isar/value_parse.ML \ + ML/ml_antiquote.ML ML/ml_compiler.ML ML/ml_compiler_polyml-5.3.ML \ + ML/ml_context.ML ML/ml_env.ML ML/ml_lex.ML ML/ml_parse.ML \ + ML/ml_syntax.ML ML/ml_thms.ML ML-Systems/install_pp_polyml.ML \ + ML-Systems/install_pp_polyml-5.3.ML ML-Systems/use_context.ML \ + Proof/extraction.ML Proof/proof_rewrite_rules.ML \ + Proof/proof_syntax.ML Proof/proofchecker.ML Proof/reconstruct.ML \ + ProofGeneral/pgip.ML ProofGeneral/pgip_input.ML \ + ProofGeneral/pgip_isabelle.ML ProofGeneral/pgip_markup.ML \ + ProofGeneral/pgip_output.ML ProofGeneral/pgip_parser.ML \ + ProofGeneral/pgip_tests.ML ProofGeneral/pgip_types.ML \ + ProofGeneral/preferences.ML ProofGeneral/proof_general_emacs.ML \ ProofGeneral/proof_general_pgip.ML Pure.thy ROOT.ML Syntax/ast.ML \ Syntax/lexicon.ML Syntax/mixfix.ML Syntax/parser.ML \ Syntax/printer.ML Syntax/simple_syntax.ML Syntax/syn_ext.ML \ @@ -121,14 +121,15 @@ ## Scala material -SCALA_FILES = General/event_bus.scala General/linear_set.scala \ - General/markup.scala General/position.scala General/scan.scala \ - General/swing_thread.scala General/symbol.scala General/xml.scala \ - General/yxml.scala Isar/isar_document.scala Isar/outer_keyword.scala \ - System/cygwin.scala System/gui_setup.scala \ - System/isabelle_process.scala System/isabelle_syntax.scala \ - System/isabelle_system.scala System/platform.scala \ - Thy/completion.scala Thy/html.scala Thy/thy_header.scala +SCALA_FILES = General/event_bus.scala General/exn.scala \ + General/linear_set.scala General/markup.scala General/position.scala \ + General/scan.scala General/swing_thread.scala General/symbol.scala \ + General/xml.scala General/yxml.scala Isar/isar_document.scala \ + Isar/outer_keyword.scala Isar/outer_lex.scala System/cygwin.scala \ + System/gui_setup.scala System/isabelle_process.scala \ + System/isabelle_syntax.scala System/isabelle_system.scala \ + System/platform.scala Thy/completion.scala Thy/html.scala \ + Thy/thy_header.scala library.scala JAR_DIR = $(ISABELLE_HOME)/lib/classes PURE_JAR = $(JAR_DIR)/Pure.jar
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Pure/Isar/outer_lex.scala Mon Dec 21 10:40:14 2009 +0100 @@ -0,0 +1,100 @@ +/* Title: Pure/Isar/outer_lex.scala + Author: Makarius + +Outer lexical syntax for Isabelle/Isar. +*/ + +package isabelle + + +object Outer_Lex +{ + sealed abstract class Token + { + def source: String + def content: String = source + + def is_delimited: Boolean = false + def is_name: Boolean = false + def is_xname: Boolean = false + def is_text: Boolean = false + def is_space: Boolean = false + def is_comment: Boolean = false + def is_proper: Boolean = !(is_space || is_comment) + } + + case class Command(val source: String) extends Token + + case class Keyword(val source: String) extends Token + + case class Ident(val source: String) extends Token + { + override def is_name = true + override def is_xname = true + override def is_text = true + } + + case class Long_Ident(val source: String) extends Token + { + override def is_xname = true + override def is_text = true + } + + case class Sym_Ident(val source: String) extends Token + { + override def is_name = true + override def is_xname = true + override def is_text = true + } + + case class Var(val source: String) extends Token + + case class Type_Ident(val source: String) extends Token + + case class Type_Var(val source: String) extends Token + + case class Nat(val source: String) extends Token + { + override def is_name = true + override def is_xname = true + override def is_text = true + } + + case class String_Token(val source: String) extends Token + { + override def content = Scan.Lexicon.empty.quoted_content("\"", source) + override def is_delimited = true + override def is_name = true + override def is_xname = true + override def is_text = true + } + + case class Alt_String_Token(val source: String) extends Token + { + override def content = Scan.Lexicon.empty.quoted_content("`", source) + override def is_delimited = true + } + + case class Verbatim(val source: String) extends Token + { + override def content = Scan.Lexicon.empty.verbatim_content(source) + override def is_delimited = true + override def is_text = true + } + + case class Space(val source: String) extends Token + { + override def is_space = true + } + + case class Comment(val source: String) extends Token + { + override def content = Scan.Lexicon.empty.comment_content(source) + override def is_delimited = true + override def is_comment = true + } + + case class Bad_Input(val source: String) extends Token + case class Unparsed(val source: String) extends Token +} +
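Token content strips delimiters via the new Scan.Lexicon helpers, and the is_* predicates support later filtering of token streams. A hedged sketch; the proper helper and the literal are illustrative:

  import isabelle._

  val tok = Outer_Lex.String_Token("\"hello\"")
  tok.source      // "\"hello\""
  tok.content     // hello  (quotes stripped via Scan.Lexicon.empty.quoted_content)
  tok.is_name     // true
  tok.is_proper   // true: neither Space nor Comment

  // keep only the tokens that matter for further processing
  def proper(toks: List[Outer_Lex.Token]): List[Outer_Lex.Token] =
    toks.filter(_.is_proper)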
--- a/src/Pure/ML-Systems/exn.ML	Mon Dec 21 08:32:22 2009 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,61 +0,0 @@
-(*  Title:      Pure/ML-Systems/exn.ML
-    Author:     Makarius
-
-Extra support for exceptions.
-*)
-
-signature EXN =
-sig
-  datatype 'a result = Exn of exn | Result of 'a
-  val get_result: 'a result -> 'a option
-  val get_exn: 'a result -> exn option
-  val capture: ('a -> 'b) -> 'a -> 'b result
-  val release: 'a result -> 'a
-  exception Interrupt
-  exception EXCEPTIONS of exn list
-  val flatten: exn -> exn list
-  val flatten_list: exn list -> exn list
-  val release_all: 'a result list -> 'a list
-  val release_first: 'a result list -> 'a list
-end;
-
-structure Exn: EXN =
-struct
-
-(* runtime exceptions as values *)
-
-datatype 'a result =
-  Result of 'a |
-  Exn of exn;
-
-fun get_result (Result x) = SOME x
-  | get_result _ = NONE;
-
-fun get_exn (Exn exn) = SOME exn
-  | get_exn _ = NONE;
-
-fun capture f x = Result (f x) handle e => Exn e;
-
-fun release (Result y) = y
-  | release (Exn e) = reraise e;
-
-
-(* interrupt and nested exceptions *)
-
-exception Interrupt = Interrupt;
-exception EXCEPTIONS of exn list;
-
-fun flatten Interrupt = []
-  | flatten (EXCEPTIONS exns) = flatten_list exns
-  | flatten exn = [exn]
-and flatten_list exns = List.concat (map flatten exns);
-
-fun release_all results =
-  if List.all (fn Result _ => true | _ => false) results
-  then map (fn Result x => x) results
-  else raise EXCEPTIONS (flatten_list (List.mapPartial get_exn results));
-
-fun release_first results = release_all results
-  handle EXCEPTIONS (exn :: _) => reraise exn;
-
-end;
--- a/src/Pure/ML-Systems/mosml.ML Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/ML-Systems/mosml.ML Mon Dec 21 10:40:14 2009 +0100 @@ -41,7 +41,7 @@ fun reraise exn = raise exn; use "ML-Systems/unsynchronized.ML"; -use "ML-Systems/exn.ML"; +use "General/exn.ML"; use "ML-Systems/universal.ML"; use "ML-Systems/thread_dummy.ML"; use "ML-Systems/multithreading.ML";
--- a/src/Pure/ML-Systems/polyml_common.ML Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/ML-Systems/polyml_common.ML Mon Dec 21 10:40:14 2009 +0100 @@ -5,7 +5,7 @@ exception Interrupt = SML90.Interrupt; -use "ML-Systems/exn.ML"; +use "General/exn.ML"; use "ML-Systems/multithreading.ML"; use "ML-Systems/time_limit.ML"; use "ML-Systems/timing.ML";
--- a/src/Pure/ML-Systems/smlnj.ML Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/ML-Systems/smlnj.ML Mon Dec 21 10:40:14 2009 +0100 @@ -9,7 +9,7 @@ use "ML-Systems/proper_int.ML"; use "ML-Systems/unsynchronized.ML"; use "ML-Systems/overloading_smlnj.ML"; -use "ML-Systems/exn.ML"; +use "General/exn.ML"; use "ML-Systems/universal.ML"; use "ML-Systems/thread_dummy.ML"; use "ML-Systems/multithreading.ML";
--- a/src/Pure/System/isabelle_system.scala Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/System/isabelle_system.scala Mon Dec 21 10:40:14 2009 +0100 @@ -316,14 +316,14 @@ /* symbols */ - private def read_symbols(path: String): Iterator[String] = + private def read_symbols(path: String): List[String] = { val file = new File(platform_path(path)) - if (file.isFile) Source.fromFile(file).getLines - else Iterator.empty + if (file.isFile) Source.fromFile(file).getLines.toList + else Nil } val symbols = new Symbol.Interpretation( - read_symbols("$ISABELLE_HOME/etc/symbols") ++ + read_symbols("$ISABELLE_HOME/etc/symbols") ::: read_symbols("$ISABELLE_HOME_USER/etc/symbols"))
--- a/src/Pure/Thy/completion.scala Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/Thy/completion.scala Mon Dec 21 10:40:14 2009 +0100 @@ -12,34 +12,6 @@ private object Completion { - /** String/CharSequence utilities */ - - def length_ord(s1: String, s2: String): Boolean = - s1.length < s2.length || s1.length == s2.length && s1 <= s2 - - class Reverse(text: CharSequence, start: Int, end: Int) extends CharSequence - { - require(0 <= start && start <= end && end <= text.length) - - def this(text: CharSequence) = this(text, 0, text.length) - - def length: Int = end - start - def charAt(i: Int): Char = text.charAt(end - i - 1) - - def subSequence(i: Int, j: Int): CharSequence = - if (0 <= i && i <= j && j <= length) new Reverse(text, end - j, end - i) - else throw new IndexOutOfBoundsException - - override def toString: String = - { - val buf = new StringBuilder(length) - for (i <- 0 until length) - buf.append(charAt(i)) - buf.toString - } - } - - /** word completion **/ val word_regex = "[a-zA-Z0-9_']+".r @@ -55,7 +27,7 @@ def read(in: CharSequence): Option[String] = { - val rev_in = new Reverse(in) + val rev_in = new Library.Reverse(in) parse((rev_symbol | rev_symb | word) ^^ (_.reverse), rev_in) match { case Success(result, _) => Some(result) case _ => None @@ -113,7 +85,7 @@ def complete(line: CharSequence): Option[(String, List[String])] = { - abbrevs_lex.parse(abbrevs_lex.keyword, new Completion.Reverse(line)) match { + abbrevs_lex.parse(abbrevs_lex.keyword, new Library.Reverse(line)) match { case abbrevs_lex.Success(rev_a, _) => val (word, c) = abbrevs_map(rev_a) if (word == c) None @@ -123,7 +95,7 @@ case Some(word) => words_lex.completions(word).map(words_map(_)).filter(_ != word) match { case Nil => None - case cs => Some (word, cs.sort(Completion.length_ord _)) + case cs => Some(word, cs.sort(_ < _)) } case None => None }
--- a/src/Pure/Thy/html.scala Mon Dec 21 08:32:22 2009 +0100 +++ b/src/Pure/Thy/html.scala Mon Dec 21 10:40:14 2009 +0100 @@ -49,7 +49,7 @@ flush() ts + elem } - val syms = Symbol.elements(txt) + val syms = Symbol.elements(txt).map(_.toString) while (syms.hasNext) { val s1 = syms.next def s2() = if (syms.hasNext) syms.next else ""
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/library.scala	Mon Dec 21 10:40:14 2009 +0100
@@ -0,0 +1,49 @@
+/*  Title:      Pure/library.scala
+    Author:     Makarius
+
+Basic library.
+*/
+
+package isabelle
+
+import java.lang.System
+
+
+object Library
+{
+  /* reverse CharSequence */
+
+  class Reverse(text: CharSequence, start: Int, end: Int) extends CharSequence
+  {
+    require(0 <= start && start <= end && end <= text.length)
+
+    def this(text: CharSequence) = this(text, 0, text.length)
+
+    def length: Int = end - start
+    def charAt(i: Int): Char = text.charAt(end - i - 1)
+
+    def subSequence(i: Int, j: Int): CharSequence =
+      if (0 <= i && i <= j && j <= length) new Reverse(text, end - j, end - i)
+      else throw new IndexOutOfBoundsException
+
+    override def toString: String =
+    {
+      val buf = new StringBuilder(length)
+      for (i <- 0 until length)
+        buf.append(charAt(i))
+      buf.toString
+    }
+  }
+
+
+  /* timing */
+
+  def timeit[A](e: => A) =
+  {
+    val start = System.currentTimeMillis()
+    val result = Exn.capture(e)
+    val stop = System.currentTimeMillis()
+    System.err.println((stop - start) + "ms elapsed time")
+    Exn.release(result)
+  }
+}
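Reverse gives a reversed view of a CharSequence without copying (used for right-to-left completion parsing in completion.scala above), and timeit wraps Exn.capture/release so the elapsed time is reported even when the expression fails. A small sketch; the sample values are illustrative:

  import isabelle._

  val rev = new Library.Reverse("theorem")
  rev.length      // 7
  rev.charAt(0)   // 'm'
  rev.toString    // "meroeht"

  // prints e.g. "3ms elapsed time" on stderr, then returns the sum
  val n = Library.timeit { (1 to 1000).foldLeft(0)(_ + _) }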