author      wenzelm
date        Thu, 24 May 2018 22:28:26 +0200
changeset   68269   5ff0ccc74884
parent      68263   e4e536a71e3d (current diff)
parent      68268   38b4d4f39434 (diff)
child       68270   2bc921b2159b
child       68271   77f6fa78b6e1
child       68283   e2f235b9662a
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/Admin/Windows/Cygwin/setup_server Thu May 24 22:28:26 2018 +0200
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+CYGWIN_MAIN="https://cygwin.com"
+CYGWIN_MIRROR="https://ftp.eq.uc.pt/software/pc/prog/cygwin"
+
+function fail()
+{
+  echo "$1" >&2
+  exit 2
+}
+
+function download()
+{
+  local URL="$1"
+  local DIR="${2:-.}"
+  mkdir -p "$DIR" || fail "Cannot create directory: \"$DIR\""
+  echo "Downloading $URL ..."
+  curl --fail --silent "$URL" > "$DIR"/"$(basename "$URL")" || fail "FAILED"
+}
+
+download "$CYGWIN_MAIN/setup-x86.exe"
+download "$CYGWIN_MAIN/setup-x86_64.exe"
+download "$CYGWIN_MIRROR/x86/setup.xz" "x86"
+download "$CYGWIN_MIRROR/x86/setup.xz.sig" "x86"
+download "$CYGWIN_MIRROR/x86_64/setup.xz" "x86_64"
+download "$CYGWIN_MIRROR/x86_64/setup.xz.sig" "x86_64"
--- a/src/HOL/Tools/typedef.ML Thu May 24 17:06:39 2018 +0200
+++ b/src/HOL/Tools/typedef.ML Thu May 24 22:28:26 2018 +0200
@@ -348,4 +348,27 @@
 
 val overloaded = typedef_overloaded;
 
+
+
+(** theory export **)
+
+val _ = Export_Theory.setup_presentation (fn _ => fn thy =>
+  let
+    val parent_spaces = map Sign.type_space (Theory.parents_of thy);
+    val typedefs =
+      Name_Space.dest_table (#types (Type.rep_tsig (Sign.tsig_of thy)))
+      |> maps (fn (name, _) =>
+        if exists (fn space => Name_Space.declared space name) parent_spaces then []
+        else
+          get_info_global thy name
+          |> map (fn ({rep_type, abs_type, Rep_name, Abs_name, axiom_name}, _) =>
+            (name, (rep_type, (abs_type, (Rep_name, (Abs_name, axiom_name)))))));
+  in
+    if null typedefs then ()
+    else
+      Export_Theory.export_body thy "typedefs"
+        let open XML.Encode Term_XML.Encode
+        in list (pair string (pair typ (pair typ (pair string (pair string string))))) typedefs end
+  end);
+
 end;
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/General/cache.scala Thu May 24 22:28:26 2018 +0200
@@ -0,0 +1,63 @@
+/*  Title:      Pure/General/cache.scala
+    Author:     Makarius
+
+Cache for partial sharing (weak table).
+*/
+
+package isabelle
+
+
+import java.util.{Collections, WeakHashMap}
+import java.lang.ref.WeakReference
+
+
+class Cache(initial_size: Int = 131071, max_string: Int = 100)
+{
+  private val table =
+    Collections.synchronizedMap(new WeakHashMap[Any, WeakReference[Any]](initial_size))
+
+  def size: Int = table.size
+
+  override def toString: String = "Cache(" + size + ")"
+
+  protected def lookup[A](x: A): Option[A] =
+  {
+    val ref = table.get(x)
+    if (ref == null) None
+    else {
+      val y = ref.asInstanceOf[WeakReference[A]].get
+      if (y == null) None
+      else Some(y)
+    }
+  }
+
+  protected def store[A](x: A): A =
+  {
+    table.put(x, new WeakReference[Any](x))
+    x
+  }
+
+  protected def cache_int(x: Int): Int =
+    lookup(x) getOrElse store(x)
+
+  protected def cache_string(x: String): String =
+  {
+    if (x == "") ""
+    else if (x == "true") "true"
+    else if (x == "false") "false"
+    else if (x == "0.0") "0.0"
+    else if (Library.is_small_int(x)) Library.signed_string_of_int(Integer.parseInt(x))
+    else {
+      lookup(x) match {
+        case Some(y) => y
+        case None =>
+          val z = Library.isolate_substring(x)
+          if (z.length > max_string) z else store(z)
+      }
+    }
+  }
+
+  // main methods
+  def int(x: Int): Int = synchronized { cache_int(x) }
+  def string(x: String): String = synchronized { cache_string(x) }
+}
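For orientation, a minimal sketch (not part of the changeset; the object name and sample values are hypothetical) of how the public int/string methods of the new isabelle.Cache behave: structurally equal values end up as one weakly referenced object.

    // hypothetical usage sketch of isabelle.Cache (assumes the Isabelle/Scala classpath)
    object Cache_Demo
    {
      def demo(): Unit =
      {
        val cache = new isabelle.Cache()       // default initial_size = 131071, max_string = 100
        val s1 = cache.string("Nat.Suc")       // first occurrence is stored via a weak reference
        val s2 = cache.string("Nat." + "Suc")  // an equal string is mapped to the stored instance
        assert(s1 eq s2)                       // partial sharing: same object identity
        val n = cache.int(42)                  // ints are cached as boxed values in the same table
      }
    }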
--- a/src/Pure/PIDE/xml.scala Thu May 24 17:06:39 2018 +0200
+++ b/src/Pure/PIDE/xml.scala Thu May 24 22:28:26 2018 +0200
@@ -7,11 +7,6 @@
 package isabelle
 
 
-import java.util.{Collections, WeakHashMap}
-import java.lang.ref.WeakReference
-import javax.xml.parsers.DocumentBuilderFactory
-
-
 object XML
 {
   /** XML trees **/
@@ -152,55 +147,26 @@
 
 
-  /** cache for partial sharing (weak table) **/
+  /** cache **/
 
   def make_cache(initial_size: Int = 131071, max_string: Int = 100): Cache =
     new Cache(initial_size, max_string)
 
   class Cache private[XML](initial_size: Int, max_string: Int)
+    extends isabelle.Cache(initial_size, max_string)
   {
-    private val table =
-      Collections.synchronizedMap(new WeakHashMap[Any, WeakReference[Any]](initial_size))
-
-    def size: Int = table.size
-
-    private def lookup[A](x: A): Option[A] =
-    {
-      val ref = table.get(x)
-      if (ref == null) None
-      else {
-        val y = ref.asInstanceOf[WeakReference[A]].get
-        if (y == null) None
-        else Some(y)
-      }
-    }
-    private def store[A](x: A): A =
+    protected def cache_props(x: Properties.T): Properties.T =
     {
-      table.put(x, new WeakReference[Any](x))
-      x
-    }
-
-    private def cache_string(x: String): String =
-      if (x == "") ""
-      else if (x == "true") "true"
-      else if (x == "false") "false"
-      else if (x == "0.0") "0.0"
-      else if (Library.is_small_int(x)) Library.signed_string_of_int(Integer.parseInt(x))
-      else
-        lookup(x) match {
-          case Some(y) => y
-          case None =>
-            val z = Library.isolate_substring(x)
-            if (z.length > max_string) z else store(z)
-        }
-    private def cache_props(x: Properties.T): Properties.T =
       if (x.isEmpty) x
      else
        lookup(x) match {
          case Some(y) => y
          case None =>
            store(x.map(p => (Library.isolate_substring(p._1).intern, cache_string(p._2))))
        }
-    private def cache_markup(x: Markup): Markup =
+    }
+
+    protected def cache_markup(x: Markup): Markup =
+    {
       lookup(x) match {
         case Some(y) => y
         case None =>
@@ -209,7 +175,10 @@
               store(Markup(cache_string(name), cache_props(props)))
           }
       }
-    private def cache_tree(x: XML.Tree): XML.Tree =
+    }
+
+    protected def cache_tree(x: XML.Tree): XML.Tree =
+    {
       lookup(x) match {
         case Some(y) => y
         case None =>
@@ -219,16 +188,19 @@
             case XML.Text(text) => store(XML.Text(cache_string(text)))
           }
       }
-    private def cache_body(x: XML.Body): XML.Body =
+    }
+
+    protected def cache_body(x: XML.Body): XML.Body =
+    {
       if (x.isEmpty) x
      else
        lookup(x) match {
          case Some(y) => y
          case None => x.map(cache_tree(_))
        }
+    }
 
     // main methods
-    def string(x: String): String = synchronized { cache_string(x) }
     def props(x: Properties.T): Properties.T = synchronized { cache_props(x) }
     def markup(x: Markup): Markup = synchronized { cache_markup(x) }
     def tree(x: XML.Tree): XML.Tree = synchronized { cache_tree(x) }
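The refactored XML.Cache keeps its public interface, so existing callers are unaffected; string caching is now inherited from isabelle.Cache. A brief hypothetical sketch (not part of the changeset; sample markup and values are invented):

    // hypothetical usage sketch: maximal sharing of equal XML values via XML.Cache
    object XML_Cache_Demo
    {
      def demo(): Unit =
      {
        val xml_cache = XML.make_cache()
        val t1 = xml_cache.tree(XML.Elem(Markup("note", List(("kind", "info"))), List(XML.Text("hello"))))
        val t2 = xml_cache.tree(XML.Elem(Markup("note", List(("kind", "info"))), List(XML.Text("hello"))))
        assert(t1 eq t2)                      // structurally equal trees become one shared object
        val s = xml_cache.string("hello")     // inherited from isabelle.Cache
      }
    }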
--- a/src/Pure/Thy/export_theory.ML Thu May 24 17:06:39 2018 +0200
+++ b/src/Pure/Thy/export_theory.ML Thu May 24 22:28:26 2018 +0200
@@ -27,6 +27,9 @@
 val _ =
   setup_presentation (fn {adjust_pos, ...} => fn thy =>
     let
+      val rep_tsig = Type.rep_tsig (Sign.tsig_of thy);
+
+
       (* parents *)
 
       val parents = Theory.parents_of thy;
@@ -76,7 +79,7 @@
 
     val _ =
       export_entities "types" (K export_type) Sign.type_space
-        (Name_Space.dest_table (#types (Type.rep_tsig (Sign.tsig_of thy))));
+        (Name_Space.dest_table (#types rep_tsig));
 
 
     (* consts *)
@@ -120,6 +123,24 @@
       export_entities "facts" (K (SOME o export_fact))
        (Facts.space_of o Global_Theory.facts_of)
        (Facts.dest_static true [] (Global_Theory.facts_of thy));
+
+      (* type classes *)
+
+      val encode_class =
+        let open XML.Encode Term_XML.Encode
+        in pair (list (pair string typ)) (list term) end;
+
+      fun export_class name =
+        (case try (Axclass.get_info thy) name of
+          NONE => ([], [])
+        | SOME {params, axioms, ...} =>
+            (params, map (Logic.unvarify_global o Thm.full_prop_of) axioms))
+        |> encode_class |> SOME;
+
+      val _ =
+        export_entities "classes" (fn name => fn () => export_class name)
+          Sign.class_space (map (rpair ()) (Graph.keys (Sorts.classes_of (#2 (#classes rep_tsig)))));
+
     in () end);
 
 end;
--- a/src/Pure/Thy/export_theory.scala Thu May 24 17:06:39 2018 +0200
+++ b/src/Pure/Thy/export_theory.scala Thu May 24 22:28:26 2018 +0200
@@ -26,14 +26,21 @@
   def read_session(store: Sessions.Store,
     session_name: String,
     types: Boolean = true,
-    consts: Boolean = true): Session =
+    consts: Boolean = true,
+    axioms: Boolean = true,
+    facts: Boolean = true,
+    classes: Boolean = true,
+    typedefs: Boolean = true,
+    cache: Term.Cache = Term.make_cache()): Session =
   {
     val thys =
       using(store.open_database(session_name))(db =>
      {
        db.transaction {
          Export.read_theory_names(db, session_name).map((theory_name: String) =>
-            read_theory(db, session_name, theory_name, types = types, consts = consts))
+            read_theory(db, session_name, theory_name, types = types, consts = consts,
+              axioms = axioms, facts = facts, classes = classes, typedefs = typedefs,
+              cache = Some(cache)))
        }
      })
 
@@ -55,18 +62,33 @@
     types: List[Type],
     consts: List[Const],
     axioms: List[Axiom],
-    facts: List[Fact])
+    facts: List[Fact],
+    classes: List[Class],
+    typedefs: List[Typedef])
   {
     override def toString: String = name
+
+    def cache(cache: Term.Cache): Theory =
+      Theory(cache.string(name),
+        parents.map(cache.string(_)),
+        types.map(_.cache(cache)),
+        consts.map(_.cache(cache)),
+        axioms.map(_.cache(cache)),
+        facts.map(_.cache(cache)),
+        classes.map(_.cache(cache)),
+        typedefs.map(_.cache(cache)))
   }
 
-  def empty_theory(name: String): Theory = Theory(name, Nil, Nil, Nil, Nil, Nil)
+  def empty_theory(name: String): Theory = Theory(name, Nil, Nil, Nil, Nil, Nil, Nil, Nil)
 
   def read_theory(db: SQL.Database, session_name: String, theory_name: String,
     types: Boolean = true,
     consts: Boolean = true,
     axioms: Boolean = true,
-    facts: Boolean = true): Theory =
+    facts: Boolean = true,
+    classes: Boolean = true,
+    typedefs: Boolean = true,
+    cache: Option[Term.Cache] = None): Theory =
   {
     val parents =
       Export.read_entry(db, session_name, theory_name, "theory/parents") match {
@@ -75,11 +97,15 @@
           error("Missing theory export in session " + quote(session_name) + ": " +
             quote(theory_name))
      }
-    Theory(theory_name, parents,
-      if (types) read_types(db, session_name, theory_name) else Nil,
-      if (consts) read_consts(db, session_name, theory_name) else Nil,
-      if (axioms) read_axioms(db, session_name, theory_name) else Nil,
-      if (facts) read_facts(db, session_name, theory_name) else Nil)
+    val theory =
+      Theory(theory_name, parents,
+        if (types) read_types(db, session_name, theory_name) else Nil,
+        if (consts) read_consts(db, session_name, theory_name) else Nil,
+        if (axioms) read_axioms(db, session_name, theory_name) else Nil,
+        if (facts) read_facts(db, session_name, theory_name) else Nil,
+        if (classes) read_classes(db, session_name, theory_name) else Nil,
+        if (typedefs) read_typedefs(db, session_name, theory_name) else Nil)
+    if (cache.isDefined) theory.cache(cache.get) else theory
   }
 
 
@@ -88,6 +114,9 @@
   sealed case class Entity(name: String, serial: Long, pos: Position.T)
   {
     override def toString: String = name
+
+    def cache(cache: Term.Cache): Entity =
+      Entity(cache.string(name), serial, cache.position(pos))
   }
 
   def decode_entity(tree: XML.Tree): (Entity, XML.Body) =
@@ -104,19 +133,32 @@
     }
   }
 
+  def read_export[A](db: SQL.Database, session_name: String, theory_name: String,
+    export_name: String, decode: XML.Body => List[A]): List[A] =
+  {
+    Export.read_entry(db, session_name, theory_name, "theory/" + export_name) match {
+      case Some(entry) => decode(entry.uncompressed_yxml())
+      case None => Nil
+    }
+  }
+
   def read_entities[A](db: SQL.Database, session_name: String, theory_name: String,
     export_name: String, decode: XML.Tree => A): List[A] =
   {
-    Export.read_entry(db, session_name, theory_name, "theory/" + export_name) match {
-      case Some(entry) => entry.uncompressed_yxml().map(decode(_))
-      case None => Nil
-    }
+    read_export(db, session_name, theory_name, export_name,
+      (body: XML.Body) => body.map(decode(_)))
   }
 
 
   /* types */
 
   sealed case class Type(entity: Entity, args: List[String], abbrev: Option[Term.Typ])
+  {
+    def cache(cache: Term.Cache): Type =
+      Type(entity.cache(cache),
+        args.map(cache.string(_)),
+        abbrev.map(cache.typ(_)))
+  }
 
   def read_types(db: SQL.Database, session_name: String, theory_name: String): List[Type] =
     read_entities(db, session_name, theory_name, "types",
@@ -136,6 +178,13 @@
 
   sealed case class Const(
     entity: Entity, typargs: List[String], typ: Term.Typ, abbrev: Option[Term.Term])
+  {
+    def cache(cache: Term.Cache): Const =
+      Const(entity.cache(cache),
+        typargs.map(cache.string(_)),
+        cache.typ(typ),
+        abbrev.map(cache.term(_)))
+  }
 
   def read_consts(db: SQL.Database, session_name: String, theory_name: String): List[Const] =
     read_entities(db, session_name, theory_name, "consts",
@@ -166,6 +215,13 @@
     typargs: List[(String, Term.Sort)],
     args: List[(String, Term.Typ)],
     prop: Term.Term)
+  {
+    def cache(cache: Term.Cache): Axiom =
+      Axiom(entity.cache(cache),
+        typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
+        args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
+        cache.term(prop))
+  }
 
   def read_axioms(db: SQL.Database, session_name: String, theory_name: String): List[Axiom] =
     read_entities(db, session_name, theory_name, "axioms",
@@ -181,6 +237,13 @@
     typargs: List[(String, Term.Sort)],
     args: List[(String, Term.Typ)],
     props: List[Term.Term])
+  {
+    def cache(cache: Term.Cache): Fact =
+      Fact(entity.cache(cache),
+        typargs.map({ case (name, sort) => (cache.string(name), cache.sort(sort)) }),
+        args.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
+        props.map(cache.term(_)))
+  }
 
   def read_facts(db: SQL.Database, session_name: String, theory_name: String): List[Fact] =
     read_entities(db, session_name, theory_name, "facts",
@@ -190,4 +253,59 @@
         val (typargs, args, props) = decode_props(body)
         Fact(entity, typargs, args, props)
       })
+
+
+  /* type classes */
+
+  sealed case class Class(
+    entity: Entity, params: List[(String, Term.Typ)], axioms: List[Term.Term])
+  {
+    def cache(cache: Term.Cache): Class =
+      Class(entity.cache(cache),
+        params.map({ case (name, typ) => (cache.string(name), cache.typ(typ)) }),
+        axioms.map(cache.term(_)))
+  }
+
+  def read_classes(db: SQL.Database, session_name: String, theory_name: String): List[Class] =
+    read_entities(db, session_name, theory_name, "classes",
+      (tree: XML.Tree) =>
+      {
+        val (entity, body) = decode_entity(tree)
+        val (params, axioms) =
+        {
+          import XML.Decode._
+          import Term_XML.Decode._
+          pair(list(pair(string, typ)), list(term))(body)
+        }
+        Class(entity, params, axioms)
+      })
+
+
+  /* HOL typedefs */
+
+  sealed case class Typedef(name: String,
+    rep_type: Term.Typ, abs_type: Term.Typ, rep_name: String, abs_name: String, axiom_name: String)
+  {
+    def cache(cache: Term.Cache): Typedef =
+      Typedef(cache.string(name),
+        cache.typ(rep_type),
+        cache.typ(abs_type),
+        cache.string(rep_name),
+        cache.string(abs_name),
+        cache.string(axiom_name))
+  }
+
+  def read_typedefs(db: SQL.Database, session_name: String, theory_name: String): List[Typedef] =
+    read_export(db, session_name, theory_name, "typedefs",
+      (body: XML.Body) =>
+      {
+        val typedefs =
+        {
+          import XML.Decode._
+          import Term_XML.Decode._
+          list(pair(string, pair(typ, pair(typ, pair(string, pair(string, string))))))(body)
+        }
+        for { (name, (rep_type, (abs_type, (rep_name, (abs_name, axiom_name))))) <- typedefs }
+          yield Typedef(name, rep_type, abs_type, rep_name, abs_name, axiom_name)
      })
 }
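For orientation, a hedged sketch (not part of the changeset) of reading the new "classes" and "typedefs" sections back through the extended reader; the database handle, session name and theory name below are placeholders:

    // hypothetical usage sketch of the extended Export_Theory reader
    def print_new_sections(db: SQL.Database): Unit =
    {
      val cache = Term.make_cache()
      val thy =
        Export_Theory.read_theory(db, "HOL", "HOL.Nat",
          classes = true, typedefs = true, cache = Some(cache))
      for (c <- thy.classes) println("class " + c.entity.name)
      for (td <- thy.typedefs) println("typedef " + td.name + " of " + td.rep_type)
    }

Passing cache = Some(cache) routes every name, type and term through Term.Cache, so repeated values across entities share one representation.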
--- a/src/Pure/build-jars Thu May 24 17:06:39 2018 +0200
+++ b/src/Pure/build-jars Thu May 24 22:28:26 2018 +0200
@@ -41,6 +41,7 @@
   GUI/wrap_panel.scala
   General/antiquote.scala
   General/bytes.scala
+  General/cache.scala
   General/codepoint.scala
   General/comment.scala
   General/completion.scala
--- a/src/Pure/term.scala Thu May 24 17:06:39 2018 +0200
+++ b/src/Pure/term.scala Thu May 24 22:28:26 2018 +0200
@@ -29,5 +29,63 @@
   case class Bound(index: Int) extends Term
   case class Abs(name: String, typ: Typ = dummyT, body: Term) extends Term
   case class App(fun: Term, arg: Term) extends Term
+
+
+
+  /** cache **/
+
+  def make_cache(initial_size: Int = 131071, max_string: Int = Integer.MAX_VALUE): Cache =
+    new Cache(initial_size, max_string)
+
+  class Cache private[Term](initial_size: Int, max_string: Int)
+    extends isabelle.Cache(initial_size, max_string)
+  {
+    protected def cache_indexname(x: Indexname): Indexname =
+      lookup(x) getOrElse store(cache_string(x._1), cache_int(x._2))
+
+    protected def cache_sort(x: Sort): Sort =
+      if (x == dummyS) dummyS
+      else lookup(x) getOrElse store(x.map(cache_string(_)))
+
+    protected def cache_typ(x: Typ): Typ =
+    {
+      if (x == dummyT) dummyT
+      else
+        lookup(x) match {
+          case Some(y) => y
+          case None =>
+            x match {
+              case Type(name, args) => store(Type(cache_string(name), args.map(cache_typ(_))))
+              case TFree(name, sort) => store(TFree(cache_string(name), cache_sort(sort)))
+              case TVar(name, sort) => store(TVar(cache_indexname(name), cache_sort(sort)))
+            }
+        }
+    }
+
+    protected def cache_term(x: Term): Term =
+    {
+      lookup(x) match {
+        case Some(y) => y
+        case None =>
+          x match {
+            case Const(name, typ) => store(Const(cache_string(name), cache_typ(typ)))
+            case Free(name, typ) => store(Free(cache_string(name), cache_typ(typ)))
+            case Var(name, typ) => store(Var(cache_indexname(name), cache_typ(typ)))
+            case Bound(index) => store(Bound(cache_int(index)))
+            case Abs(name, typ, body) =>
+              store(Abs(cache_string(name), cache_typ(typ), cache_term(body)))
+            case App(fun, arg) => store(App(cache_term(fun), cache_term(arg)))
+          }
+      }
+    }
+
+    // main methods
+    def indexname(x: Indexname): Indexname = synchronized { cache_indexname(x) }
+    def sort(x: Sort): Sort = synchronized { cache_sort(x) }
+    def typ(x: Typ): Typ = synchronized { cache_typ(x) }
+    def term(x: Term): Term = synchronized { cache_term(x) }
+
+    def position(x: Position.T): Position.T =
+      synchronized { x.map({ case (a, b) => (cache_string(a), cache_string(b)) }) }
+  }
 }
-
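Finally, a small hypothetical sketch (not part of the changeset; the object name and sample type are invented) of the new Term.Cache, which the theory export above uses to share equal types and terms:

    // hypothetical usage sketch: sharing structurally equal types via Term.Cache
    object Term_Cache_Demo
    {
      def demo(): Unit =
      {
        val cache = Term.make_cache()
        val t1 = cache.typ(Term.Type("fun", List(Term.TFree("'a", List("HOL.type")), Term.Type("Nat.nat"))))
        val t2 = cache.typ(Term.Type("fun", List(Term.TFree("'a", List("HOL.type")), Term.Type("Nat.nat"))))
        assert(t1 eq t2)   // equal types are represented by one shared object after caching
      }
    }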