# HG changeset patch
# User wenzelm
# Date 1492799780 -7200
# Node ID c09c11386ca536b79936aab91881ebb132087ee8
# Parent  03efd17e083b643432c8bd4bf5eff8074cb3fdf7
# Parent  8369b33fda0a5418f1afc60bdd84e07b6dc69024
merged

diff -r 03efd17e083b -r c09c11386ca5 NEWS
--- a/NEWS Thu Apr 20 16:21:29 2017 +0200
+++ b/NEWS Fri Apr 21 20:36:20 2017 +0200
@@ -56,9 +56,6 @@
 entry of the specified logic session in the editor, while its parent is used
 for formal checking.
 
-* Improved support for editing of a complex session hierarchy with
-session-qualified theory imports: "isabelle jedit -A".
-
 * The PIDE document model maintains file content independently of the
 status of jEdit editor buffers. Reloading jEdit buffers no longer causes
 changes of formal document content. Theory dependencies are always
diff -r 03efd17e083b -r c09c11386ca5 etc/settings
--- a/etc/settings Thu Apr 20 16:21:29 2017 +0200
+++ b/etc/settings Fri Apr 21 20:36:20 2017 +0200
@@ -12,7 +12,7 @@
 ### Isabelle/Scala ###
 
-ISABELLE_SCALAC_OPTIONS="-encoding UTF-8 -nowarn -target:jvm-1.8 -Xmax-classfile-name 130"
+ISABELLE_SCALAC_OPTIONS="-encoding UTF-8 -nowarn -target:jvm-1.8 -Xmax-classfile-name 130 -J-Xms128m -J-Xmx1024m -J-Xss2m"
 
 ISABELLE_JAVA_SYSTEM_OPTIONS="-server -Dfile.encoding=UTF-8 -Disabelle.threads=0"
diff -r 03efd17e083b -r c09c11386ca5 src/Doc/JEdit/JEdit.thy
--- a/src/Doc/JEdit/JEdit.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Doc/JEdit/JEdit.thy Fri Apr 21 20:36:20 2017 +0200
@@ -231,7 +231,6 @@
 \Usage: isabelle jedit [OPTIONS] [FILES ...]
 Options are:
- -A explore theory imports of all known sessions
 -D NAME=X set JVM system property
 -J OPTION add JVM runtime option
 -R open ROOT entry of logic session and use its parent
@@ -258,11 +257,6 @@
 The \<^verbatim>\-n\ option bypasses the implicit build process for the selected
 session image.
 
- Option \<^verbatim>\-A\ explores theory imports of all known sessions (according to the
- directories specified via option \<^verbatim>\-d\). This facilitates editing of a
- complex session hierarchy with session-qualified theory imports, while using
- a different base session image than usual.
-
 Option \<^verbatim>\-R\ modifies the meaning of option \<^verbatim>\-l\ as follows: the \<^verbatim>\ROOT\
 entry of the specified session is opened in the editor, while its parent
 session is used for formal checking. This facilitates maintenance of a
diff -r 03efd17e083b -r c09c11386ca5 src/FOLP/ROOT
--- a/src/FOLP/ROOT Thu Apr 20 16:21:29 2017 +0200
+++ b/src/FOLP/ROOT Fri Apr 21 20:36:20 2017 +0200
@@ -10,7 +10,9 @@
 Presence of unknown proof term means that matching does not behave as expected.
 *}
 options [document = false]
- theories FOLP
+ theories
+ IFOLP (global)
+ FOLP (global)
 
 session "FOLP-ex" in ex = FOLP +
 description {*
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Analysis/Conformal_Mappings.thy
--- a/src/HOL/Analysis/Conformal_Mappings.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Analysis/Conformal_Mappings.thy Fri Apr 21 20:36:20 2017 +0200
@@ -5,7 +5,7 @@
 text\Also Cauchy's residue theorem by Wenda Li (2016)\
 
 theory Conformal_Mappings
-imports "Cauchy_Integral_Theorem"
+imports Cauchy_Integral_Theorem
 begin
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Auth/Auth_Public.thy
--- a/src/HOL/Auth/Auth_Public.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Auth/Auth_Public.thy Fri Apr 21 20:36:20 2017 +0200
@@ -6,10 +6,10 @@
 theory Auth_Public
 imports
- "NS_Public_Bad"
- "NS_Public"
- "TLS"
- "CertifiedEmail"
+ NS_Public_Bad
+ NS_Public
+ TLS
+ CertifiedEmail
 begin
 
 end
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Auth/Auth_Shared.thy
--- a/src/HOL/Auth/Auth_Shared.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Auth/Auth_Shared.thy Fri Apr 21 20:36:20 2017 +0200
@@ -6,22 +6,22 @@
 theory Auth_Shared
 imports
- "NS_Shared"
- "Kerberos_BAN"
- "Kerberos_BAN_Gets"
- "KerberosIV"
- "KerberosIV_Gets"
- "KerberosV"
- "OtwayRees"
- "OtwayRees_AN"
- "OtwayRees_Bad"
- "OtwayReesBella"
- "WooLam"
- "Recur"
- "Yahalom"
- "Yahalom2"
- "Yahalom_Bad"
- "ZhouGollmann"
+ NS_Shared
+ Kerberos_BAN
+ Kerberos_BAN_Gets
+ KerberosIV
+ KerberosIV_Gets
+ KerberosV
+ OtwayRees
+ OtwayRees_AN
+ OtwayRees_Bad
+ OtwayReesBella
+ WooLam
+ Recur
+ Yahalom
+ Yahalom2
+ Yahalom_Bad
+ ZhouGollmann
 begin
 
 end
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Auth/Guard/Auth_Guard_Public.thy
--- a/src/HOL/Auth/Guard/Auth_Guard_Public.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Auth/Guard/Auth_Guard_Public.thy Fri Apr 21 20:36:20 2017 +0200
@@ -6,10 +6,10 @@
 theory Auth_Guard_Public
 imports
- "P1"
- "P2"
- "Guard_NS_Public"
- "Proto"
+ P1
+ P2
+ Guard_NS_Public
+ Proto
 begin
 
 end
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Auth/Guard/Auth_Guard_Shared.thy
--- a/src/HOL/Auth/Guard/Auth_Guard_Shared.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Auth/Guard/Auth_Guard_Shared.thy Fri Apr 21 20:36:20 2017 +0200
@@ -6,8 +6,8 @@
 theory Auth_Guard_Shared
 imports
- "Guard_OtwayRees"
- "Guard_Yahalom"
+ Guard_OtwayRees
+ Guard_Yahalom
 begin
 
 end
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Auth/Smartcard/Auth_Smartcard.thy
--- a/src/HOL/Auth/Smartcard/Auth_Smartcard.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Auth/Smartcard/Auth_Smartcard.thy Fri Apr 21 20:36:20 2017 +0200
@@ -6,8 +6,8 @@
 theory Auth_Smartcard
 imports
- "ShoupRubin"
- "ShoupRubinBella"
+ ShoupRubin
+ ShoupRubinBella
 begin
 
 end
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Corec_Examples/Paper_Examples.thy
--- a/src/HOL/Corec_Examples/Paper_Examples.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Corec_Examples/Paper_Examples.thy Fri Apr 21 20:36:20 2017 +0200
@@ -9,7 +9,7 @@
 section \Small Examples from the Paper ``Friends with Benefits''\
 
 theory Paper_Examples
-imports "~~/src/HOL/Library/BNF_Corec" "~~/src/HOL/Library/FSet" "Complex_Main"
+imports "~~/src/HOL/Library/BNF_Corec" "~~/src/HOL/Library/FSet" Complex_Main
 begin
 
 section \Examples from the introduction\
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Library/code_test.ML
--- a/src/HOL/Library/code_test.ML Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Library/code_test.ML Fri Apr 21 20:36:20 2017 +0200
@@ -551,11 +551,11 @@
 "}\n"
 
 val compile_cmd =
- "\"$SCALA_HOME/bin/scalac\" $ISABELLE_SCALAC_OPTIONS -d " ^ File.bash_path path ^
+ "isabelle_scala scalac $ISABELLE_SCALAC_OPTIONS -d " ^ File.bash_path path ^
 " -classpath " ^ File.bash_path path ^ " " ^
 File.bash_path code_path ^ " " ^ File.bash_path driver_path
 
- val run_cmd = "\"$SCALA_HOME/bin/scala\" -cp " ^ File.bash_path path ^ " Test"
+ val run_cmd = "isabelle_scala scala -cp " ^ File.bash_path path ^ " Test"
 in
 {files = [(driver_path, driver)], compile_cmd = SOME compile_cmd,
 run_cmd = run_cmd, mk_code_file = K code_path}
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/MicroJava/Comp/CorrComp.thy
--- a/src/HOL/MicroJava/Comp/CorrComp.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/MicroJava/Comp/CorrComp.thy Fri Apr 21 20:36:20 2017 +0200
@@ -5,7 +5,7 @@
 (* Compiler correctness statement and proof *)
 
 theory CorrComp
-imports "../J/JTypeSafe" "LemmasComp"
+imports "../J/JTypeSafe" LemmasComp
 begin
 
 declare wf_prog_ws_prog [simp add]
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/Proofs/Lambda/WeakNorm.thy
--- a/src/HOL/Proofs/Lambda/WeakNorm.thy Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/Proofs/Lambda/WeakNorm.thy Fri Apr 21 20:36:20 2017 +0200
@@ -6,8 +6,7 @@
 section \Weak normalization for simply-typed lambda calculus\
 
 theory WeakNorm
-imports LambdaType NormalForm "~~/src/HOL/Library/Old_Datatype"
- "~~/src/HOL/Library/Code_Target_Int"
+imports LambdaType NormalForm "HOL-Library.Old_Datatype" "HOL-Library.Code_Target_Int"
 begin
 
 text \
diff -r 03efd17e083b -r c09c11386ca5 src/HOL/ROOT
--- a/src/HOL/ROOT Thu Apr 20 16:21:29 2017 +0200
+++ b/src/HOL/ROOT Fri Apr 21 20:36:20 2017 +0200
@@ -20,8 +20,12 @@
 *}
 options [document = false, theory_qualifier = "HOL",
 quick_and_dirty = false, record_proofs = 2, parallel_proofs = 0]
- sessions "HOL-Library"
- theories "HOL-Library.Old_Datatype"
+ sessions
+ "HOL-Library"
+ theories
+ GCD
+ Binomial
+ "HOL-Library.Old_Datatype"
 files
 "Tools/Quickcheck/Narrowing_Engine.hs"
 "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
@@ -59,6 +63,9 @@
 document_files
 "root.bib"
 "root.tex"
 
 session "HOL-Analysis" (main timing) in Analysis = HOL +
+ sessions
+ "HOL-Library"
+ "HOL-Computational_Algebra"
 theories
 Analysis
 document_files
@@ -70,6 +77,8 @@
 Circle_Area
 
 session "HOL-Computational_Algebra" in "Computational_Algebra" = HOL +
+ sessions
+ "HOL-Library"
 theories
 Computational_Algebra
 (*conflicting type class instantiations and dependent applications*)
@@ -94,7 +103,10 @@
 subspaces (not only one-dimensional subspaces), can be extended to a continous
 linearform on the whole vectorspace.
 *}
- theories Hahn_Banach
+ sessions
+ "HOL-Library"
+ theories
+ Hahn_Banach
 document_files "root.bib" "root.tex"
 
 session "HOL-Induct" in Induct = HOL +
@@ -114,6 +126,8 @@
 Exp demonstrates the use of iterated inductive definitions to reason about
 mutually recursive relations.
 *}
+ sessions
+ "HOL-Library"
 theories [document = false]
 "~~/src/HOL/Library/Old_Datatype"
 theories [quick_and_dirty]
@@ -190,7 +204,7 @@
 session "HOL-Data_Structures" (timing) in Data_Structures = HOL +
 options [document_variants = document]
 theories [document = false]
- "Less_False"
+ Less_False
 "~~/src/HOL/Library/Multiset"
 "~~/src/HOL/Number_Theory/Fib"
 theories
@@ -215,6 +229,10 @@
 Fundamental Theorem of Arithmetic, Chinese Remainder Theorem, Fermat/Euler
 Theorem, Wilson's Theorem, some lemmas for Quadratic Reciprocity.
 *}
+ sessions
+ "HOL-Library"
+ "HOL-Algebra"
+ "HOL-Computational_Algebra"
 theories [document = false]
 "~~/src/HOL/Library/FuncSet"
 "~~/src/HOL/Library/Multiset"
@@ -243,6 +261,9 @@
 session "HOL-Codegenerator_Test" in Codegenerator_Test = "HOL-Library" +
 options [document = false, browser_info = false]
+ sessions
+ "HOL-Computational_Algebra"
+ "HOL-Number_Theory"
 theories
 Generate
 Generate_Binary_Nat
@@ -303,6 +324,9 @@
 The Isabelle Algebraic Library.
 *}
+ sessions
+ "HOL-Library"
+ "HOL-Computational_Algebra"
 theories [document = false]
 (* Preliminaries from set and number theory *)
 "~~/src/HOL/Library/FuncSet"
@@ -348,7 +372,7 @@
 *}
 theories
 (*Basic meta-theory*)
- "UNITY_Main"
+ UNITY_Main
 
 (*Simple examples: no composition*)
 "Simple/Deadlock"
@@ -380,7 +404,7 @@
 "Comp/Client"
 
 (*obsolete*)
- "ELT"
+ ELT
 document_files "root.tex"
 
 session "HOL-Unix" in Unix = HOL +
@@ -394,6 +418,8 @@
 session "HOL-Imperative_HOL" in Imperative_HOL = HOL +
 options [print_mode = "iff,no_brackets"]
+ sessions
+ "HOL-Library"
 theories [document = false]
 "~~/src/HOL/Library/Countable"
 "~~/src/HOL/Library/Monad_Syntax"
@@ -406,7 +432,11 @@
 Various decision procedures, typically involving reflection.
 *}
 options [document = false]
- theories Decision_Procs
+ sessions
+ "HOL-Library"
+ "HOL-Algebra"
+ theories
+ Decision_Procs
 
 session "HOL-Proofs-ex" in "Proofs/ex" = "HOL-Proofs" +
 options [document = false]
@@ -420,6 +450,9 @@
 Examples for program extraction in Higher-Order Logic.
 *}
 options [parallel_proofs = 0, quick_and_dirty = false]
+ sessions
+ "HOL-Library"
+ "HOL-Computational_Algebra"
 theories [document = false]
 "~~/src/HOL/Library/Code_Target_Numeral"
 "~~/src/HOL/Library/Monad_Syntax"
@@ -445,8 +478,8 @@
 *}
 options [print_mode = "no_brackets", parallel_proofs = 0, quick_and_dirty = false]
- theories [document = false]
- "~~/src/HOL/Library/Code_Target_Int"
+ sessions
+ "HOL-Library"
 theories
 Eta
 StrongNorm
@@ -473,6 +506,8 @@
 machine and a specification of its bytecode verifier and a lightweight
 bytecode verifier, including proofs of type-safety.
 *}
+ sessions
+ "HOL-Library"
 theories [document = false]
 "~~/src/HOL/Library/While_Combinator"
 theories
@@ -542,6 +577,9 @@
 description {*
 Miscellaneous examples for Higher-Order Logic.
 *}
+ sessions
+ "HOL-Library"
+ "HOL-Number_Theory"
 theories [document = false]
 "~~/src/HOL/Library/State_Monad"
 Code_Binary_Nat_examples
@@ -638,6 +676,9 @@
 Miscellaneous Isabelle/Isar examples.
 *}
 options [quick_and_dirty]
+ sessions
+ "HOL-Library"
+ "HOL-Computational_Algebra"
 theories [document = false]
 "~~/src/HOL/Library/Lattice_Syntax"
 "../Computational_Algebra/Primes"
@@ -677,8 +718,12 @@
 description {*
 Verification of the SET Protocol.
 *}
- theories [document = false] "~~/src/HOL/Library/Nat_Bijection"
- theories SET_Protocol
+ sessions
+ "HOL-Library"
+ theories [document = false]
+ "~~/src/HOL/Library/Nat_Bijection"
+ theories
+ SET_Protocol
 document_files "root.tex"
 
 session "HOL-Matrix_LP" in Matrix_LP = HOL +
@@ -726,6 +771,8 @@
 ATP_Problem_Import
 
 session "HOL-Probability" (main timing) in "Probability" = "HOL-Analysis" +
+ sessions
+ "HOL-Library"
 theories [document = false]
 "~~/src/HOL/Library/Countable"
 "~~/src/HOL/Library/Permutation"
@@ -738,9 +785,9 @@
 session "HOL-Probability-ex" (timing) in "Probability/ex" = "HOL-Probability" +
 theories
- "Dining_Cryptographers"
- "Koepf_Duermuth_Countermeasure"
- "Measure_Not_CCC"
+ Dining_Cryptographers
+ Koepf_Duermuth_Countermeasure
+ Measure_Not_CCC
 
 session "HOL-Nominal" in Nominal = HOL +
 options [document = false]
@@ -852,7 +899,10 @@
 session "HOL-Nonstandard_Analysis-Examples" (timing) in "Nonstandard_Analysis/Examples" = "HOL-Nonstandard_Analysis" +
 options [document = false]
- theories NSPrimes
+ sessions
+ "HOL-Computational_Algebra"
+ theories
+ NSPrimes
 
 session "HOL-Mirabelle" in Mirabelle = HOL +
 options [document = false]
@@ -989,6 +1039,8 @@
 Author: Cezary Kaliszyk and Christian Urban
 *}
 options [document = false]
+ sessions
+ "HOL-Library"
 theories
 DList
 Quotient_FSet
@@ -1042,6 +1094,8 @@
 HOLCF -- a semantic extension of HOL by the LCF logic.
 *}
+ sessions
+ "HOL-Library"
 theories [document = false]
 "~~/src/HOL/Library/Nat_Bijection"
 "~~/src/HOL/Library/Countable"
@@ -1121,7 +1175,7 @@
 finite and infinite sequences.
 *}
 options [document = false]
- theories "Abstraction"
+ theories Abstraction
 
 session "IOA-ABP" in "HOLCF/IOA/ABP" = IOA +
 description {*
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Admin/build_doc.scala
--- a/src/Pure/Admin/build_doc.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/Admin/build_doc.scala Fri Apr 21 20:36:20 2017 +0200
@@ -24,11 +24,11 @@
 {
 val selection =
 for {
- (name, info) <- Sessions.load(options).build_topological_order
+ info <- Sessions.load(options).build_topological_order
 if info.groups.contains("doc")
 doc = info.options.string("document_variants")
 if all_docs || docs.contains(doc)
- } yield (doc, name)
+ } yield (doc, info.name)
 
 val selected_docs = selection.map(_._1)
 val sessions = selection.map(_._2)
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/General/symbol.scala
--- a/src/Pure/General/symbol.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/General/symbol.scala Fri Apr 21 20:36:20 2017 +0200
@@ -216,7 +216,6 @@
 { case (List(a), Nil) => File(a) }))
 }
-
 def apply(text: CharSequence): Text_Chunk =
 new Text_Chunk(Text.Range(0, text.length), Index(text))
 }
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Isar/token.scala
--- a/src/Pure/Isar/token.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/Isar/token.scala Fri Apr 21 20:36:20 2017 +0200
@@ -152,6 +152,19 @@
 val newline: Token = explode(Keyword.Keywords.empty, "\n").head
 
+ /* names */
+
+ def read_name(keywords: Keyword.Keywords, inp: CharSequence): Option[Token] =
+ explode(keywords, inp) match {
+ case List(tok) if tok.is_name => Some(tok)
+ case _ => None
+ }
+
+ def quote_name(keywords: Keyword.Keywords, name: String): String =
+ if (read_name(keywords, name).isDefined) name
+ else quote(name.replace("\"", "\\\""))
+
+
 /* implode */
 
 def implode(toks: List[Token]): String =
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/ML/ml_process.scala
--- a/src/Pure/ML/ml_process.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/ML/ml_process.scala Fri Apr 21 20:36:20 2017 +0200
@@ -99,8 +99,8 @@
 ML_Syntax.print_list(
 ML_Syntax.print_pair(
 ML_Syntax.print_string, ML_Syntax.print_string))(table)
- List("Resources.init_session_base {default_qualifier = \"\"" +
- ", global_theories = " + print_table(base.global_theories.toList) +
+ List("Resources.init_session_base" +
+ " {global_theories = " + print_table(base.global_theories.toList) +
 ", loaded_theories = " + print_table(base.loaded_theories.toList) +
 ", known_theories = " + print_table(base.dest_known_theories) + "}")
 }
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/PIDE/command.scala
--- a/src/Pure/PIDE/command.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/PIDE/command.scala Fri Apr 21 20:36:20 2017 +0200
@@ -520,7 +520,7 @@
 Text.Range(0,
 (length /: span.content.reverse.iterator.takeWhile(_.is_improper))(_ - _.source.length))
 
- def source(range: Text.Range): String = source.substring(range.start, range.stop)
+ def source(range: Text.Range): String = range.substring(source)
 
 /* accumulated results */
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/PIDE/line.scala
--- a/src/Pure/PIDE/line.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/PIDE/line.scala Fri Apr 21 20:36:20 2017 +0200
@@ -123,8 +123,7 @@
 lazy val text: String = Document.text(lines)
 
 def try_get_text(range: Text.Range): Option[String] =
- if (text_range.contains(range)) Some(text.substring(range.start, range.stop))
- else None
+ if (text_range.contains(range)) Some(range.substring(text)) else None
 
 override def toString: String = text
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/PIDE/protocol.ML
--- a/src/Pure/PIDE/protocol.ML Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/PIDE/protocol.ML Fri Apr 21 20:36:20 2017 +0200
@@ -19,13 +19,12 @@
 val _ =
 Isabelle_Process.protocol_command "Prover.session_base"
- (fn [default_qualifier, global_theories_yxml, loaded_theories_yxml, known_theories_yxml] =>
+ (fn [global_theories_yxml, loaded_theories_yxml, known_theories_yxml] =>
 let
 val decode_table = YXML.parse_body #>
 let open XML.Decode in list (pair string string) end;
 in
 Resources.init_session_base
- {default_qualifier = default_qualifier,
- global_theories = decode_table global_theories_yxml,
+ {global_theories = decode_table global_theories_yxml,
 loaded_theories = decode_table loaded_theories_yxml,
 known_theories = decode_table known_theories_yxml}
 end);
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/PIDE/protocol.scala
--- a/src/Pure/PIDE/protocol.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/PIDE/protocol.scala Fri Apr 21 20:36:20 2017 +0200
@@ -312,7 +312,6 @@
 def session_base(resources: Resources): Unit =
 protocol_command("Prover.session_base",
- Symbol.encode(resources.default_qualifier),
 encode_table(resources.session_base.global_theories.toList),
 encode_table(resources.session_base.loaded_theories.toList),
 encode_table(resources.session_base.dest_known_theories))
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/PIDE/resources.ML
--- a/src/Pure/PIDE/resources.ML Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/PIDE/resources.ML Fri Apr 21 20:36:20 2017 +0200
@@ -6,13 +6,12 @@
 signature RESOURCES =
 sig
+ val default_qualifier: string
 val init_session_base:
- {default_qualifier: string,
- global_theories: (string * string) list,
+ {global_theories: (string * string) list,
 loaded_theories: (string * string) list,
 known_theories: (string * string) list} -> unit
 val finish_session_base: unit -> unit
- val default_qualifier: unit -> string
 val global_theory: string -> string option
 val loaded_theory: string -> string option
 val known_theory: string -> Path.T option
@@ -37,36 +36,32 @@
 
 (* session base *)
 
+val default_qualifier = "Draft";
+
 val empty_session_base =
- {default_qualifier = "Draft",
- global_theories = Symtab.empty: string Symtab.table,
+ {global_theories = Symtab.empty: string Symtab.table,
 loaded_theories = Symtab.empty: string Symtab.table,
 known_theories = Symtab.empty: Path.T Symtab.table};
 
 val global_session_base =
 Synchronized.var "Sessions.base" empty_session_base;
 
-fun init_session_base {default_qualifier, global_theories, loaded_theories, known_theories} =
+fun init_session_base {global_theories, loaded_theories, known_theories} =
 Synchronized.change global_session_base (fn _ =>
- {default_qualifier =
- if default_qualifier <> "" then default_qualifier
- else #default_qualifier empty_session_base,
- global_theories = Symtab.make global_theories,
+ {global_theories = Symtab.make global_theories,
 loaded_theories = Symtab.make loaded_theories,
 known_theories = Symtab.make (map (apsnd Path.explode) known_theories)});
 
 fun finish_session_base () =
 Synchronized.change global_session_base
 (fn {global_theories, loaded_theories, ...} =>
- {default_qualifier = #default_qualifier empty_session_base,
- global_theories = global_theories,
+ {global_theories = global_theories,
 loaded_theories = loaded_theories,
 known_theories = #known_theories empty_session_base});
 
 fun get_session_base f = f (Synchronized.value global_session_base);
 
-fun default_qualifier () = get_session_base #default_qualifier;
 fun global_theory a = Symtab.lookup (get_session_base #global_theories) a;
 fun loaded_theory a = Symtab.lookup (get_session_base #loaded_theories) a;
 fun known_theory a = Symtab.lookup (get_session_base #known_theories) a;
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/PIDE/resources.scala
--- a/src/Pure/PIDE/resources.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/PIDE/resources.scala Fri Apr 21 20:36:20 2017 +0200
@@ -15,7 +15,6 @@
 class Resources(
 val session_base: Sessions.Base,
- val default_qualifier: String = Sessions.DRAFT,
 val log: Logger = No_Logger)
 {
 val thy_info = new Thy_Info(this)
@@ -93,8 +92,8 @@
 }
 }
 
- def import_name(node_name: Document.Node.Name, s: String): Document.Node.Name =
- import_name(theory_qualifier(node_name), node_name.master_dir, s)
+ def import_name(name: Document.Node.Name, s: String): Document.Node.Name =
+ import_name(theory_qualifier(name), name.master_dir, s)
 
 def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A =
 {
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/PIDE/text.scala
--- a/src/Pure/PIDE/text.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/PIDE/text.scala Fri Apr 21 20:36:20 2017 +0200
@@ -71,6 +71,8 @@
 def try_join(that: Range): Option[Range] =
 if (this apart that) None
 else Some(Range(this.start min that.start, this.stop max that.stop))
+
+ def substring(text: String): String = text.substring(start, stop)
 }
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/System/isabelle_tool.scala
--- a/src/Pure/System/isabelle_tool.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/System/isabelle_tool.scala Fri Apr 21 20:36:20 2017 +0200
@@ -115,6 +115,7 @@
 Remote_DMG.isabelle_tool,
 Update_Cartouches.isabelle_tool,
 Update_Header.isabelle_tool,
+ Update_Imports.isabelle_tool,
 Update_Then.isabelle_tool,
 Update_Theorems.isabelle_tool,
 isabelle.vscode.Build_VSCode.isabelle_tool,
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Thy/sessions.scala
--- a/src/Pure/Thy/sessions.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/Thy/sessions.scala Fri Apr 21 20:36:20 2017 +0200
@@ -81,6 +81,9 @@
 copy(theories = for ((a, b) <- theories) yield (a, b.map(File.platform_path(_))),
 theories_local = for ((a, b) <- theories_local) yield (a, b.map(File.platform_path(_))),
 files = for ((a, b) <- files) yield (a, b.map(c => c.map(File.platform_path(_))))
+
+ def get_file(file: JFile): Option[Document.Node.Name] =
+ files.getOrElse(file.getCanonicalFile, Nil).headOption
 }
 
 object Base
@@ -95,6 +98,7 @@
 }
 
 sealed case class Base(
+ imports: Option[Base] = None,
 global_theories: Map[String, String] = Map.empty,
 loaded_theories: Map[String, String] = Map.empty,
 known: Known = Known.empty,
@@ -103,6 +107,8 @@
 sources: List[(Path, SHA1.Digest)] = Nil,
 session_graph: Graph_Display.Graph = Graph_Display.empty_graph)
 {
+ def get_imports: Base = imports getOrElse Base.bootstrap(global_theories)
+
 def platform_path: Base = copy(known = known.platform_path)
 
 def loaded_theory(name: Document.Node.Name): Boolean =
@@ -111,9 +117,6 @@
 def known_theory(name: String): Option[Document.Node.Name] =
 known.theories.get(name)
 
- def known_file(file: JFile): Option[Document.Node.Name] =
- known.files.getOrElse(file.getCanonicalFile, Nil).headOption
-
 def dest_known_theories: List[(String, String)] =
 for ((theory, node_name) <- known.theories.toList)
 yield (theory, node_name.node)
@@ -137,7 +140,7 @@
 {
 val session_bases =
 (Map.empty[String, Base] /: sessions.imports_topological_order)({
- case (session_bases, (session_name, info)) =>
+ case (session_bases, info) =>
 if (progress.stopped) throw Exn.Interrupt()
 
 try {
@@ -150,22 +153,21 @@
 parent_base.copy(known =
 Known.make(info.dir, parent_base :: info.imports.map(session_bases(_)), Nil))
 
- val resources = new Resources(imports_base,
- default_qualifier = info.theory_qualifier(session_name))
+ val resources = new Resources(imports_base)
 
 if (verbose || list_files) {
 val groups =
 if (info.groups.isEmpty) "" else info.groups.mkString(" (", " ", ")")
- progress.echo("Session " + info.chapter + "/" + session_name + groups)
+ progress.echo("Session " + info.chapter + "/" + info.name + groups)
 }
 
 val thy_deps =
 {
 val root_theories =
 info.theories.flatMap({ case (_, thys) =>
- thys.map(thy =>
- (resources.import_name(session_name, info.dir.implode, thy), info.pos))
+ thys.map({ case (thy, pos) =>
+ (resources.import_name(info.theory_qualifier, info.dir.implode, thy), pos) })
 })
 
 val thy_deps = resources.thy_info.dependencies(root_theories)
@@ -181,7 +183,7 @@
 val loaded_files =
 if (inlined_files) {
 val pure_files =
- if (is_pure(session_name)) {
+ if (is_pure(info.name)) {
 val roots = Thy_Header.ml_roots.map(p => info.dir + Path.explode(p._1))
 val files =
 roots.flatMap(root => resources.loaded_files(syntax, File.read(root))).
@@ -211,10 +213,10 @@
 def node(name: Document.Node.Name): Graph_Display.Node =
 {
- val session = resources.theory_qualifier(name)
- if (session == session_name)
+ val qualifier = resources.theory_qualifier(name)
+ if (qualifier == info.theory_qualifier)
 Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
- else session_node(session)
+ else session_node(qualifier)
 }
 
 val imports_subgraph =
@@ -238,7 +240,8 @@
 }
 
 val base =
- Base(global_theories = global_theories,
+ Base(imports = Some(imports_base),
+ global_theories = global_theories,
 loaded_theories = thy_deps.loaded_theories,
 known = Known.make(info.dir, List(imports_base), thy_deps.deps.map(_.name)),
 keywords = thy_deps.keywords,
@@ -246,12 +249,12 @@
 sources = all_files.map(p => (p, SHA1.digest(p.file))),
 session_graph = session_graph)
 
- session_bases + (session_name -> base)
+ session_bases + (info.name -> base)
 }
 catch {
 case ERROR(msg) =>
 cat_error(msg, "The error(s) above occurred in session " +
- quote(session_name) + Position.here(info.pos))
+ quote(info.name) + Position.here(info.pos))
 }
 })
@@ -283,6 +286,7 @@
 /* cumulative session info */
 
 sealed case class Info(
+ name: String,
 chapter: String,
 select: Boolean,
 pos: Position.T,
@@ -292,7 +296,7 @@
 description: String,
 options: Options,
 imports: List[String],
- theories: List[(Options, List[String])],
+ theories: List[(Options, List[(String, Position.T)])],
 global_theories: List[String],
 files: List[Path],
 document_files: List[(Path, Path)],
@@ -300,9 +304,9 @@
 {
 def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))
 
- def theory_qualifier(default_qualifier: String): String =
+ def theory_qualifier: String =
 options.string("theory_qualifier") match {
- case "" => default_qualifier
+ case "" => name
 case qualifier => qualifier
 }
 }
@@ -310,6 +314,7 @@
 object Selection
 {
 val empty: Selection = Selection()
+ val all: Selection = Selection(all_sessions = true)
 }
 
 sealed case class Selection(
@@ -416,10 +421,11 @@
 def global_theories: Map[String, String] =
 (Thy_Header.bootstrap_global_theories.toMap /:
 (for {
- (session_name, (info, _)) <- imports_graph.iterator
- thy <- info.global_theories.iterator } yield (thy, session_name, info)))({
- case (global, (thy, session_name, info)) =>
- val qualifier = info.theory_qualifier(session_name)
+ (_, (info, _)) <- imports_graph.iterator
+ thy <- info.global_theories.iterator }
+ yield (thy, info)))({
+ case (global, (thy, info)) =>
+ val qualifier = info.theory_qualifier
 global.get(thy) match {
 case Some(qualifier1) if qualifier != qualifier1 =>
 error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
@@ -440,11 +446,11 @@
 def build_descendants(names: List[String]): List[String] =
 build_graph.all_succs(names)
 
- def build_topological_order: List[(String, Info)] =
- build_graph.topological_order.map(name => (name, apply(name)))
+ def build_topological_order: List[Info] =
+ build_graph.topological_order.map(apply(_))
 
- def imports_topological_order: List[(String, Info)] =
- imports_graph.topological_order.map(name => (name, apply(name)))
+ def imports_topological_order: List[Info] =
+ imports_graph.topological_order.map(apply(_))
 
 override def toString: String =
 imports_graph.keys_iterator.mkString("Sessions.T(", ", ", ")")
@@ -491,7 +497,7 @@
 description: String,
 options: List[Options.Spec],
 imports: List[String],
- theories: List[(List[Options.Spec], List[(String, Boolean)])],
+ theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
 files: List[String],
 document_files: List[(String, String)]) extends Entry
@@ -515,7 +521,7 @@
 ($$$("(") ~! $$$(GLOBAL) ~ $$$(")")) ^^ { case _ => true } | success(false)
 
 val theory_entry =
- theory_name ~ global ^^ { case x ~ y => (x, y) }
+ position(theory_name) ~ global ^^ { case x ~ y => (x, y) }
 
 val theories =
 $$$(THEORIES) ~!
@@ -561,11 +567,12 @@
 entry.theories.map({ case (opts, thys) => (session_options ++ opts, thys.map(_._1)) })
 
 val global_theories =
- for { (_, thys) <- entry.theories; (thy, global) <- thys if global }
+ for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global }
 yield {
 val thy_name = Path.explode(thy).expand.base.implode
 if (Long_Name.is_qualified(thy_name))
- error("Bad qualified name for global theory " + quote(thy_name))
+ error("Bad qualified name for global theory " +
+ quote(thy_name) + Position.here(pos))
 else thy_name
 }
@@ -578,9 +585,9 @@
 entry.imports, entry.theories, entry.files, entry.document_files).toString)
 
 val info =
- Info(entry_chapter, select, entry.pos, entry.groups, dir + Path.explode(entry.path),
- entry.parent, entry.description, session_options, entry.imports, theories,
- global_theories, files, document_files, meta_digest)
+ Info(name, entry_chapter, select, entry.pos, entry.groups,
+ dir + Path.explode(entry.path), entry.parent, entry.description, session_options,
+ entry.imports, theories, global_theories, files, document_files, meta_digest)
 (name, info)
 }
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Thy/thy_header.scala
--- a/src/Pure/Thy/thy_header.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/Thy/thy_header.scala Fri Apr 21 20:36:20 2017 +0200
@@ -81,6 +81,9 @@
 private val Thy_File_Name = new Regex(""".*?([^/\\:]+)\.thy""")
 private val Import_Name = new Regex(""".*?([^/\\:]+)""")
 
+ def is_base_name(s: String): Boolean =
+ s != "" && !s.exists("/\\:".contains(_))
+
 def import_name(s: String): String =
 s match { case Import_Name(name) => name case _ => error("Malformed import: " + quote(s)) }
 
@@ -153,7 +156,7 @@
 
 /* read -- lazy scanning */
 
- def read(reader: Reader[Char], start: Token.Pos, strict: Boolean = true): Thy_Header =
+ private def read_tokens(reader: Reader[Char], strict: Boolean): (List[Token], List[Token]) =
 {
 val token = Token.Parsers.token(bootstrap_keywords)
 def make_tokens(in: Reader[Char]): Stream[Token] =
@@ -162,14 +165,30 @@
 case _ => Stream.empty
 }
 
- val tokens =
- if (strict) make_tokens(reader)
- else make_tokens(reader).dropWhile(tok => !tok.is_command(Thy_Header.THEORY))
+ val all_tokens = make_tokens(reader)
+ val drop_tokens =
+ if (strict) Nil
+ else all_tokens.takeWhile(tok => !tok.is_command(Thy_Header.THEORY)).toList
+ val tokens = all_tokens.drop(drop_tokens.length)
 
 val tokens1 = tokens.takeWhile(tok => !tok.is_begin).toList
 val tokens2 = tokens.dropWhile(tok => !tok.is_begin).headOption.toList
 
- parse(commit(header), Token.reader(tokens1 ::: tokens2, start)) match {
+ (drop_tokens, tokens1 ::: tokens2)
+ }
+
+ def read(reader: Reader[Char], start: Token.Pos, strict: Boolean = true): Thy_Header =
+ {
+ val (_, tokens0) = read_tokens(reader, true)
+ val text =
+ if (reader.isInstanceOf[Scan.Byte_Reader])
+ UTF8.decode_permissive(Token.implode(tokens0))
+ else Token.implode(tokens0)
+
+ val (drop_tokens, tokens) = read_tokens(Scan.char_reader(text), strict)
+ val pos = (start /: drop_tokens)(_.advance(_))
+
+ parse(commit(header), Token.reader(tokens, pos)) match {
 case Success(result, _) => result
 case bad => error(bad.toString)
 }
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Thy/thy_info.ML
--- a/src/Pure/Thy/thy_info.ML Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/Thy/thy_info.ML Fri Apr 21 20:36:20 2017 +0200
@@ -408,7 +408,7 @@
 fun use_thy name =
 use_theories
 {document = false, symbols = HTML.no_symbols, last_timing = K Time.zeroTime,
- qualifier = Resources.default_qualifier (), master_dir = Path.current}
+ qualifier = Resources.default_qualifier, master_dir = Path.current}
 [(name, Position.none)];
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Tools/build.ML
--- a/src/Pure/Tools/build.ML Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/Tools/build.ML Fri Apr 21 20:36:20 2017 +0200
@@ -153,13 +153,14 @@
 fun decode_args yxml =
 let
 open XML.Decode;
+ val position = Position.of_properties o properties;
 val (symbol_codes, (command_timings, (do_output, (verbose, (browser_info, (document_files, (graph_file, (parent_name, (chapter, (name, (master_dir, (theories, (global_theories, (loaded_theories, known_theories)))))))))))))) =
 pair (list (pair string int)) (pair (list properties) (pair bool (pair bool (pair string (pair (list (pair string string)) (pair string (pair string (pair string (pair string (pair string
- (pair (((list (pair Options.decode (list (string #> rpair Position.none))))))
+ (pair (((list (pair Options.decode (list (pair string position))))))
 (pair (list (pair string string))
 (pair (list (pair string string)) (list (pair string string)))))))))))))))
 (YXML.parse_body yxml);
@@ -181,8 +182,7 @@
 val _ =
 Resources.init_session_base
- {default_qualifier = name,
- global_theories = global_theories,
+ {global_theories = global_theories,
 loaded_theories = loaded_theories,
 known_theories = known_theories};
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Tools/build.scala
--- a/src/Pure/Tools/build.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/Tools/build.scala Fri Apr 21 20:36:20 2017 +0200
@@ -203,7 +203,7 @@
 pair(list(pair(string, int)), pair(list(properties), pair(bool, pair(bool,
 pair(Path.encode, pair(list(pair(Path.encode, Path.encode)), pair(string, pair(string,
 pair(string, pair(string, pair(Path.encode,
- pair(list(pair(Options.encode, list(string))),
+ pair(list(pair(Options.encode, list(pair(string, properties)))),
 pair(list(pair(string, string)),
 pair(list(pair(string, string)), list(pair(string, string))))))))))))))))(
 (Symbol.codes, (command_timings, (do_output, (verbose,
@@ -223,8 +223,7 @@
 ML_Syntax.print_string0(File.platform_path(output))
 
 if (pide && !Sessions.is_pure(name)) {
- val resources =
- new Resources(deps(parent), default_qualifier = info.theory_qualifier(name))
+ val resources = new Resources(deps(parent))
 val session = new Session(options, resources)
 val handler = new Handler(progress, session, name)
 session.init_protocol_handler(handler)
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/Tools/update_imports.scala
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Pure/Tools/update_imports.scala Fri Apr 21 20:36:20 2017 +0200
@@ -0,0 +1,206 @@
+/* Title: Pure/Tools/update_imports.scala
+ Author: Makarius
+
+Update theory imports to use session qualifiers.
+*/
+
+package isabelle
+
+
+import java.io.{File => JFile}
+
+
+object Update_Imports
+{
+ /* update imports */
+
+ sealed case class Update(range: Text.Range, old_text: String, new_text: String)
+ {
+ def edits: List[Text.Edit] =
+ Text.Edit.replace(range.start, old_text, new_text)
+
+ override def toString: String =
+ range.toString + ": " + old_text + " -> " + new_text
+ }
+
+ def update_name(keywords: Keyword.Keywords, pos: Position.T, update: String => String)
+ : Option[(JFile, Update)] =
+ {
+ val file =
+ pos match {
+ case Position.File(file) => Path.explode(file).file.getCanonicalFile
+ case _ => error("Missing file in position" + Position.here(pos))
+ }
+
+ val text = File.read(file)
+
+ val range =
+ pos match {
+ case Position.Range(symbol_range) => Symbol.Text_Chunk(text).decode(symbol_range)
+ case _ => error("Missing range in position" + Position.here(pos))
+ }
+
+ Token.read_name(keywords, range.substring(text)) match {
+ case Some(tok) =>
+ val s1 = tok.source
+ val s2 = Token.quote_name(keywords, update(tok.content))
+ if (s1 == s2) None else Some((file, Update(range, s1, s2)))
+ case None => error("Name token expected" + Position.here(pos))
+ }
+ }
+
+ def update_imports(
+ options: Options,
+ progress: Progress = No_Progress,
+ selection: Sessions.Selection = Sessions.Selection.empty,
+ dirs: List[Path] = Nil,
+ select_dirs: List[Path] = Nil,
+ verbose: Boolean = false): List[(JFile, Update)] =
+ {
+ val full_sessions = Sessions.load(options, dirs, select_dirs)
+ val (selected, selected_sessions) = full_sessions.selection(selection)
+
+ val deps =
+ Sessions.deps(selected_sessions, progress = progress, verbose = verbose,
+ global_theories = full_sessions.global_theories)
+
+ selected.flatMap(session_name =>
+ {
+ val info = full_sessions(session_name)
+ val session_base = deps(session_name)
+ val session_resources = new Resources(session_base)
+ val imports_resources = new Resources(session_base.get_imports)
+
+ val local_theories =
+ (for {
+ (_, name) <- session_base.known.theories_local.iterator
+ if session_resources.theory_qualifier(name) == info.theory_qualifier
+ } yield name).toSet
+
+ def standard_import(qualifier: String, dir: String, s: String): String =
+ {
+ val name = imports_resources.import_name(qualifier, dir, s)
+ val file = Path.explode(name.node).file
+ val s1 =
+ imports_resources.session_base.known.get_file(file) match {
+ case Some(name1) if session_resources.theory_qualifier(name1) != qualifier =>
+ name1.theory
+ case Some(name1) if Thy_Header.is_base_name(s) =>
+ name1.theory_base_name
+ case _ => s
+ }
+ val name2 = imports_resources.import_name(qualifier, dir, s1)
+ if (name.node == name2.node) s1 else s
+ }
+
+ val updates_root =
+ for {
+ (_, pos) <- info.theories.flatMap(_._2)
+ upd <- update_name(Sessions.root_syntax.keywords, pos,
+ standard_import(info.theory_qualifier, info.dir.implode, _))
+ } yield upd
+
+ val updates_theories =
+ for {
+ (_, name) <- session_base.known.theories_local.toList
+ (_, pos) <- session_resources.check_thy(name, Token.Pos.file(name.node)).imports
+ upd <- update_name(session_base.syntax.keywords, pos,
+ standard_import(session_resources.theory_qualifier(name), name.master_dir, _))
+ } yield upd
+
+ updates_root ::: updates_theories
+ })
+ }
+
+ def apply_updates(updates: List[(JFile, Update)])
+ {
+ val file_updates = (Multi_Map.empty[JFile, Update] /: updates)(_ + _)
+ val conflicts =
+ file_updates.iterator_list.flatMap({ case (file, upds) =>
+ Library.duplicates(upds.sortBy(_.range.start),
+ (x: Update, y: Update) => x.range overlaps y.range) match
+ {
+ case Nil => None
+ case bad => Some((file, bad))
+ }
+ })
+ if (conflicts.nonEmpty)
+ error(cat_lines(
+ conflicts.map({ case (file, bad) =>
+ "Conflicting updates for file " + file + bad.mkString("\n ", "\n ", "") })))
+
+ for ((file, upds) <- file_updates.iterator_list) {
+ val edits =
+ upds.sortBy(upd => - upd.range.start).flatMap(upd =>
+ Text.Edit.replace(upd.range.start, upd.old_text, upd.new_text))
+ val new_text =
+ (File.read(file) /: edits)({ case (text, edit) =>
+ edit.edit(text, 0) match {
+ case (None, text1) => text1
+ case (Some(_), _) => error("Failed to apply edit " + edit + " to file " + file)
+ }
+ })
+ File.write_backup2(Path.explode(File.standard_path(file)), new_text)
+ }
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool =
+ Isabelle_Tool("update_imports", "update theory imports to use session qualifiers", args =>
+ {
+ var select_dirs: List[Path] = Nil
+ var requirements = false
+ var exclude_session_groups: List[String] = Nil
+ var all_sessions = false
+ var dirs: List[Path] = Nil
+ var session_groups: List[String] = Nil
+ var options = Options.init()
+ var verbose = false
+ var exclude_sessions: List[String] = Nil
+
+ val getopts = Getopts("""
+Usage: isabelle update_imports [OPTIONS] [SESSIONS ...]
+
+ Options are:
+ -D DIR include session directory and select its sessions
+ -R operate on requirements of selected sessions
+ -X NAME exclude sessions from group NAME and all descendants
+ -a select all sessions
+ -d DIR include session directory
+ -g NAME select session group NAME
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -v verbose
+ -x NAME exclude session NAME and all descendants
+
+ Update theory imports to use session qualifiers.
+
+ Old versions of files are preserved by appending "~~".
+""",
+ "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
+ "R" -> (_ => requirements = true),
+ "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
+ "a" -> (_ => all_sessions = true),
+ "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
+ "g:" -> (arg => session_groups = session_groups ::: List(arg)),
+ "o:" -> (arg => options = options + arg),
+ "v" -> (_ => verbose = true),
+ "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))
+
+ val sessions = getopts(args)
+ if (args.isEmpty) getopts.usage()
+
+ val selection =
+ Sessions.Selection(requirements, all_sessions, exclude_session_groups,
+ exclude_sessions, session_groups, sessions)
+
+ val progress = new Console_Progress(verbose = verbose)
+
+ val updates =
+ update_imports(options, progress = progress, selection = selection,
+ dirs = dirs, select_dirs = select_dirs, verbose = verbose)
+
+ apply_updates(updates)
+ })
+}
diff -r 03efd17e083b -r c09c11386ca5 src/Pure/build-jars
--- a/src/Pure/build-jars Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Pure/build-jars Fri Apr 21 20:36:20 2017 +0200
@@ -143,6 +143,7 @@
 Tools/task_statistics.scala
 Tools/update_cartouches.scala
 Tools/update_header.scala
+ Tools/update_imports.scala
 Tools/update_then.scala
 Tools/update_theorems.scala
 library.scala
diff -r 03efd17e083b -r c09c11386ca5 src/Tools/Code/code_scala.ML
--- a/src/Tools/Code/code_scala.ML Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Tools/Code/code_scala.ML Fri Apr 21 20:36:20 2017 +0200
@@ -465,7 +465,7 @@
 check = { env_var = "SCALA_HOME",
 make_destination = fn p => Path.append p (Path.explode "ROOT.scala"),
 make_command = fn _ =>
- "env JAVA_OPTS='-Xms128m -Xmx512m -Xss2m' \"$SCALA_HOME/bin/scalac\" $ISABELLE_SCALAC_OPTIONS ROOT.scala" } })
+ "isabelle_scala scalac $ISABELLE_SCALAC_OPTIONS ROOT.scala" } })
 #> Code_Target.set_printings (Type_Constructor ("fun",
 [(target, SOME (2, fn print_typ => fn fxy => fn [ty1, ty2] =>
 brackify_infix (1, R) fxy (
diff -r 03efd17e083b -r c09c11386ca5 src/Tools/VSCode/src/vscode_resources.scala
--- a/src/Tools/VSCode/src/vscode_resources.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Tools/VSCode/src/vscode_resources.scala Fri Apr 21 20:36:20 2017 +0200
@@ -61,9 +61,9 @@
 def node_file(name: Document.Node.Name): JFile = new JFile(name.node)
 
 def node_name(file: JFile): Document.Node.Name =
- session_base.known_file(file) getOrElse {
+ session_base.known.get_file(file) getOrElse {
 val node = file.getPath
- theory_name(default_qualifier, Thy_Header.theory_name(node)) match {
+ theory_name(Sessions.DRAFT, Thy_Header.theory_name(node)) match {
 case (true, theory) => Document.Node.Name.loaded_theory(theory)
 case (false, theory) =>
 val master_dir = if (theory == "") "" else file.getParent
diff -r 03efd17e083b -r c09c11386ca5 src/Tools/jEdit/lib/Tools/jedit
--- a/src/Tools/jEdit/lib/Tools/jedit Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Tools/jEdit/lib/Tools/jedit Fri Apr 21 20:36:20 2017 +0200
@@ -97,7 +97,6 @@
 echo "Usage: isabelle $PRG [OPTIONS] [FILES ...]"
 echo
 echo " Options are:"
- echo " -A explore theory imports of all known sessions"
 echo " -D NAME=X set JVM system property"
 echo " -J OPTION add JVM runtime option (default $JEDIT_JAVA_SYSTEM_OPTIONS $JEDIT_JAVA_OPTIONS)"
 echo " -R open ROOT entry of logic session and use its parent"
@@ -134,7 +133,6 @@
 # options
 
-JEDIT_ALL_KNOWN=""
 BUILD_ONLY=false
 BUILD_JARS="jars"
 ML_PROCESS_POLICY=""
@@ -147,12 +145,9 @@
 function getoptions()
 {
 OPTIND=1
- while getopts "AD:J:Rbd:fj:l:m:np:s" OPT
+ while getopts "D:J:Rbd:fj:l:m:np:s" OPT
 do
 case "$OPT" in
- A)
- JEDIT_ALL_KNOWN="true"
- ;;
 D)
 JAVA_ARGS["${#JAVA_ARGS[@]}"]="-D$OPTARG"
 ;;
@@ -350,7 +345,7 @@
 classpath "$JAR"
 done
 export CLASSPATH="$(platform_path "$ISABELLE_CLASSPATH")"
- exec "$SCALA_HOME/bin/scalac" $ISABELLE_SCALAC_OPTIONS -d dist/classes "${SOURCES[@]}"
+ isabelle_scala scalac $ISABELLE_SCALAC_OPTIONS -d dist/classes "${SOURCES[@]}"
 ) || fail "Failed to compile sources"
 
 cd dist/classes
@@ -376,7 +371,7 @@
 if [ "$BUILD_ONLY" = false ]
 then
- export JEDIT_SESSION_DIRS JEDIT_LOGIC JEDIT_LOGIC_ROOT JEDIT_ALL_KNOWN JEDIT_PRINT_MODE JEDIT_BUILD_MODE
+ export JEDIT_SESSION_DIRS JEDIT_LOGIC JEDIT_LOGIC_ROOT JEDIT_PRINT_MODE JEDIT_BUILD_MODE
 export JEDIT_ML_PROCESS_POLICY="$ML_PROCESS_POLICY"
 classpath "$JEDIT_HOME/dist/jedit.jar"
 exec isabelle java "${JAVA_ARGS[@]}" isabelle.Main "${ARGS[@]}"
diff -r 03efd17e083b -r c09c11386ca5 src/Tools/jEdit/src/jedit_lib.scala
--- a/src/Tools/jEdit/src/jedit_lib.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Tools/jEdit/src/jedit_lib.scala Fri Apr 21 20:36:20 2017 +0200
@@ -176,7 +176,7 @@
 catch { case _: ArrayIndexOutOfBoundsException => None }
 
 def try_get_text(text: String, range: Text.Range): Option[String] =
- try { Some(text.substring(range.start, range.stop)) }
+ try { Some(range.substring(text)) }
 catch { case _: IndexOutOfBoundsException => None }
diff -r 03efd17e083b -r c09c11386ca5 src/Tools/jEdit/src/jedit_resources.scala
--- a/src/Tools/jEdit/src/jedit_resources.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Tools/jEdit/src/jedit_resources.scala Fri Apr 21 20:36:20 2017 +0200
@@ -26,13 +26,13 @@
 /* document node name */
 
 def known_file(path: String): Option[Document.Node.Name] =
- JEdit_Lib.check_file(path).flatMap(session_base.known_file(_))
+ JEdit_Lib.check_file(path).flatMap(session_base.known.get_file(_))
 
 def node_name(path: String): Document.Node.Name =
 known_file(path) getOrElse {
 val vfs = VFSManager.getVFSForPath(path)
 val node = if (vfs.isInstanceOf[FileVFS]) MiscUtilities.resolveSymlinks(path) else path
- theory_name(default_qualifier, Thy_Header.theory_name(node)) match {
+ theory_name(Sessions.DRAFT, Thy_Header.theory_name(node)) match {
 case (true, theory) => Document.Node.Name.loaded_theory(theory)
 case (false, theory) =>
 val master_dir = if (theory == "") "" else vfs.getParentOfPath(path)
diff -r 03efd17e083b -r c09c11386ca5 src/Tools/jEdit/src/jedit_sessions.scala
--- a/src/Tools/jEdit/src/jedit_sessions.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Tools/jEdit/src/jedit_sessions.scala Fri Apr 21 20:36:20 2017 +0200
@@ -77,8 +77,8 @@
 {
 val sessions = Sessions.load(options, dirs = session_dirs())
 val (main_sessions, other_sessions) =
- sessions.imports_topological_order.partition(p => p._2.groups.contains("main"))
- main_sessions.map(_._1).sorted ::: other_sessions.map(_._1).sorted
+ sessions.imports_topological_order.partition(info => info.groups.contains("main"))
+ main_sessions.map(_.name).sorted ::: other_sessions.map(_.name).sorted
 }
diff -r 03efd17e083b -r c09c11386ca5 src/Tools/jEdit/src/plugin.scala
--- a/src/Tools/jEdit/src/plugin.scala Thu Apr 20 16:21:29 2017 +0200
+++ b/src/Tools/jEdit/src/plugin.scala Fri Apr 21 20:36:20 2017 +0200
@@ -74,9 +74,8 @@
 val session_name = JEdit_Sessions.session_name(options)
 val session_base =
 try {
- Sessions.session_base(options, session_name,
- dirs = JEdit_Sessions.session_dirs(),
- all_known = Isabelle_System.getenv("JEDIT_ALL_KNOWN") == "true")
+ Sessions.session_base(
+ options, session_name, dirs = JEdit_Sessions.session_dirs(), all_known = true)
 }
 catch { case ERROR(_) => Sessions.Base.pure(options) }
diff -r 03efd17e083b -r c09c11386ca5 src/ZF/ROOT
--- a/src/ZF/ROOT Thu Apr 20 16:21:29 2017 +0200
+++ b/src/ZF/ROOT Fri Apr 21 20:36:20 2017 +0200
@@ -43,7 +43,8 @@
 (North-Holland, 1980)
 *}
 theories
- ZFC
+ ZF (global)
+ ZFC (global)
 document_files "root.tex"
 
 session "ZF-AC" (ZF) in AC = ZF +