/*  Title:      Pure/Thy/sessions.scala
    Author:     Makarius

Cumulative session information.
*/
|
6 |
|
7 package isabelle |
|
8 |
|
9 import java.io.{File => JFile} |
|
10 |
|
11 import scala.collection.immutable.{SortedSet, SortedMap} |
|
12 import scala.collection.mutable |
|
13 |
|
14 |
|
15 object Sessions { |
|
16 /* main operations */ |
|
17 |
|
  // Minimal background for the given session name only: no session structure is
  // loaded, the base info merely records the name (see Background.empty).
  def background0(session: String): Background = Background.empty(session)
|
19 |
|
20 def background(options: Options, |
|
21 session: String, |
|
22 progress: Progress = new Progress, |
|
23 dirs: List[Path] = Nil, |
|
24 include_sessions: List[String] = Nil, |
|
25 session_ancestor: Option[String] = None, |
|
26 session_requirements: Boolean = false |
|
27 ): Background = { |
|
28 Background.load(options, session, progress = progress, dirs = dirs, |
|
29 include_sessions = include_sessions, session_ancestor = session_ancestor, |
|
30 session_requirements = session_requirements) |
|
31 } |
|
32 |
|
33 def load_structure( |
|
34 options: Options, |
|
35 dirs: List[Path] = Nil, |
|
36 select_dirs: List[Path] = Nil, |
|
37 infos: List[Info] = Nil, |
|
38 augment_options: String => List[Options.Spec] = _ => Nil |
|
39 ): Structure = { |
|
40 val roots = load_root_files(dirs = dirs, select_dirs = select_dirs) |
|
41 Structure.make(options, augment_options, roots = roots, infos = infos) |
|
42 } |
|
43 |
|
44 def deps(sessions_structure: Structure, |
|
45 progress: Progress = new Progress, |
|
46 inlined_files: Boolean = false, |
|
47 list_files: Boolean = false, |
|
48 check_keywords: Set[String] = Set.empty |
|
49 ): Deps = { |
|
50 Deps.load(sessions_structure, progress = progress, inlined_files = inlined_files, |
|
51 list_files = list_files, check_keywords = check_keywords) |
|
52 } |
|
53 |
|
54 |
|
55 /* session and theory names */ |
|
56 |
|
  val ROOTS: Path = Path.explode("ROOTS")  // catalog file listing session directories
  val ROOT: Path = Path.explode("ROOT")  // session definition file of one directory

  val roots_name: String = "ROOTS"
  val root_name: String = "ROOT"
  // NOTE(review): presumably the theory providing session-management commands — verify at use sites
  val theory_import: String = "Pure.Sessions"

  // reserved pseudo chapter/session names
  val UNSORTED = "Unsorted"
  val DRAFT = "Draft"

  // "Pure" is the bootstrap session: the only one without a parent (see Info.make)
  def is_pure(name: String): Boolean = name == Thy_Header.PURE

  // empty names and the reserved DRAFT name cannot denote sessions
  def illegal_session(name: String): Boolean = name == "" || name == DRAFT
  // "ROOT" and names excluded by registered file formats cannot denote theories
  def illegal_theory(name: String): Boolean =
    name == root_name || File_Format.registry.theory_excluded(name)
|
72 |
|
73 |
|
74 /* ROOTS file format */ |
|
75 |
|
76 class ROOTS_File_Format extends File_Format { |
|
77 val format_name: String = roots_name |
|
78 val file_ext = "" |
|
79 |
|
80 override def detect(name: String): Boolean = |
|
81 Url.get_base_name(name) match { |
|
82 case Some(base_name) => base_name == roots_name |
|
83 case None => false |
|
84 } |
|
85 |
|
86 override def theory_suffix: String = "ROOTS_file" |
|
87 override def theory_content(name: String): String = |
|
88 """theory "ROOTS" imports Pure begin ROOTS_file "." end""" |
|
89 override def theory_excluded(name: String): Boolean = name == "ROOTS" |
|
90 } |
|
91 |
|
92 |
|
93 /* base info */ |
|
94 |
|
  object Base {
    // Initial base for the bootstrap environment: default (empty) session name,
    // only the bootstrap outer syntax is available.
    val bootstrap: Base = Base(overall_syntax = Thy_Header.bootstrap_syntax)
  }
|
98 |
|
99 sealed case class Base( |
|
100 session_name: String = "", |
|
101 session_pos: Position.T = Position.none, |
|
102 proper_session_theories: List[Document.Node.Name] = Nil, |
|
103 document_theories: List[Document.Node.Name] = Nil, |
|
104 loaded_theories: Graph[String, Outer_Syntax] = Graph.string, // cumulative imports |
|
105 used_theories: List[(Document.Node.Name, Options)] = Nil, // new imports |
|
106 theory_load_commands: Map[String, List[Command_Span.Span]] = Map.empty, |
|
107 known_theories: Map[String, Document.Node.Entry] = Map.empty, |
|
108 known_loaded_files: Map[String, List[Path]] = Map.empty, |
|
109 overall_syntax: Outer_Syntax = Outer_Syntax.empty, |
|
110 imported_sources: List[(Path, SHA1.Digest)] = Nil, |
|
111 session_sources: List[(Path, SHA1.Digest)] = Nil, |
|
112 session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph, |
|
113 errors: List[String] = Nil |
|
114 ) { |
|
115 def session_entry: (String, Base) = session_name -> this |
|
116 |
|
117 override def toString: String = "Sessions.Base(" + print_body + ")" |
|
118 def print_body: String = |
|
119 "session_name = " + quote(session_name) + |
|
120 ", loaded_theories = " + loaded_theories.size + |
|
121 ", used_theories = " + used_theories.length |
|
122 |
|
123 def all_sources: List[(Path, SHA1.Digest)] = imported_sources ::: session_sources |
|
124 |
|
125 def all_document_theories: List[Document.Node.Name] = |
|
126 proper_session_theories ::: document_theories |
|
127 |
|
128 def loaded_theory(name: String): Boolean = loaded_theories.defined(name) |
|
129 def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory) |
|
130 |
|
131 def loaded_theory_syntax(name: String): Option[Outer_Syntax] = |
|
132 if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None |
|
133 def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] = |
|
134 loaded_theory_syntax(name.theory) |
|
135 |
|
136 def theory_syntax(name: Document.Node.Name): Outer_Syntax = |
|
137 loaded_theory_syntax(name) getOrElse overall_syntax |
|
138 |
|
139 def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax = |
|
140 nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax |
|
141 } |
|
142 |
|
143 |
|
144 /* background context */ |
|
145 |
|
  object Background {
    // Background without any loaded structure: base info only records the session name.
    def empty(session: String): Background =
      Background(Base(session_name = session))

    // Load the background context of a single session.  With session_requirements,
    // a synthetic session "<name>_requirements(<ancestor>)" may be created that
    // imports the theories this session needs beyond its ancestor.
    def load(options: Options,
      session: String,
      progress: Progress = new Progress,
      dirs: List[Path] = Nil,
      include_sessions: List[String] = Nil,
      session_ancestor: Option[String] = None,
      session_requirements: Boolean = false
    ): Background = {
      val full_sessions = load_structure(options, dirs = dirs)

      // restrict the structure to this session (and the explicit ancestor, if given)
      val selected_sessions =
        full_sessions.selection(Selection(sessions = session :: session_ancestor.toList))
      val info = selected_sessions(session)
      val ancestor = session_ancestor orElse info.parent

      val (session1, infos1) =
        if (session_requirements && ancestor.isDefined) {
          val deps = Sessions.deps(selected_sessions, progress = progress)
          val base = deps(session)

          // membership test for theories already provided by the ancestor;
          // it is an error if the ancestor (transitively) imports this very session
          val ancestor_loaded =
            deps.get(ancestor.get) match {
              case Some(ancestor_base)
              if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
                ancestor_base.loaded_theories.defined _
              case _ =>
                error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
            }

          // theories loaded by this session, but neither provided by the ancestor
          // nor belonging to the session itself
          val required_theories =
            for {
              thy <- base.loaded_theories.keys
              if !ancestor_loaded(thy) && selected_sessions.theory_qualifier(thy) != session
            }
            yield thy

          if (required_theories.isEmpty) (ancestor.get, Nil)
          else {
            val other_name = info.name + "_requirements(" + ancestor.get + ")"
            // side effect only: presumably ensures $ISABELLE_TMP_PREFIX exists,
            // which is used as dir of the synthetic session below — TODO confirm
            Isabelle_System.isabelle_tmp_prefix()

            (other_name,
              List(
                Info.make(
                  Chapter_Defs.empty,
                  Options.init0(),
                  info.options,
                  augment_options = _ => Nil,
                  dir_selected = false,
                  dir = Path.explode("$ISABELLE_TMP_PREFIX"),
                  chapter = info.chapter,
                  Session_Entry(
                    pos = info.pos,
                    name = other_name,
                    groups = info.groups,
                    path = ".",
                    parent = ancestor,
                    description = "Required theory imports from other sessions",
                    options = Nil,
                    imports = info.deps,
                    directories = Nil,
                    theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))),
                    document_theories = Nil,
                    document_files = Nil,
                    export_files = Nil,
                    export_classpath = Nil))))
          }
        }
        else (session, Nil)

      // reload the structure if a synthetic requirements session was added
      val full_sessions1 =
        if (infos1.isEmpty) full_sessions
        else load_structure(options, dirs = dirs, infos = infos1)

      val selected_sessions1 =
        full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions))

      val deps1 = Sessions.deps(selected_sessions1, progress = progress)

      Background(deps1(session1), sessions_structure = full_sessions1,
        errors = deps1.errors, infos = infos1)
    }
  }
|
233 |
|
234 sealed case class Background( |
|
235 base: Base, |
|
236 sessions_structure: Structure = Structure.empty, |
|
237 errors: List[String] = Nil, |
|
238 infos: List[Info] = Nil |
|
239 ) { |
|
240 def session_name: String = base.session_name |
|
241 def info: Info = sessions_structure(session_name) |
|
242 |
|
243 def check_errors: Background = |
|
244 if (errors.isEmpty) this |
|
245 else error(cat_lines(errors)) |
|
246 } |
|
247 |
|
248 |
|
249 /* source dependencies */ |
|
250 |
|
  object Deps {
    // Compute source dependencies for all sessions of the given structure, in
    // topological order of imports: each session base builds on those of its
    // parent and imported sessions.
    def load(sessions_structure: Structure,
      progress: Progress = new Progress,
      inlined_files: Boolean = false,
      list_files: Boolean = false,
      check_keywords: Set[String] = Set.empty
    ): Deps = {
      // memoized file digests, shared across all sessions (local mutable state)
      var cache_sources = Map.empty[JFile, SHA1.Digest]
      def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
        for {
          path <- paths
          file = path.file
          if cache_sources.isDefinedAt(file) || file.isFile
        }
        yield {
          cache_sources.get(file) match {
            case Some(digest) => (path, digest)
            case None =>
              val digest = SHA1.digest(file)
              cache_sources = cache_sources + (file -> digest)
              (path, digest)
          }
        }
      }

      val session_bases =
        sessions_structure.imports_topological_order.foldLeft(Map(Base.bootstrap.session_entry)) {
          case (session_bases, session_name) =>
            progress.expose_interrupt()

            val info = sessions_structure(session_name)
            try {
              // base accumulated from parent + imported sessions
              val deps_base = info.deps_base(session_bases)
              val session_background =
                Background(base = deps_base, sessions_structure = sessions_structure)
              val resources = new Resources(session_background)

              progress.echo(
                "Session " + info.chapter + "/" + session_name +
                if_proper(info.groups, info.groups.mkString(" (", " ", ")")),
                verbose = !list_files)

              val dependencies = resources.session_dependencies(info)

              val overall_syntax = dependencies.overall_syntax

              // theories that properly belong to this session (by qualifier)
              val proper_session_theories =
                dependencies.theories.filter(name =>
                  sessions_structure.theory_qualifier(name) == session_name)

              val theory_files = dependencies.theories.map(_.path)

              // load commands of inlined files: errors are collected, not raised
              val (load_commands, load_commands_errors) =
                try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) }
                catch { case ERROR(msg) => (Nil, List(msg)) }

              val theory_load_commands =
                (for ((name, span) <- load_commands.iterator) yield name.theory -> span).toMap

              val loaded_files: List[(String, List[Path])] =
                for ((name, spans) <- load_commands) yield {
                  val (theory, files) = dependencies.loaded_files(name, spans)
                  theory -> files.map(file => Path.explode(file.node))
                }

              val document_files =
                for ((path1, path2) <- info.document_files)
                yield info.dir + path1 + path2

              val session_files =
                (theory_files ::: loaded_files.flatMap(_._2) ::: document_files).map(_.expand)

              val imported_files = if (inlined_files) dependencies.imported_files else Nil

              if (list_files) {
                progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _)))
              }

              if (check_keywords.nonEmpty) {
                Check_Keywords.check_keywords(
                  progress, overall_syntax.keywords, check_keywords, theory_files)
              }

              // display graph: required sessions collapsed to "[name]" nodes,
              // this session's own theories as individual nodes
              val session_graph_display: Graph_Display.Graph = {
                def session_node(name: String): Graph_Display.Node =
                  Graph_Display.Node("[" + name + "]", "session." + name)

                def node(name: Document.Node.Name): Graph_Display.Node = {
                  val qualifier = sessions_structure.theory_qualifier(name)
                  if (qualifier == info.name) {
                    Graph_Display.Node(name.theory_base_name, "theory." + name.theory)
                  }
                  else session_node(qualifier)
                }

                val required_sessions =
                  dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
                    .map(theory => sessions_structure.theory_qualifier(theory))
                    .filter(name => name != info.name && sessions_structure.defined(name))

                val required_subgraph =
                  sessions_structure.imports_graph
                    .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
                    .transitive_closure
                    .restrict(required_sessions.toSet)
                    .transitive_reduction_acyclic

                // session-to-session edges first ...
                val graph0 =
                  required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) {
                    case (g, session) =>
                      val a = session_node(session)
                      val bs = required_subgraph.imm_preds(session).toList.map(session_node)
                      bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
                  }

                // ... then theory-level import edges
                dependencies.entries.foldLeft(graph0) {
                  case (g, entry) =>
                    val a = node(entry.name)
                    val bs = entry.header.imports.map(node).filterNot(_ == a)
                    bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
                }
              }

              val known_theories =
                dependencies.entries.iterator.map(entry => entry.name.theory -> entry).
                  foldLeft(deps_base.known_theories)(_ + _)

              val known_loaded_files = deps_base.known_loaded_files ++ loaded_files

              // imports must stay within this session's (transitive) requirements
              val import_errors = {
                val known_sessions =
                  sessions_structure.imports_requirements(List(session_name)).toSet
                for {
                  name <- dependencies.theories
                  qualifier = sessions_structure.theory_qualifier(name)
                  if !known_sessions(qualifier)
                } yield "Bad import of theory " + quote(name.toString) +
                  ": need to include sessions " + quote(qualifier) + " in ROOT"
              }

              // checks on declared document theories: must be known, not redundant,
              // and properly imported when coming from another session
              val document_errors =
                info.document_theories.flatMap(
                  {
                    case (thy, pos) =>
                      val build_hierarchy =
                        if (sessions_structure.build_graph.defined(session_name)) {
                          sessions_structure.build_hierarchy(session_name)
                        }
                        else Nil

                      def err(msg: String): Option[String] =
                        Some(msg + " " + quote(thy) + Position.here(pos))

                      known_theories.get(thy).map(_.name) match {
                        case None => err("Unknown document theory")
                        case Some(name) =>
                          val qualifier = sessions_structure.theory_qualifier(name)
                          if (proper_session_theories.contains(name)) {
                            err("Redundant document theory from this session:")
                          }
                          else if (
                            !build_hierarchy.contains(qualifier) &&
                            !dependencies.theories.contains(name)
                          ) {
                            err("Document theory from other session not imported properly:")
                          }
                          else None
                      }
                  })
              val document_theories =
                info.document_theories.map({ case (thy, _) => known_theories(thy).name })

              // theories must reside within the declared session directories
              val dir_errors = {
                val ok = info.dirs.map(_.canonical_file).toSet
                val bad =
                  (for {
                    name <- proper_session_theories.iterator
                    path = Path.explode(name.master_dir)
                    if !ok(path.canonical_file)
                    path1 = File.relative_path(info.dir.canonical, path).getOrElse(path)
                  } yield (path1, name)).toList
                val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted

                val errs1 =
                  for { (path1, name) <- bad }
                  yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
                val errs2 =
                  if (bad_dirs.isEmpty) Nil
                  else List("Implicit use of session directories: " + commas(bad_dirs))
                val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
                val errs4 =
                  (for {
                    name <- proper_session_theories.iterator
                    name1 <- resources.find_theory_node(name.theory)
                    if name.node != name1.node
                  } yield {
                    "Incoherent theory file import:\n " + quote(name.node) +
                    " vs. \n " + quote(name1.node)
                  }).toList

                errs1 ::: errs2 ::: errs3 ::: errs4
              }

              val sources_errors =
                for (p <- session_files if !p.is_file) yield "No such file: " + p

              val path_errors =
                try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
                catch { case ERROR(msg) => List(msg) }

              val bibtex_errors = info.bibtex_entries.errors

              // assemble the session base with all accumulated errors
              val base =
                Base(
                  session_name = info.name,
                  session_pos = info.pos,
                  proper_session_theories = proper_session_theories,
                  document_theories = document_theories,
                  loaded_theories = dependencies.loaded_theories,
                  used_theories = dependencies.theories_adjunct,
                  theory_load_commands = theory_load_commands,
                  known_theories = known_theories,
                  known_loaded_files = known_loaded_files,
                  overall_syntax = overall_syntax,
                  imported_sources = check_sources(imported_files),
                  session_sources = check_sources(session_files),
                  session_graph_display = session_graph_display,
                  errors = dependencies.errors ::: load_commands_errors ::: import_errors :::
                    document_errors ::: dir_errors ::: sources_errors ::: path_errors :::
                    bibtex_errors)

              session_bases + base.session_entry
            }
            catch {
              case ERROR(msg) =>
                cat_error(msg, "The error(s) above occurred in session " +
                  quote(info.name) + Position.here(info.pos))
            }
        }

      new Deps(sessions_structure, session_bases)
    }
  }
|
494 |
|
495 final class Deps private[Sessions]( |
|
496 val sessions_structure: Structure, |
|
497 val session_bases: Map[String, Base] |
|
498 ) { |
|
499 def background(session: String): Background = |
|
500 Background(base = apply(session), sessions_structure = sessions_structure, errors = errors) |
|
501 |
|
502 def is_empty: Boolean = session_bases.keysIterator.forall(_.isEmpty) |
|
503 def apply(name: String): Base = session_bases(name) |
|
504 def get(name: String): Option[Base] = session_bases.get(name) |
|
505 |
|
506 def sources_shasum(name: String): SHA1.Shasum = { |
|
507 val meta_info = sessions_structure(name).meta_info |
|
508 val sources = |
|
509 SHA1.shasum_sorted( |
|
510 for ((path, digest) <- apply(name).all_sources) |
|
511 yield digest -> File.symbolic_path(path)) |
|
512 meta_info ::: sources |
|
513 } |
|
514 |
|
515 def errors: List[String] = |
|
516 (for { |
|
517 (name, base) <- session_bases.iterator |
|
518 if base.errors.nonEmpty |
|
519 } yield cat_lines(base.errors) + |
|
520 "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.session_pos) |
|
521 ).toList |
|
522 |
|
523 def check_errors: Deps = |
|
524 errors match { |
|
525 case Nil => this |
|
526 case errs => error(cat_lines(errs)) |
|
527 } |
|
528 |
|
529 override def toString: String = "Sessions.Deps(" + sessions_structure + ")" |
|
530 } |
|
531 |
|
532 |
|
533 /* cumulative session info */ |
|
534 |
|
535 private val BUILD_PREFS_BG = "<build_prefs:" |
|
536 private val BUILD_PREFS_EN = ">" |
|
537 |
|
538 def make_build_prefs(name: String): String = |
|
539 BUILD_PREFS_BG + name + BUILD_PREFS_EN |
|
540 |
|
541 def is_build_prefs(s: String): Boolean = { |
|
542 val i = s.indexOf('<') |
|
543 i >= 0 && { |
|
544 val s1 = s.substring(i) |
|
545 s1.startsWith(BUILD_PREFS_BG) && s1.endsWith(BUILD_PREFS_EN) |
|
546 } |
|
547 } |
|
548 |
|
549 def eq_sources(options: Options, shasum1: SHA1.Shasum, shasum2: SHA1.Shasum): Boolean = |
|
550 if (options.bool("build_thorough")) shasum1 == shasum2 |
|
551 else { |
|
552 def trim(shasum: SHA1.Shasum): SHA1.Shasum = |
|
553 shasum.filter(s => !is_build_prefs(s)) |
|
554 |
|
555 trim(shasum1) == trim(shasum2) |
|
556 } |
|
557 |
|
  // Summary record of one chapter: defining position, groups, description,
  // and the names of the sessions it contains.
  sealed case class Chapter_Info(
    name: String,
    pos: Position.T,
    groups: List[String],
    description: String,
    sessions: List[String]
  )
|
565 |
|
566 object Info { |
|
567 def make( |
|
568 chapter_defs: Chapter_Defs, |
|
569 options0: Options, |
|
570 options: Options, |
|
571 augment_options: String => List[Options.Spec], |
|
572 dir_selected: Boolean, |
|
573 dir: Path, |
|
574 chapter: String, |
|
575 entry: Session_Entry |
|
576 ): Info = { |
|
577 try { |
|
578 val name = entry.name |
|
579 |
|
580 if (illegal_session(name)) error("Illegal session name " + quote(name)) |
|
581 if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session") |
|
582 if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session") |
|
583 |
|
584 val session_path = dir + Path.explode(entry.path) |
|
585 val directories = entry.directories.map(dir => session_path + Path.explode(dir)) |
|
586 |
|
587 val session_options0 = options ++ entry.options |
|
588 val session_options = session_options0 ++ augment_options(name) |
|
589 val session_prefs = |
|
590 session_options.make_prefs(defaults = session_options0, filter = _.session_content) |
|
591 |
|
592 val build_prefs_digests = |
|
593 session_options.changed(defaults = options0, filter = _.session_content) |
|
594 .map(ch => SHA1.digest(ch.print_prefs) -> make_build_prefs(ch.name)) |
|
595 |
|
596 val theories = |
|
597 entry.theories.map({ case (opts, thys) => |
|
598 (session_options ++ opts, |
|
599 thys.map({ case ((thy, pos), _) => |
|
600 val thy_name = Thy_Header.import_name(thy) |
|
601 if (illegal_theory(thy_name)) { |
|
602 error("Illegal theory name " + quote(thy_name) + Position.here(pos)) |
|
603 } |
|
604 else (thy, pos) })) }) |
|
605 |
|
606 val global_theories = |
|
607 for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global } |
|
608 yield { |
|
609 val thy_name = Path.explode(thy).file_name |
|
610 if (Long_Name.is_qualified(thy_name)) { |
|
611 error("Bad qualified name for global theory " + |
|
612 quote(thy_name) + Position.here(pos)) |
|
613 } |
|
614 else thy_name |
|
615 } |
|
616 |
|
617 val conditions = |
|
618 theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted. |
|
619 map(x => (x, Isabelle_System.getenv(x) != "")) |
|
620 |
|
621 val document_files = |
|
622 entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) }) |
|
623 |
|
624 val export_files = |
|
625 entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) }) |
|
626 |
|
627 val meta_digest = |
|
628 SHA1.digest( |
|
629 (name, chapter, entry.parent, entry.directories, entry.options, entry.imports, |
|
630 entry.theories_no_position, conditions, entry.document_theories_no_position).toString) |
|
631 |
|
632 val meta_info = |
|
633 SHA1.shasum_meta_info(meta_digest) ::: SHA1.shasum_sorted(build_prefs_digests) |
|
634 |
|
635 val chapter_groups = chapter_defs(chapter).groups |
|
636 val groups = chapter_groups ::: entry.groups.filterNot(chapter_groups.contains) |
|
637 |
|
638 Info(name, chapter, dir_selected, entry.pos, groups, session_path, |
|
639 entry.parent, entry.description, directories, session_options, session_prefs, |
|
640 entry.imports, theories, global_theories, entry.document_theories, document_files, |
|
641 export_files, entry.export_classpath, meta_info) |
|
642 } |
|
643 catch { |
|
644 case ERROR(msg) => |
|
645 error(msg + "\nThe error(s) above occurred in session entry " + |
|
646 quote(entry.name) + Position.here(entry.pos)) |
|
647 } |
|
648 } |
|
649 } |
|
650 |
|
651 sealed case class Info( |
|
652 name: String, |
|
653 chapter: String, |
|
654 dir_selected: Boolean, |
|
655 pos: Position.T, |
|
656 groups: List[String], |
|
657 dir: Path, |
|
658 parent: Option[String], |
|
659 description: String, |
|
660 directories: List[Path], |
|
661 options: Options, |
|
662 session_prefs: String, |
|
663 imports: List[String], |
|
664 theories: List[(Options, List[(String, Position.T)])], |
|
665 global_theories: List[String], |
|
666 document_theories: List[(String, Position.T)], |
|
667 document_files: List[(Path, Path)], |
|
668 export_files: List[(Path, Int, List[String])], |
|
669 export_classpath: List[String], |
|
670 meta_info: SHA1.Shasum |
|
671 ) { |
|
672 def deps: List[String] = parent.toList ::: imports |
|
673 |
|
674 def deps_base(session_bases: String => Base): Base = { |
|
675 val parent_base = session_bases(parent.getOrElse("")) |
|
676 val imports_bases = imports.map(session_bases) |
|
677 parent_base.copy( |
|
678 known_theories = |
|
679 (for { |
|
680 base <- imports_bases.iterator |
|
681 (_, entry) <- base.known_theories.iterator |
|
682 } yield (entry.name.theory -> entry)).foldLeft(parent_base.known_theories)(_ + _), |
|
683 known_loaded_files = |
|
684 imports_bases.iterator.map(_.known_loaded_files). |
|
685 foldLeft(parent_base.known_loaded_files)(_ ++ _)) |
|
686 } |
|
687 |
|
688 def dirs: List[Path] = dir :: directories |
|
689 |
|
690 def main_group: Boolean = groups.contains("main") |
|
691 def doc_group: Boolean = groups.contains("doc") |
|
692 |
|
693 def timeout_ignored: Boolean = |
|
694 !options.bool("timeout_build") || Time.seconds(options.real("timeout")) < Time.ms(1) |
|
695 |
|
696 def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale")) |
|
697 |
|
698 def document_enabled: Boolean = |
|
699 options.string("document") match { |
|
700 case "" | "false" => false |
|
701 case "pdf" | "true" => true |
|
702 case doc => error("Bad document specification " + quote(doc)) |
|
703 } |
|
704 |
|
705 def document_variants: List[Document_Build.Document_Variant] = { |
|
706 val variants = |
|
707 space_explode(':', options.string("document_variants")). |
|
708 map(Document_Build.Document_Variant.parse) |
|
709 |
|
710 val dups = Library.duplicates(variants.map(_.name)) |
|
711 if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups)) |
|
712 |
|
713 variants |
|
714 } |
|
715 |
|
716 def document_echo: Boolean = options.bool("document_echo") |
|
717 |
|
718 def documents: List[Document_Build.Document_Variant] = { |
|
719 val variants = document_variants |
|
720 if (!document_enabled || document_files.isEmpty) Nil else variants |
|
721 } |
|
722 |
|
723 def document_output: Option[Path] = |
|
724 options.string("document_output") match { |
|
725 case "" => None |
|
726 case s => Some(dir + Path.explode(s)) |
|
727 } |
|
728 |
|
729 def browser_info: Boolean = options.bool("browser_info") |
|
730 |
|
731 lazy val bibtex_entries: Bibtex.Entries = |
|
732 (for { |
|
733 (document_dir, file) <- document_files.iterator |
|
734 if File.is_bib(file.file_name) |
|
735 } yield { |
|
736 val path = dir + document_dir + file |
|
737 Bibtex.Entries.parse(File.read(path), start = Token.Pos.file(File.standard_path(path))) |
|
738 }).foldRight(Bibtex.Entries.empty)(_ ::: _) |
|
739 |
|
740 def record_proofs: Boolean = options.int("record_proofs") >= 2 |
|
741 |
|
742 def is_afp: Boolean = chapter == AFP.chapter |
|
743 def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains) |
|
744 } |
|
745 |
|
746 object Selection { |
|
747 val empty: Selection = Selection() |
|
748 val all: Selection = Selection(all_sessions = true) |
|
749 def session(session: String): Selection = Selection(sessions = List(session)) |
|
750 } |
|
751 |
|
752 sealed case class Selection( |
|
753 requirements: Boolean = false, |
|
754 all_sessions: Boolean = false, |
|
755 base_sessions: List[String] = Nil, |
|
756 exclude_session_groups: List[String] = Nil, |
|
757 exclude_sessions: List[String] = Nil, |
|
758 session_groups: List[String] = Nil, |
|
759 sessions: List[String] = Nil |
|
760 ) { |
|
761 def ++ (other: Selection): Selection = |
|
762 Selection( |
|
763 requirements = requirements || other.requirements, |
|
764 all_sessions = all_sessions || other.all_sessions, |
|
765 base_sessions = Library.merge(base_sessions, other.base_sessions), |
|
766 exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups), |
|
767 exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions), |
|
768 session_groups = Library.merge(session_groups, other.session_groups), |
|
769 sessions = Library.merge(sessions, other.sessions)) |
|
770 } |
|
771 |
|
  object Structure {
    val empty: Structure = make(Options.empty)

    // Build the overall session structure from parsed root files plus extra
    // Info entries: collects chapter definitions, constructs the acyclic
    // build/imports graphs, and checks uniqueness of directories and global
    // theory names.
    def make(
      options: Options,
      augment_options: String => List[Options.Spec] = _ => Nil,
      roots: List[Root_File] = Nil,
      infos: List[Info] = Nil
    ): Structure = {
      // chapter definitions collected from all root files
      val chapter_defs: Chapter_Defs =
        roots.foldLeft(Chapter_Defs.empty) {
          case (defs1, root) =>
            root.entries.foldLeft(defs1) {
              case (defs2, entry: Chapter_Def) => defs2 + entry
              case (defs2, _) => defs2
            }
        }

      val options0 = Options.init0()
      val session_prefs = options.make_prefs(defaults = options0, filter = _.session_content)

      // session entries of root files: a Chapter_Entry switches the current
      // chapter, which resets to UNSORTED at the end of each root file
      val root_infos = {
        var chapter = UNSORTED
        val root_infos = new mutable.ListBuffer[Info]
        for (root <- roots) {
          root.entries.foreach {
            case entry: Chapter_Entry => chapter = entry.name
            case entry: Session_Entry =>
              root_infos +=
                Info.make(chapter_defs, options0, options, augment_options,
                  root.select, root.dir, chapter, entry)
            case _ =>
          }
          chapter = UNSORTED
        }
        root_infos.toList
      }

      // graph nodes: one per session, duplicate names are an error
      val info_graph =
        (root_infos ::: infos).foldLeft(Graph.string[Info]) {
          case (graph, info) =>
            if (graph.defined(info.name)) {
              error("Duplicate session " + quote(info.name) + Position.here(info.pos) +
                Position.here(graph.get_node(info.name).pos))
            }
            else graph.new_node(info.name, info)
        }

      // add edges for "parent" or "imports" relations, rejecting unknown
      // targets and dependency cycles
      def augment_graph(
        graph: Graph[String, Info],
        kind: String,
        edges: Info => Iterable[String]
      ) : Graph[String, Info] = {
        def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = {
          if (!g.defined(parent)) {
            error("Bad " + kind + " session " + quote(parent) + " for " +
              quote(name) + Position.here(pos))
          }

          try { g.add_edge_acyclic(parent, name) }
          catch {
            case exn: Graph.Cycles[_] =>
              error(cat_lines(exn.cycles.map(cycle =>
                "Cyclic session dependency of " +
                cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos))
          }
        }
        graph.iterator.foldLeft(graph) {
          case (g, (name, (info, _))) =>
            edges(info).foldLeft(g)(add_edge(info.pos, name, _, _))
        }
      }

      val build_graph = augment_graph(info_graph, "parent", _.parent)
      val imports_graph = augment_graph(build_graph, "imports", _.imports)

      val session_positions: List[(String, Position.T)] =
        (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList

      // each (canonical) directory may belong to at most one session
      val session_directories: Map[JFile, String] =
        (for {
          session <- imports_graph.topological_order.iterator
          info = info_graph.get_node(session)
          dir <- info.dirs.iterator
        } yield (info, dir)).foldLeft(Map.empty[JFile, String]) {
          case (dirs, (info, dir)) =>
            val session = info.name
            val canonical_dir = dir.canonical_file
            dirs.get(canonical_dir) match {
              case Some(session1) =>
                val info1 = info_graph.get_node(session1)
                error("Duplicate use of directory " + dir +
                  "\n for session " + quote(session1) + Position.here(info1.pos) +
                  "\n vs. session " + quote(session) + Position.here(info.pos))
              case None => dirs + (canonical_dir -> session)
            }
        }

      // global theory names must be unique across all sessions
      val global_theories: Map[String, String] =
        (for {
          session <- imports_graph.topological_order.iterator
          info = info_graph.get_node(session)
          thy <- info.global_theories.iterator }
          yield (info, thy)
        ).foldLeft(Thy_Header.bootstrap_global_theories.toMap) {
          case (global, (info, thy)) =>
            val qualifier = info.name
            global.get(thy) match {
              case Some(qualifier1) if qualifier != qualifier1 =>
                error("Duplicate global theory " + quote(thy) + Position.here(info.pos))
              case _ => global + (thy -> qualifier)
            }
        }

      new Structure(chapter_defs, session_prefs, session_positions, session_directories,
        global_theories, build_graph, imports_graph)
    }
  }
|
890 |
|
  /* Cumulative session structure: session infos organized as two graphs —
     build_graph (parent relation, used for building) and imports_graph
     (source imports relation).  Instances are created only via
     Structure.make or Structure.selection within this file. */
  final class Structure private[Sessions](
    chapter_defs: Chapter_Defs,
    val session_prefs: String,
    val session_positions: List[(String, Position.T)],
    val session_directories: Map[JFile, String],
    val global_theories: Map[String, String],
    val build_graph: Graph[String, Info],
    val imports_graph: Graph[String, Info]
  ) {
    sessions_structure =>

    // session directories as (standard path, session name) pairs
    def dest_session_directories: List[(String, String)] =
      for ((file, session) <- session_directories.toList)
        yield (File.standard_path(file), session)

    /* All chapters: declared chapter definitions first (in declaration order),
       then undeclared chapters that merely occur in sessions, sorted by name.
       Each Chapter_Info carries its sorted list of session names. */
    lazy val known_chapters: List[Chapter_Info] = {
      // chapter name -> session names (multi-map over the build graph)
      val chapter_sessions =
        Multi_Map.from(
          for ((_, (info, _)) <- build_graph.iterator)
            yield info.chapter -> info.name)
      // explicitly declared chapters
      val chapters1 =
        (for (entry <- chapter_defs.list.iterator) yield {
          val sessions = chapter_sessions.get_list(entry.name)
          Chapter_Info(entry.name, entry.pos, entry.groups, entry.description, sessions.sorted)
        }).toList
      // remaining chapters without a declaration: empty position/groups/description
      val chapters2 =
        (for {
          (name, sessions) <- chapter_sessions.iterator_list
          if !chapters1.exists(_.name == name)
        } yield Chapter_Info(name, Position.none, Nil, "", sessions.sorted)).toList.sortBy(_.name)
      chapters1 ::: chapters2
    }

    // chapters that actually contain sessions
    def relevant_chapters: List[Chapter_Info] = known_chapters.filter(_.sessions.nonEmpty)

    def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph)
    def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph)

    // session lookup via the imports graph (superset of the build graph nodes)
    def defined(name: String): Boolean = imports_graph.defined(name)
    def apply(name: String): Info = imports_graph.get_node(name)
    def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None

    // session qualifier of a theory: global theories map to their defining
    // session, otherwise the qualifier is taken from the long theory name
    def theory_qualifier(name: String): String =
      global_theories.getOrElse(name, Long_Name.qualifier(name))
    def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory)

    // fail on session names that are not part of this structure
    def check_sessions(names: List[String]): Unit = {
      val bad_sessions = SortedSet(names.filterNot(defined): _*).toList
      if (bad_sessions.nonEmpty) {
        error("Undefined session(s): " + commas_quote(bad_sessions))
      }
    }

    def check_sessions(sel: Selection): Unit =
      check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions)

    /* Resolve a Selection against the given graph: a session is selected if it
       was picked up via a select_dir, named directly (or reachable from a base
       session), or belongs to a selected group.  With sel.requirements the
       result is the strict predecessor closure instead of the selection itself. */
    private def selected(graph: Graph[String, Info], sel: Selection): List[String] = {
      check_sessions(sel)

      val select_group = sel.session_groups.toSet
      val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions)

      val selected0 =
        if (sel.all_sessions) graph.keys
        else {
          (for {
            (name, (info, _)) <- graph.iterator
            if info.dir_selected || select_session(name) || info.groups.exists(select_group)
          } yield name).toList
        }

      // requirements: predecessors minus the selection itself
      if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList
      else selected0
    }

    /* Restrict this structure to the given selection: both graphs are cut down
       to the predecessor closure of the selected sessions, with excluded
       sessions (and their successors) removed first. */
    def selection(sel: Selection): Structure = {
      check_sessions(sel)

      // sessions excluded by name or group, including all their descendants
      val excluded = {
        val exclude_group = sel.exclude_session_groups.toSet
        val exclude_group_sessions =
          (for {
            (name, (info, _)) <- imports_graph.iterator
            if info.groups.exists(exclude_group)
          } yield name).toList
        imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet
      }

      // keep predecessors of the (non-excluded) selection, closed again under
      // predecessors so the restricted graph remains well-formed
      def restrict(graph: Graph[String, Info]): Graph[String, Info] = {
        val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded)
        graph.restrict(graph.all_preds(sessions).toSet)
      }

      new Structure(chapter_defs, session_prefs, session_positions, session_directories,
        global_theories, restrict(build_graph), restrict(imports_graph))
    }

    def selection(session: String): Structure = selection(Selection.session(session))

    /* Select sessions and load their dependencies; optionally report the
       number of loading sessions on the progress channel. */
    def selection_deps(
      selection: Selection,
      progress: Progress = new Progress,
      loading_sessions: Boolean = false,
      inlined_files: Boolean = false
    ): Deps = {
      val deps =
        Sessions.deps(sessions_structure.selection(selection),
          progress = progress, inlined_files = inlined_files)

      if (loading_sessions) {
        val selection_size = deps.sessions_structure.build_graph.size
        if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...")
      }

      deps
    }

    // ancestry of a session in the build graph; unknown sessions map to themselves
    def build_hierarchy(session: String): List[String] =
      if (build_graph.defined(session)) build_graph.all_preds(List(session))
      else List(session)

    /* operations on the build graph (parent relation) */
    def build_selection(sel: Selection): List[String] = selected(build_graph, sel)
    def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss)
    def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss)
    def build_topological_order: List[String] = build_graph.topological_order

    /* operations on the imports graph (source imports relation) */
    def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel)
    def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss)
    def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss)
    def imports_topological_order: List[String] = imports_graph.topological_order

    override def toString: String =
      imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")")
  }
|
1025 |
|
1026 |
|
1027 /* parser */ |
|
1028 |
|
  // keywords of the ROOT file language
  private val CHAPTER_DEFINITION = "chapter_definition"
  private val CHAPTER = "chapter"
  private val SESSION = "session"
  private val DESCRIPTION = "description"
  private val DIRECTORIES = "directories"
  private val OPTIONS = "options"
  private val SESSIONS = "sessions"
  private val THEORIES = "theories"
  private val GLOBAL = "global"
  private val DOCUMENT_THEORIES = "document_theories"
  private val DOCUMENT_FILES = "document_files"
  private val EXPORT_FILES = "export_files"
  private val EXPORT_CLASSPATH = "export_classpath"

  /* Outer syntax of ROOT files: punctuation, the "in"/"global" minor keywords,
     the three top-level commands (as THY_DECL) and the section keywords
     inside a session entry (as QUASI_COMMAND). */
  val root_syntax: Outer_Syntax =
    Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + "in" +
      GLOBAL +
      (CHAPTER_DEFINITION, Keyword.THY_DECL) +
      (CHAPTER, Keyword.THY_DECL) +
      (SESSION, Keyword.THY_DECL) +
      (DESCRIPTION, Keyword.QUASI_COMMAND) +
      (DIRECTORIES, Keyword.QUASI_COMMAND) +
      (OPTIONS, Keyword.QUASI_COMMAND) +
      (SESSIONS, Keyword.QUASI_COMMAND) +
      (THEORIES, Keyword.QUASI_COMMAND) +
      (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) +
      (DOCUMENT_FILES, Keyword.QUASI_COMMAND) +
      (EXPORT_FILES, Keyword.QUASI_COMMAND) +
      (EXPORT_CLASSPATH, Keyword.QUASI_COMMAND)
|
1058 |
|
  // parsed entries of a ROOT file
  abstract class Entry

  object Chapter_Def {
    // placeholder definition for a chapter that was never declared explicitly
    def empty(chapter: String): Chapter_Def = Chapter_Def(Position.none, chapter, Nil, "")
  }

  // "chapter_definition NAME (GROUPS) DESCRIPTION"
  sealed case class Chapter_Def(
    pos: Position.T,
    name: String,
    groups: List[String],
    description: String
  ) extends Entry

  // "chapter NAME": switches the current chapter for subsequent sessions
  sealed case class Chapter_Entry(name: String) extends Entry

  // "session NAME (GROUPS) in DIR = PARENT + ..." with all of its sub-sections
  sealed case class Session_Entry(
    pos: Position.T,
    name: String,
    groups: List[String],
    path: String,
    parent: Option[String],
    description: String,
    options: List[Options.Spec],
    imports: List[String],
    directories: List[String],
    theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])],
    document_theories: List[(String, Position.T)],
    document_files: List[(String, String)],
    export_files: List[(String, Int, List[String])],
    export_classpath: List[String]
  ) extends Entry {
    // theories with source positions stripped (options, (name, global?) pairs)
    def theories_no_position: List[(List[Options.Spec], List[(String, Boolean)])] =
      theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) })
    // document theory names with source positions stripped
    def document_theories_no_position: List[String] =
      document_theories.map(_._1)
  }
|
1091 |
|
1092 object Chapter_Defs { |
|
1093 val empty: Chapter_Defs = new Chapter_Defs(Nil) |
|
1094 } |
|
1095 |
|
1096 class Chapter_Defs private(rev_list: List[Chapter_Def]) { |
|
1097 def list: List[Chapter_Def] = rev_list.reverse |
|
1098 |
|
1099 override def toString: String = |
|
1100 list.map(_.name).mkString("Chapter_Defs(", ", ", ")") |
|
1101 |
|
1102 def get(chapter: String): Option[Chapter_Def] = |
|
1103 rev_list.find(_.name == chapter) |
|
1104 |
|
1105 def apply(chapter: String): Chapter_Def = |
|
1106 get(chapter) getOrElse Chapter_Def.empty(chapter) |
|
1107 |
|
1108 def + (entry: Chapter_Def): Chapter_Defs = |
|
1109 get(entry.name) match { |
|
1110 case None => new Chapter_Defs(entry :: rev_list) |
|
1111 case Some(old_entry) => |
|
1112 error("Duplicate chapter definition " + quote(entry.name) + |
|
1113 Position.here(old_entry.pos) + Position.here(entry.pos)) |
|
1114 } |
|
1115 } |
|
1116 |
|
  /* Parser combinators for the ROOT file language, built on the generic
     Options.Parsers token parsers.  Note: "~!" commits the parser after the
     leading keyword, so malformed sections produce errors instead of
     silently backtracking. */
  private object Parsers extends Options.Parsers {
    // "(GROUP1 GROUP2 ...)" or nothing
    private val groups: Parser[List[String]] =
      ($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)

    // optional "description TEXT"
    private val description: Parser[String] =
      ($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")

    // "chapter_definition NAME (GROUPS) DESCRIPTION"
    private val chapter_def: Parser[Chapter_Def] =
      command(CHAPTER_DEFINITION) ~! (position(chapter_name) ~ groups ~ description) ^^
        { case _ ~ ((a, pos) ~ b ~ c) => Chapter_Def(pos, a, b, c) }

    // "chapter NAME"
    private val chapter_entry: Parser[Chapter_Entry] =
      command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter_Entry(a) }

    // full "session ..." entry with all optional sub-sections
    private val session_entry: Parser[Session_Entry] = {
      // "[opt1, opt2, ...]"
      val options = $$$("[") ~> rep1sep(option_spec, $$$(",")) <~ $$$("]")

      // theory name with position, optionally marked "(global)"
      val theory_entry =
        position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) }

      // "theories [OPTIONS] THY1 THY2 ..."
      val theories =
        $$$(THEORIES) ~!
          ((options | success(Nil)) ~ rep1(theory_entry)) ^^
          { case _ ~ (x ~ y) => (x, y) }

      val document_theories =
        $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }

      // "document_files (in DIR) FILE1 ..." — each file paired with its base dir
      val document_files =
        $$$(DOCUMENT_FILES) ~! (in_path_parens("document") ~ rep1(path)) ^^
          { case _ ~ (x ~ y) => y.map((x, _)) }

      // optional "[N]" prune depth for export artifacts (default 0)
      val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0)

      val export_files =
        $$$(EXPORT_FILES) ~! (in_path_parens("export") ~ prune ~ rep1(embedded)) ^^
          { case _ ~ (x ~ y ~ z) => (x, y, z) }

      // "export_classpath" with default pattern when no arguments are given
      val export_classpath =
        $$$(EXPORT_CLASSPATH) ~! (rep1(embedded) | success(List("*:classpath/*.jar"))) ^^
          { case _ ~ x => x }

      command(SESSION) ~!
        (position(session_name) ~ groups ~ in_path(".") ~
          ($$$("=") ~!
            (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ description ~
              (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
              (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
              (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
              rep(theories) ~
              (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
              (rep(document_files) ^^ (x => x.flatten)) ~
              rep(export_files) ~
              opt(export_classpath)))) ^^
        { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l ~ m))) =>
            Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l, m.getOrElse(Nil)) }
    }

    /* Tokenize and parse a complete ROOT file into its entries;
       any parse failure is turned into a hard error. */
    def parse_root(path: Path): List[Entry] = {
      val toks = Token.explode(root_syntax.keywords, File.read(path))
      val start = Token.Pos.file(path.implode)
      val parser: Parser[Entry] = chapter_def | chapter_entry | session_entry
      parse_all(rep(parser), Token.reader(toks, start)) match {
        case Success(result, _) => result
        case bad => error(bad.toString)
      }
    }
  }
|
1185 |
|
  // parse all entries (chapter definitions, chapter switches, sessions) of a ROOT file
  def parse_root(path: Path): List[Entry] = Parsers.parse_root(path)
|
1187 |
|
1188 def parse_root_entries(path: Path): List[Session_Entry] = |
|
1189 Parsers.parse_root(path).flatMap(Library.as_subclass(classOf[Session_Entry])) |
|
1190 |
|
1191 def parse_roots(roots: Path): List[String] = { |
|
1192 for { |
|
1193 line <- split_lines(File.read(roots)) |
|
1194 if !(line == "" || line.startsWith("#")) |
|
1195 } yield line |
|
1196 } |
|
1197 |
|
1198 |
|
1199 /* load sessions from certain directories */ |
|
1200 |
|
  // a session directory is one that contains a ROOT or ROOTS file
  def is_session_dir(dir: Path, ssh: SSH.System = SSH.Local): Boolean =
    ssh.is_file(dir + ROOT) || ssh.is_file(dir + ROOTS)
|
1203 |
|
  /* Check that dir is a session directory and normalize it against the
     current working directory; fail otherwise. */
  def check_session_dir(dir: Path): Path =
    if (is_session_dir(dir)) File.pwd() + dir.expand
    else {
      error("Bad session root directory: " + dir.expand.toString +
        "\n  (missing \"ROOT\" or \"ROOTS\")")
    }
|
1210 |
|
1211 def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = { |
|
1212 val default_dirs = Components.directories().filter(is_session_dir(_)) |
|
1213 for { (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _)) } |
|
1214 yield (select, dir.canonical) |
|
1215 } |
|
1216 |
|
  /* A ROOT file together with its selection status; the canonical file
     serves as identity key for de-duplication in load_root_files. */
  sealed case class Root_File(path: Path, select: Boolean) {
    val key: JFile = path.canonical_file
    def dir: Path = path.dir

    // entries are parsed lazily, on first demand
    lazy val entries: List[Entry] = Parsers.parse_root(path)
  }
|
1223 |
|
  /* Collect all ROOT files from the given directories, following ROOTS
     catalogs recursively.  Duplicate files (same canonical path) are merged:
     the "select" flags are OR-ed and the first occurrence determines the
     position in the resulting list. */
  def load_root_files(
    dirs: List[Path] = Nil,
    select_dirs: List[Path] = Nil
  ): List[Root_File] = {
    // a directory contributes its ROOT file plus everything below its ROOTS catalog
    def load_dir(select: Boolean, dir: Path): List[Root_File] =
      load_root(select, dir) ::: load_roots(select, dir)

    def load_root(select: Boolean, dir: Path): List[Root_File] = {
      val root = dir + ROOT
      if (root.is_file) List(Root_File(root, select)) else Nil
    }

    // recurse into the sub-directories listed in a ROOTS catalog
    def load_roots(select: Boolean, dir: Path): List[Root_File] = {
      val roots = dir + ROOTS
      if (roots.is_file) {
        for {
          entry <- parse_roots(roots)
          dir1 =
            try { check_session_dir(dir + Path.explode(entry)) }
            catch {
              // augment the error with the catalog file it originated from
              case ERROR(msg) =>
                error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
            }
          res <- load_dir(select, dir1)
        } yield res
      }
      else Nil
    }

    val raw_roots: List[Root_File] =
      for {
        (select, dir) <- directories(dirs, select_dirs)
        root <- load_dir(select, check_session_dir(dir))
      } yield root

    /* De-duplicate by canonical file: keep first-seen order (via next_root
       counter) and merge select flags of later duplicates into the first. */
    var next_root = 0
    var seen_roots = Map.empty[JFile, (Root_File, Int)]
    for (root <- raw_roots) {
      seen_roots.get(root.key) match {
        case None =>
          seen_roots += (root.key -> (root, next_root))
          next_root += 1
        case Some((root0, next0)) =>
          val root1 = root0.copy(select = root0.select || root.select)
          seen_roots += (root0.key -> (root1, next0))
      }
    }
    seen_roots.valuesIterator.toList.sortBy(_._2).map(_._1)
  }
|
1273 |
|
1274 |
|
1275 /* Isabelle tool wrapper */ |
|
1276 |
|
  /* Command-line tool "isabelle sessions": resolve the session selection
     given by the options/arguments and print the resulting session names
     in topological order. */
  val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions",
    Scala_Project.here,
    { args =>
      // accumulated command-line state
      var base_sessions: List[String] = Nil
      var select_dirs: List[Path] = Nil
      var requirements = false
      var exclude_session_groups: List[String] = Nil
      var all_sessions = false
      var build_graph = false
      var dirs: List[Path] = Nil
      var session_groups: List[String] = Nil
      var exclude_sessions: List[String] = Nil

      val getopts = Getopts("""
Usage: isabelle sessions [OPTIONS] [SESSIONS ...]

  Options are:
    -B NAME      include session NAME and all descendants
    -D DIR       include session directory and select its sessions
    -R           refer to requirements of selected sessions
    -X NAME      exclude sessions from group NAME and all descendants
    -a           select all sessions
    -b           follow session build dependencies (default: source imports)
    -d DIR       include session directory
    -g NAME      select session group NAME
    -x NAME      exclude session NAME and all descendants

  Explore the structure of Isabelle sessions and print result names in
  topological order (on stdout).
""",
        "B:" -> (arg => base_sessions = base_sessions ::: List(arg)),
        "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))),
        "R" -> (_ => requirements = true),
        "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)),
        "a" -> (_ => all_sessions = true),
        "b" -> (_ => build_graph = true),
        "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
        "g:" -> (arg => session_groups = session_groups ::: List(arg)),
        "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg)))

      // remaining arguments are explicit session names
      val sessions = getopts(args)

      val options = Options.init()

      // resolve the selection against all ROOT files found in dirs/select_dirs
      val selection =
        Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions,
          exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions,
          session_groups = session_groups, sessions = sessions)
      val sessions_structure =
        load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection)

      // -b switches from imports order to build order
      val order =
        if (build_graph) sessions_structure.build_topological_order
        else sessions_structure.imports_topological_order
      for (name <- order) Output.writeln(name, stdout = true)
    })
|
1333 } |
|