author | wenzelm |
Fri, 17 Mar 2017 09:49:01 +0100 | |
changeset 65283 | 042160aee6c2 |
parent 65282 | f4c5f10829a0 |
child 65284 | d189ff34b5b9 |
permissions | -rw-r--r-- |
62631 | 1 |
/*  Title:      Pure/Thy/sessions.scala
    Author:     Makarius

Isabelle session information.
*/
6 |
||
7 |
package isabelle |
|
8 |
||
62704
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
9 |
import java.nio.ByteBuffer |
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
10 |
import java.nio.channels.FileChannel |
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
11 |
import java.nio.file.StandardOpenOption |
62631 | 12 |
|
13 |
import scala.collection.SortedSet |
|
14 |
import scala.collection.mutable |
|
15 |
||
16 |
||
17 |
object Sessions |
|
18 |
{ |
|
62883 | 19 |
/* Pure */ |
20 |
||
/* Pure */

// Test whether the given session name denotes the bootstrap session Pure.
def pure_name(name: String): Boolean = Thy_Header.PURE == name
62883 | 22 |
|
62902
3c0f53eae166
more conventional theory syntax for ML bootstrap, with 'ML_file' instead of 'use';
wenzelm
parents:
62883
diff
changeset
|
23 |
// All source files of the Pure session below dir: the ML ROOT files plus
// every file that loading those ROOT files pulls in via the given syntax.
def pure_files(resources: Resources, syntax: Outer_Syntax, dir: Path): List[Path] =
{
  val root_names = Thy_Header.ml_roots.map(_._1)
  val files_of_roots =
    for {
      root <- root_names
      file <- resources.loaded_files(syntax, File.read(dir + Path.explode(root)))
    } yield file
  (root_names ::: files_of_roots).map(name => dir + Path.explode(name))
}
62883 | 30 |
|
65254
3075aa3b40bf
clarified fall-back base, e.g. relevant for "isabelle jedit -l BAD";
wenzelm
parents:
65251
diff
changeset
|
31 |
// Fall-back base: the session base of Pure itself
// (e.g. relevant for "isabelle jedit -l BAD", per the originating changeset).
def pure_base(options: Options): Base =
  session_base(options, Thy_Header.PURE)
3075aa3b40bf
clarified fall-back base, e.g. relevant for "isabelle jedit -l BAD";
wenzelm
parents:
65251
diff
changeset
|
32 |
|
62883 | 33 |
|
65251 | 34 |
/* base info and source dependencies */ |
64856 | 35 |
|
36 |
object Base |
|
37 |
{ |
|
38 |
lazy val bootstrap: Base = |
|
39 |
Base(keywords = Thy_Header.bootstrap_header, syntax = Thy_Header.bootstrap_syntax) |
|
40 |
} |
|
41 |
||
42 |
// Accumulated per-session base information, produced by dependencies() below.
sealed case class Base(
  loaded_theories: Set[String] = Set.empty,                 // theory names already loaded
  known_theories: Map[String, Document.Node.Name] = Map.empty,  // theory name (long and base) -> node
  keywords: Thy_Header.Keywords = Nil,                      // accumulated outer keywords
  syntax: Outer_Syntax = Outer_Syntax.empty,                // accumulated outer syntax
  sources: List[(Path, SHA1.Digest)] = Nil,                 // all source files with digests
  session_graph: Graph_Display.Graph = Graph_Display.empty_graph)  // graph for presentation
|
49 |
||
65251 | 50 |
// Source dependencies of a collection of sessions: session name -> Base.
sealed case class Deps(deps: Map[String, Base])
{
  // No sessions present at all.
  def is_empty: Boolean = deps.isEmpty
  // Base of the given session; fails for unknown names (Map.apply).
  def apply(name: String): Base = deps(name)
  // SHA1 digests of all source files of the given session.
  def sources(name: String): List[SHA1.Digest] =
    for ((_, digest) <- apply(name).sources) yield digest
}
|
64856 | 56 |
|
65251 | 57 |
// Compute source dependencies for all sessions of the tree, in topological
// order, folding a Map of session bases: each session's Resources are built
// on its parent's Base (or the bootstrap Base for Pure).
def dependencies(
  progress: Progress = No_Progress,
  inlined_files: Boolean = false,     // also collect files loaded by theories
  verbose: Boolean = false,
  list_files: Boolean = false,
  check_keywords: Set[String] = Set.empty,
  tree: Tree): Deps =
  Deps((Map.empty[String, Base] /: tree.topological_order)(
    { case (deps, (name, info)) =>
        // cooperative interrupt via progress
        if (progress.stopped) throw Exn.Interrupt()

        try {
          val resources =
            new Resources(
              info.parent match {
                case None => Base.bootstrap         // Pure has no parent
                case Some(parent) => deps(parent)   // parent already processed (topological order)
              })

          if (verbose || list_files) {
            val groups =
              if (info.groups.isEmpty) ""
              else info.groups.mkString(" (", " ", ")")
            progress.echo("Session " + info.chapter + "/" + name + groups)
          }

          // resolve theory dependencies of all root theories of this session
          val thy_deps =
          {
            val root_theories =
              info.theories.flatMap({
                case (global, _, thys) =>
                  thys.map(thy =>
                    (resources.node_name(
                      if (global) "" else name, info.dir + resources.thy_path(thy)), info.pos))
              })
            val thy_deps = resources.thy_info.dependencies(name, root_theories)

            thy_deps.errors match {
              case Nil => thy_deps
              case errs => error(cat_lines(errs))
            }
          }

          // register each theory under its long name and its base name,
          // rejecting two distinct nodes for the same theory name
          val known_theories =
            (resources.base.known_theories /: thy_deps.deps)({ case (known, dep) =>
              val name = dep.name
              known.get(name.theory) match {
                case Some(name1) if name != name1 =>
                  error("Duplicate theory " + quote(name.node) + " vs. " + quote(name1.node))
                case _ =>
                  known + (name.theory -> name) + (Long_Name.base_name(name.theory) -> name)
              }
            })

          val loaded_theories = thy_deps.loaded_theories
          val keywords = thy_deps.keywords
          val syntax = thy_deps.syntax

          val theory_files = thy_deps.deps.map(dep => Path.explode(dep.name.node))
          val loaded_files =
            if (inlined_files) {
              // Pure additionally loads its ML bootstrap files
              val pure_files =
                if (pure_name(name)) Sessions.pure_files(resources, syntax, info.dir)
                else Nil
              pure_files ::: thy_deps.loaded_files
            }
            else Nil

          // full set of session sources: theories, loaded files,
          // declared 'files', and document files
          val all_files =
            (theory_files ::: loaded_files :::
              info.files.map(file => info.dir + file) :::
              info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)

          if (list_files)
            progress.echo(cat_lines(all_files.map(_.implode).sorted.map("  " + _)))

          if (check_keywords.nonEmpty)
            Check_Keywords.check_keywords(progress, syntax.keywords, check_keywords, theory_files)

          val sources = all_files.map(p => (p, SHA1.digest(p.file)))

          val session_graph =
            Present.session_graph(info.parent getOrElse "",
              resources.base.loaded_theories, thy_deps.deps)

          val base =
            Base(loaded_theories, known_theories, keywords, syntax, sources, session_graph)
          deps + (name -> base)
        }
        catch {
          case ERROR(msg) =>
            cat_error(msg, "The error(s) above occurred in session " +
              quote(name) + Position.here(info.pos))
        }
    }))
|
152 |
||
153 |
// Base of a single session: load the session tree (restricted to the
// given session), compute its dependencies and return its Base.
def session_base(options: Options, session: String, dirs: List[Path] = Nil): Base =
{
  val selection = load(options, dirs = dirs).selection(sessions = List(session))
  val tree = selection._2
  dependencies(tree = tree)(session)
}
|
158 |
||
159 |
||
160 |
/* session tree */ |
|
62631 | 161 |
|
162 |
/* session tree */

// Static information of one session entry as parsed from a ROOT file.
sealed case class Info(
  chapter: String,                               // chapter this session belongs to
  select: Boolean,                               // entry comes from a select_dir
  pos: Position.T,                               // position in the ROOT file
  groups: List[String],                          // session groups
  dir: Path,                                     // session directory
  parent: Option[String],                        // parent session (None only for Pure)
  description: String,
  options: Options,
  theories: List[(Boolean, Options, List[Path])],  // (global?, options, theory paths)
  files: List[Path],                             // additional 'files'
  document_files: List[(Path, Path)],            // (base dir, file)
  meta_digest: SHA1.Digest)                      // digest of the parsed entry
{
  // session timeout, scaled by option "timeout_scale"
  def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))
}
|
178 |
||
179 |
object Tree
{
  // Build a session tree from parsed entries: first insert all nodes
  // (rejecting duplicate session names), then add parent edges
  // (rejecting unknown parents and cyclic dependencies).
  def apply(infos: Seq[(String, Info)]): Tree =
  {
    val graph1 =
      (Graph.string[Info] /: infos) {
        case (graph, (name, info)) =>
          if (graph.defined(name))
            error("Duplicate session " + quote(name) + Position.here(info.pos) +
              Position.here(graph.get_node(name).pos))
          else graph.new_node(name, info)
      }
    val graph2 =
      (graph1 /: graph1.iterator) {
        case (graph, (name, (info, _))) =>
          info.parent match {
            case None => graph
            case Some(parent) =>
              if (!graph.defined(parent))
                error("Bad parent session " + quote(parent) + " for " +
                  quote(name) + Position.here(info.pos))

              // add_edge_acyclic fails on cycles; report them with positions
              try { graph.add_edge_acyclic(parent, name) }
              catch {
                case exn: Graph.Cycles[_] =>
                  error(cat_lines(exn.cycles.map(cycle =>
                    "Cyclic session dependency of " +
                      cycle.map(c => quote(c.toString)).mkString(" via "))) +
                        Position.here(info.pos))
              }
          }
      }
    new Tree(graph2)
  }
}
|
214 |
||
215 |
// Session tree: acyclic graph of session names with Info payload.
// Acts as a partial function from session name to Info.
final class Tree private(val graph: Graph[String, Info])
  extends PartialFunction[String, Info]
{
  def apply(name: String): Info = graph.get_node(name)
  def isDefinedAt(name: String): Boolean = graph.defined(name)

  // Select a subset of sessions and restrict the tree to their
  // ancestor closure; returns (selected names, restricted tree).
  def selection(
    requirements: Boolean = false,          // replace selection by its (strict) requirements
    all_sessions: Boolean = false,
    exclude_session_groups: List[String] = Nil,
    exclude_sessions: List[String] = Nil,
    session_groups: List[String] = Nil,
    sessions: List[String] = Nil): (List[String], Tree) =
  {
    // undefined names in explicit lists are hard errors
    val bad_sessions =
      SortedSet((exclude_sessions ::: sessions).filterNot(isDefinedAt(_)): _*).toList
    if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions))

    // excluded sessions together with everything that depends on them
    val excluded =
    {
      val exclude_group = exclude_session_groups.toSet
      val exclude_group_sessions =
        (for {
          (name, (info, _)) <- graph.iterator
          if apply(name).groups.exists(exclude_group)
        } yield name).toList
      graph.all_succs(exclude_group_sessions ::: exclude_sessions).toSet
    }

    // sessions picked by flags, explicit names, or group membership
    val pre_selected =
    {
      if (all_sessions) graph.keys
      else {
        val select_group = session_groups.toSet
        val select = sessions.toSet
        (for {
          (name, (info, _)) <- graph.iterator
          if info.select || select(name) || apply(name).groups.exists(select_group)
        } yield name).toList
      }
    }.filterNot(excluded)

    // "requirements" means: the ancestors of the selection, without the selection itself
    val selected =
      if (requirements) (graph.all_preds(pre_selected).toSet -- pre_selected).toList
      else pre_selected

    // keep the ancestor closure so parent links stay valid
    val graph1 = graph.restrict(graph.all_preds(selected).toSet)
    (selected, new Tree(graph1))
  }

  // All ancestors of a session, parent-first (excluding the session itself).
  def ancestors(name: String): List[String] =
    graph.all_preds(List(name)).tail.reverse

  def topological_order: List[(String, Info)] =
    graph.topological_order.map(name => (name, apply(name)))

  override def toString: String = graph.keys_iterator.mkString("Sessions.Tree(", ", ", ")")
}
|
273 |
||
274 |
||
275 |
/* parser */ |
|
276 |
||
62864 | 277 |
/* parser */

// standard file names of session catalogs
val ROOT = Path.explode("ROOT")
val ROOTS = Path.explode("ROOTS")

// keywords of the ROOT file language
private val CHAPTER = "chapter"
private val SESSION = "session"
private val IN = "in"
private val DESCRIPTION = "description"
private val OPTIONS = "options"
private val GLOBAL_THEORIES = "global_theories"
private val THEORIES = "theories"
private val FILES = "files"
private val DOCUMENT_FILES = "document_files"

// outer syntax for ROOT files: punctuation plus the keywords above
lazy val root_syntax =
  Outer_Syntax.init() + "(" + ")" + "+" + "," + "=" + "[" + "]" + IN +
    (CHAPTER, Keyword.THY_DECL) +
    (SESSION, Keyword.THY_DECL) +
    (DESCRIPTION, Keyword.QUASI_COMMAND) +
    (OPTIONS, Keyword.QUASI_COMMAND) +
    (GLOBAL_THEORIES, Keyword.QUASI_COMMAND) +
    (THEORIES, Keyword.QUASI_COMMAND) +
    (FILES, Keyword.QUASI_COMMAND) +
    (DOCUMENT_FILES, Keyword.QUASI_COMMAND)
|
62631 | 300 |
|
62968 | 301 |
// Parser for ROOT files: a sequence of 'chapter' and 'session' entries.
private object Parser extends Parse.Parser with Options.Parser
{
  private abstract class Entry
  private sealed case class Chapter(name: String) extends Entry
  // Raw parse result of one 'session' entry (strings, not yet Paths/Options).
  private sealed case class Session_Entry(
    pos: Position.T,
    name: String,
    groups: List[String],
    path: String,
    parent: Option[String],
    description: String,
    options: List[Options.Spec],
    theories: List[(Boolean, List[Options.Spec], List[String])],
    files: List[String],
    document_files: List[(String, String)]) extends Entry

  private val chapter: Parser[Chapter] =
  {
    val chapter_name = atom("chapter name", _.is_name)

    command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) }
  }

  private val session_entry: Parser[Session_Entry] =
  {
    val session_name = atom("session name", _.is_name)

    // option = name ["=" value]
    val option =
      option_name ~ opt($$$("=") ~! option_value ^^
        { case _ ~ x => x }) ^^ { case x ~ y => (x, y) }
    val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]")

    // (global_)theories [options] name*; the Boolean records "global"
    val theories =
      ($$$(GLOBAL_THEORIES) | $$$(THEORIES)) ~!
        ((options | success(Nil)) ~ rep(theory_name)) ^^
        { case x ~ (y ~ z) => (x == GLOBAL_THEORIES, y, z) }

    // document_files ["(" in dir ")"] file+; default dir is "document"
    val document_files =
      $$$(DOCUMENT_FILES) ~!
        (($$$("(") ~! ($$$(IN) ~! (path ~ $$$(")"))) ^^
            { case _ ~ (_ ~ (x ~ _)) => x } | success("document")) ~
          rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }

    // session NAME ["(" groups ")"] ["in" path] "=" [parent "+"]
    //   [description] [options] theories+ [files] document_files*
    command(SESSION) ~!
      (position(session_name) ~
        (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
        (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
        ($$$("=") ~!
          (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~
            (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
            (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
            rep1(theories) ~
            (($$$(FILES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
            (rep(document_files) ^^ (x => x.flatten))))) ^^
      { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i))) =>
          Session_Entry(pos, a, b, c, d, e, f, g, h, i) }
  }

  // Parse the ROOT file of dir (if present) into session Infos.
  def parse(options: Options, select: Boolean, dir: Path): List[(String, Info)] =
  {
    // Elaborate one parsed entry: check name/parent constraints and
    // turn raw strings into Paths/Options; errors are wrapped with position.
    def make_info(entry_chapter: String, entry: Session_Entry): (String, Info) =
    {
      try {
        val name = entry.name

        if (name == "") error("Bad session name")
        // only Pure may (and must) lack a parent
        if (pure_name(name) && entry.parent.isDefined) error("Illegal parent session")
        if (!pure_name(name) && !entry.parent.isDefined) error("Missing parent session")

        val session_options = options ++ entry.options

        val theories =
          entry.theories.map({ case (global, opts, thys) =>
            (global, session_options ++ opts, thys.map(Path.explode(_))) })
        val files = entry.files.map(Path.explode(_))
        val document_files =
          entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })

        // digest of the raw entry, used to detect changes of session metadata
        val meta_digest =
          SHA1.digest((entry_chapter, name, entry.parent, entry.options,
            entry.theories, entry.files, entry.document_files).toString)

        val info =
          Info(entry_chapter, select, entry.pos, entry.groups, dir + Path.explode(entry.path),
            entry.parent, entry.description, session_options, theories, files,
            document_files, meta_digest)

        (name, info)
      }
      catch {
        case ERROR(msg) =>
          error(msg + "\nThe error(s) above occurred in session entry " +
            quote(entry.name) + Position.here(entry.pos))
      }
    }

    val root = dir + ROOT
    if (root.is_file) {
      val toks = Token.explode(root_syntax.keywords, File.read(root))
      val start = Token.Pos.file(root.implode)

      parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match {
        case Success(result, _) =>
          // a Chapter entry applies to all following sessions
          var entry_chapter = "Unsorted"
          val infos = new mutable.ListBuffer[(String, Info)]
          result.foreach {
            case Chapter(name) => entry_chapter = name
            case entry: Session_Entry => infos += make_info(entry_chapter, entry)
          }
          infos.toList
        case bad => error(bad.toString)
      }
    }
    else Nil
  }
}
|
417 |
||
418 |
||
62635 | 419 |
/* load sessions from certain directories */ |
62631 | 420 |
|
421 |
// A session root directory contains a ROOT or ROOTS file.
private def is_session_dir(dir: Path): Boolean =
  List(ROOT, ROOTS).exists(base => (dir + base).is_file)

// Check that dir is a session root directory; return it unchanged or fail.
private def check_session_dir(dir: Path): Path =
  if (!is_session_dir(dir)) error("Bad session root directory: " + dir.toString)
  else dir
|
427 |
||
62635 | 428 |
// Load the session tree from Isabelle components plus the given directories;
// sessions from select_dirs are marked as selected. ROOTS catalogs are
// followed recursively.
def load(options: Options, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil): Tree =
{
  // sessions of a directory: its ROOT file plus entries of its ROOTS catalog
  def load_dir(select: Boolean, dir: Path): List[(String, Info)] =
    load_root(select, dir) ::: load_roots(select, dir)

  def load_root(select: Boolean, dir: Path): List[(String, Info)] =
    Parser.parse(options, select, dir)

  // follow a ROOTS catalog: one sub-directory per non-empty, non-comment line
  def load_roots(select: Boolean, dir: Path): List[(String, Info)] =
  {
    val roots = dir + ROOTS
    if (roots.is_file) {
      for {
        line <- split_lines(File.read(roots))
        if !(line == "" || line.startsWith("#"))
        dir1 =
          try { check_session_dir(dir + Path.explode(line)) }
          catch {
            case ERROR(msg) =>
              error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
          }
        info <- load_dir(select, dir1)
      } yield info
    }
    else Nil
  }

  // implicit roots from Isabelle components; explicit dirs must be valid
  val default_dirs = Isabelle_System.components().filter(is_session_dir(_))
  dirs.foreach(check_session_dir(_))
  select_dirs.foreach(check_session_dir(_))

  Tree(
    for {
      (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _))
      info <- load_dir(select, dir)
    } yield info)
}
|
62632 | 465 |
|
466 |
||
62637
0189fe0f6452
support for Poly/ML heap hierarchy, which saves a lot of disk space;
wenzelm
parents:
62636
diff
changeset
|
467 |
|
62704
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
468 |
/** heap file with SHA1 digest **/ |
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
469 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
470 |
/** heap file with SHA1 digest **/

// marker preceding the digest appended at the very end of a heap file
private val sha1_prefix = "SHA1:"

// Read the SHA1 digest annex from the end of a heap file, if present:
// the trailing bytes are expected to be "SHA1:" followed by the hex digest.
def read_heap_digest(heap: Path): Option[String] =
{
  if (heap.is_file) {
    val file = FileChannel.open(heap.file.toPath, StandardOpenOption.READ)
    try {
      val len = file.size
      val n = sha1_prefix.length + SHA1.digest_length
      if (len >= n) {
        // position at the candidate annex and read exactly n bytes
        file.position(len - n)

        val buf = ByteBuffer.allocate(n)
        var i = 0
        var m = 0
        // loop: read may return fewer bytes than requested; -1 means EOF
        do {
          m = file.read(buf)
          if (m != -1) i += m
        }
        while (m != -1 && n > i)

        if (i == n) {
          val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset)
          val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset)
          // only accept the digest if the marker matches
          if (prefix == sha1_prefix) Some(s) else None
        }
        else None
      }
      else None
    }
    finally { file.close }
  }
  else None
}
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
504 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
505 |
// Ensure the heap file carries a SHA1 digest annex: reuse an existing one,
// otherwise compute the digest of the file and append it. Returns the digest.
def write_heap_digest(heap: Path): String =
  read_heap_digest(heap) getOrElse {
    val digest = SHA1.digest(heap).rep
    File.append(heap, sha1_prefix + digest)
    digest
  }
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
513 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
514 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
515 |
|
62637
0189fe0f6452
support for Poly/ML heap hierarchy, which saves a lot of disk space;
wenzelm
parents:
62636
diff
changeset
|
516 |
/** persistent store **/ |
62632 | 517 |
|
518 |
// Factory for a persistent Store; system_mode switches output to the
// global heap/browser_info locations (see Store below).
def store(system_mode: Boolean = false): Store = new Store(system_mode)
|
519 |
||
63996 | 520 |
// Persistent store for session build artifacts: heap images, log files,
// and SQLite databases. Construct via Sessions.store(...).
class Store private[Sessions](system_mode: Boolean)
{
  /* file names */

  def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db")
  def log(name: String): Path = Path.basic("log") + Path.basic(name)
  def log_gz(name: String): Path = log(name).ext("gz")


  /* data representation */

  // shared cache to compress repeated XML property values
  val xml_cache: XML.Cache = new XML.Cache()

  def encode_properties(ps: Properties.T): Bytes =
    Bytes(YXML.string_of_body(XML.Encode.properties(ps)))

  def decode_properties(bs: Bytes): Properties.T =
    xml_cache.props(XML.Decode.properties(YXML.parse_body(bs.text)))

  // empty list <-> empty Bytes; otherwise YXML-encoded and XZ-compressed
  def compress_properties(ps: List[Properties.T], options: XZ.Options = XZ.options()): Bytes =
  {
    if (ps.isEmpty) Bytes.empty
    else Bytes(YXML.string_of_body(XML.Encode.list(XML.Encode.properties)(ps))).compress(options)
  }

  def uncompress_properties(bs: Bytes): List[Properties.T] =
  {
    if (bs.isEmpty) Nil
    else
      XML.Decode.list(XML.Decode.properties)(YXML.parse_body(bs.uncompress().text)).
        map(xml_cache.props(_))
  }


  /* output */

  // system_mode writes into the Isabelle installation itself
  val browser_info: Path =
    if (system_mode) Path.explode("~~/browser_info")
    else Path.explode("$ISABELLE_BROWSER_INFO")

  val output_dir: Path =
    if (system_mode) Path.explode("~~/heaps/$ML_IDENTIFIER")
    else Path.explode("$ISABELLE_OUTPUT")

  def prepare_output() { Isabelle_System.mkdirs(output_dir + Path.basic("log")) }


  /* input */

  // search path for existing artifacts: output_dir first, then ISABELLE_PATH
  private val input_dirs =
    if (system_mode) List(output_dir)
    else {
      val ml_ident = Path.explode("$ML_IDENTIFIER").expand
      output_dir :: Path.split(Isabelle_System.getenv_strict("ISABELLE_PATH")).map(_ + ml_ident)
    }

  // locate a finished session: its compressed log plus the heap digest (if any)
  def find(name: String): Option[(Path, Option[String])] =
    input_dirs.find(dir => (dir + log_gz(name)).is_file).map(dir =>
      (dir + log_gz(name), read_heap_digest(dir + Path.basic(name))))

  def find_database(name: String): Option[Path] =
    input_dirs.map(_ + database(name)).find(_.is_file)

  def find_log(name: String): Option[Path] =
    input_dirs.map(_ + log(name)).find(_.is_file)

  def find_log_gz(name: String): Option[Path] =
    input_dirs.map(_ + log_gz(name)).find(_.is_file)

  def find_heap(name: String): Option[Path] =
    input_dirs.map(_ + Path.basic(name)).find(_.is_file)

  // heap file of a logic session; fails with the searched directories listed
  def heap(name: String): Path =
    find_heap(name) getOrElse
      error("Unknown logic " + quote(name) + " -- no heap file found in:\n" +
        cat_lines(input_dirs.map(dir => "  " + dir.expand.implode)))
}
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
597 |
|
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
598 |
|
65283 | 599 |
/* session info */ |
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
600 |
|
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
601 |
object Session_Info |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
602 |
{ |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
603 |
// Build_Log.Session_Info |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
604 |
val session_name = SQL.Column.string("session_name") |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
605 |
val session_timing = SQL.Column.bytes("session_timing") |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
606 |
val command_timings = SQL.Column.bytes("command_timings") |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
607 |
val ml_statistics = SQL.Column.bytes("ml_statistics") |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
608 |
val task_statistics = SQL.Column.bytes("task_statistics") |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
609 |
|
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
610 |
// Build.Session_Info |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
611 |
val sources = SQL.Column.string("sources") |
65282 | 612 |
val input_heaps = SQL.Column.string("input_heaps") |
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
613 |
val output_heap = SQL.Column.string("output_heap") |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
614 |
val return_code = SQL.Column.int("return_code") |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
615 |
|
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
616 |
val table = SQL.Table("isabelle_session_info", |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
617 |
List(session_name, session_timing, command_timings, ml_statistics, task_statistics, |
65282 | 618 |
sources, input_heaps, output_heap, return_code)) |
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
619 |
|
65283 | 620 |
def write_data(store: Store, db: SQLite.Database, |
621 |
build_log: Build_Log.Session_Info, build: Build.Session_Info) |
|
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
622 |
{ |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
623 |
db.transaction { |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
624 |
db.drop_table(table) |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
625 |
db.create_table(table) |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
626 |
using(db.insert_statement(table))(stmt => |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
627 |
{ |
65283 | 628 |
db.set_string(stmt, 1, build_log.session_name) |
629 |
db.set_bytes(stmt, 2, store.encode_properties(build_log.session_timing)) |
|
630 |
db.set_bytes(stmt, 3, store.compress_properties(build_log.command_timings)) |
|
631 |
db.set_bytes(stmt, 4, store.compress_properties(build_log.ml_statistics)) |
|
632 |
db.set_bytes(stmt, 5, store.compress_properties(build_log.task_statistics)) |
|
633 |
db.set_string(stmt, 6, build.sources) |
|
634 |
db.set_string(stmt, 7, build.input_heaps) |
|
635 |
db.set_string(stmt, 8, build.output_heap) |
|
636 |
db.set_int(stmt, 9, build.return_code) |
|
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
637 |
stmt.execute() |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
638 |
}) |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
639 |
} |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
640 |
} |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
641 |
|
65283 | 642 |
def read_data(store: Store, db: SQLite.Database) |
643 |
: Option[(Build_Log.Session_Info, Build.Session_Info)] = |
|
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
644 |
{ |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
645 |
using(db.select_statement(table, table.columns))(stmt => |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
646 |
{ |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
647 |
val rs = stmt.executeQuery |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
648 |
if (!rs.next) None |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
649 |
else { |
65283 | 650 |
val build_log = |
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
651 |
Build_Log.Session_Info( |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
652 |
db.string(rs, session_name.name), |
65283 | 653 |
store.decode_properties(db.bytes(rs, session_timing.name)), |
654 |
store.uncompress_properties(db.bytes(rs, command_timings.name)), |
|
655 |
store.uncompress_properties(db.bytes(rs, ml_statistics.name)), |
|
656 |
store.uncompress_properties(db.bytes(rs, task_statistics.name))) |
|
657 |
val build = |
|
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
658 |
Build.Session_Info( |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
659 |
db.string(rs, sources.name), |
65282 | 660 |
db.string(rs, input_heaps.name), |
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
661 |
db.string(rs, output_heap.name), |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
662 |
db.int(rs, return_code.name)) |
65283 | 663 |
Some((build_log, build)) |
65281
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
664 |
} |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
665 |
}) |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
666 |
} |
c70e7d24a16d
SQL database operations for combined session info;
wenzelm
parents:
65278
diff
changeset
|
667 |
} |
62631 | 668 |
} |