author | wenzelm |
Wed, 15 Mar 2017 11:04:46 +0100 | |
changeset 65254 | 3075aa3b40bf |
parent 65251 | 4b0a43afc3fb |
child 65269 | 2947837b9f04 |
permissions | -rw-r--r-- |
62631 | 1 |
/* Title: Pure/Thy/sessions.scala |
2 |
Author: Makarius |
|
3 |
||
62973 | 4 |
Isabelle session information. |
62631 | 5 |
*/ |
6 |
||
7 |
package isabelle |
|
8 |
||
62704
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
9 |
import java.nio.ByteBuffer |
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
10 |
import java.nio.channels.FileChannel |
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
11 |
import java.nio.file.StandardOpenOption |
62631 | 12 |
|
13 |
import scala.collection.SortedSet |
|
14 |
import scala.collection.mutable |
|
15 |
||
16 |
||
17 |
object Sessions |
|
18 |
{ |
|
62883 | 19 |
/* Pure */ |
20 |
||
63022 | 21 |
/* Is this the name of the Pure bootstrap session? */
def pure_name(name: String): Boolean = Thy_Header.PURE == name
62883 | 22 |
|
62902
3c0f53eae166
more conventional theory syntax for ML bootstrap, with 'ML_file' instead of 'use';
wenzelm
parents:
62883
diff
changeset
|
23 |
/* Source files of the Pure session: the ML root files themselves plus
   every file they load, each resolved relative to dir. */
def pure_files(resources: Resources, syntax: Outer_Syntax, dir: Path): List[Path] =
{
  val root_names = Thy_Header.ml_roots.map(_._1)
  val loaded =
    for {
      root <- root_names
      file <- resources.loaded_files(syntax, File.read(dir + Path.explode(root)))
    } yield file
  for (entry <- root_names ::: loaded) yield dir + Path.explode(entry)
}
62883 | 30 |
|
65254
3075aa3b40bf
clarified fall-back base, e.g. relevant for "isabelle jedit -l BAD";
wenzelm
parents:
65251
diff
changeset
|
31 |
/* Fall-back base via the Pure session, e.g. relevant for "isabelle jedit -l BAD" */
def pure_base(options: Options): Base = session_base(options, Thy_Header.PURE)
3075aa3b40bf
clarified fall-back base, e.g. relevant for "isabelle jedit -l BAD";
wenzelm
parents:
65251
diff
changeset
|
32 |
|
62883 | 33 |
|
65251 | 34 |
/* base info and source dependencies */ |
64856 | 35 |
|
36 |
/* Distinguished Base values. */
object Base
{
  // all-default (empty) fields
  val empty: Base = Base()

  // minimal base for processing Pure itself: bootstrap header keywords/syntax only
  lazy val bootstrap: Base =
    Base(syntax = Thy_Header.bootstrap_syntax, keywords = Thy_Header.bootstrap_header)
}
|
43 |
||
44 |
/* Static information about one session, as accumulated by dependencies():
   everything later build/editing stages need to know about its sources. */
sealed case class Base(
  loaded_theories: Set[String] = Set.empty,                  // theories loaded by this session (and ancestors)
  known_theories: Map[String, Document.Node.Name] = Map.empty,  // theory name (long and base) -> node
  keywords: Thy_Header.Keywords = Nil,                       // accumulated outer keywords
  syntax: Outer_Syntax = Outer_Syntax.empty,                 // accumulated outer syntax
  sources: List[(Path, SHA1.Digest)] = Nil,                  // all source files with digests
  session_graph: Graph_Display.Graph = Graph_Display.empty_graph)  // theory graph for presentation
|
51 |
||
65251 | 52 |
/* Bases of several sessions, indexed by session name. */
sealed case class Deps(deps: Map[String, Base])
{
  def is_empty: Boolean = deps.isEmpty
  def apply(name: String): Base = deps(name)

  // digests of all source files of the given session
  def sources(name: String): List[SHA1.Digest] =
    for ((_, digest) <- apply(name).sources) yield digest
}
|
64856 | 58 |
|
65251 | 59 |
/* Resolve source dependencies for all sessions of the tree, in topological
   order: each session's Base is derived from its parent's.  Fails with a
   cumulative error message for bad theory dependencies or duplicate theories. */
def dependencies(
    progress: Progress = No_Progress,
    inlined_files: Boolean = false,
    verbose: Boolean = false,
    list_files: Boolean = false,
    check_keywords: Set[String] = Set.empty,
    tree: Tree): Deps =
  Deps((Map.empty[String, Base] /: tree.topological_order)(
    { case (sessions, (name, info)) =>
        if (progress.stopped) throw Exn.Interrupt()

        try {
          // resources based on the parent session, or the bootstrap base for Pure
          val resources =
            new Resources(
              info.parent match {
                case None => Base.bootstrap
                case Some(parent) => sessions(parent)
              })

          if (verbose || list_files) {
            val groups =
              if (info.groups.isEmpty) ""
              else info.groups.mkString(" (", " ", ")")
            progress.echo("Session " + info.chapter + "/" + name + groups)
          }

          // theory dependencies of the declared root theories
          val thy_deps =
          {
            val root_theories =
              info.theories.flatMap({
                case (global, _, thys) =>
                  thys.map(thy =>
                    (resources.node_name(
                      if (global) "" else name, info.dir + resources.thy_path(thy)), info.pos))
              })
            val thy_deps = resources.thy_info.dependencies(name, root_theories)

            thy_deps.errors match {
              case Nil => thy_deps
              case errs => error(cat_lines(errs))
            }
          }

          // register each theory under its long and base name, rejecting clashes
          val known_theories =
            (resources.base.known_theories /: thy_deps.deps)({ case (known, dep) =>
              val thy_name = dep.name
              known.get(thy_name.theory) match {
                case Some(name1) if thy_name != name1 =>
                  error("Duplicate theory " + quote(thy_name.node) + " vs. " + quote(name1.node))
                case _ =>
                  known + (thy_name.theory -> thy_name) +
                    (Long_Name.base_name(thy_name.theory) -> thy_name)
              }
            })

          val loaded_theories = thy_deps.loaded_theories
          val keywords = thy_deps.keywords
          val syntax = thy_deps.syntax

          val theory_files = thy_deps.deps.map(dep => Path.explode(dep.name.node))
          // files loaded by theories, including the special ML roots of Pure
          val loaded_files =
            if (inlined_files) {
              val pure_files =
                if (pure_name(name)) Sessions.pure_files(resources, syntax, info.dir)
                else Nil
              pure_files ::: thy_deps.loaded_files
            }
            else Nil

          val all_files =
            (theory_files ::: loaded_files :::
              info.files.map(file => info.dir + file) :::
              info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand)

          if (list_files)
            progress.echo(cat_lines(all_files.map(_.implode).sorted.map("  " + _)))

          if (check_keywords.nonEmpty)
            Check_Keywords.check_keywords(progress, syntax.keywords, check_keywords, theory_files)

          val sources = all_files.map(p => (p, SHA1.digest(p.file)))

          val session_graph =
            Present.session_graph(info.parent getOrElse "",
              resources.base.loaded_theories, thy_deps.deps)

          val base =
            Base(loaded_theories, known_theories, keywords, syntax, sources, session_graph)
          sessions + (name -> base)
        }
        catch {
          case ERROR(msg) =>
            cat_error(msg, "The error(s) above occurred in session " +
              quote(name) + Position.here(info.pos))
        }
    }))
|
154 |
||
155 |
/* Base of a single session, computed from its source dependencies. */
def session_base(options: Options, session: String, dirs: List[Path] = Nil): Base =
{
  val full_tree = load(options, dirs = dirs)
  val (_, tree) = full_tree.selection(sessions = List(session))
  dependencies(tree = tree)(session)
}
|
160 |
||
161 |
||
162 |
/* session tree */ |
|
62631 | 163 |
|
164 |
/* Static ROOT entry information for one session. */
sealed case class Info(
  chapter: String,                              // chapter name (default "Unsorted")
  select: Boolean,                              // entry stems from a select_dir (cf. load)
  pos: Position.T,                              // position within the ROOT file
  groups: List[String],                         // session groups
  dir: Path,                                    // session directory
  parent: Option[String],                       // parent session; None only for Pure
  description: String,
  options: Options,                             // session options on top of the defaults
  theories: List[(Boolean, Options, List[Path])],  // (global?, options, theory paths)
  files: List[Path],
  document_files: List[(Path, Path)],           // (base dir, file)
  meta_digest: SHA1.Digest)                     // digest of this entry's content
{
  // build timeout: the "timeout" option scaled by "timeout_scale"
  def timeout: Time =
  {
    val factor = options.real("timeout_scale")
    Time.seconds(options.real("timeout") * factor)
  }
}
|
180 |
||
181 |
/* Build a session tree from ROOT entries, checking for duplicate names,
   undefined parents, and cyclic parent relations. */
object Tree
{
  def apply(infos: Seq[(String, Info)]): Tree =
  {
    // first pass: one node per session entry, names must be unique
    val nodes_graph =
      (Graph.string[Info] /: infos) {
        case (graph, (name, info)) =>
          if (graph.defined(name))
            error("Duplicate session " + quote(name) + Position.here(info.pos) +
              Position.here(graph.get_node(name).pos))
          else graph.new_node(name, info)
      }
    // second pass: parent edges, which must exist and stay acyclic
    val full_graph =
      (nodes_graph /: nodes_graph.iterator) {
        case (graph, (name, (info, _))) =>
          info.parent match {
            case None => graph
            case Some(parent) =>
              if (!graph.defined(parent))
                error("Bad parent session " + quote(parent) + " for " +
                  quote(name) + Position.here(info.pos))

              try { graph.add_edge_acyclic(parent, name) }
              catch {
                case exn: Graph.Cycles[_] =>
                  error(cat_lines(exn.cycles.map(cycle =>
                    "Cyclic session dependency of " +
                      cycle.map(c => quote(c.toString)).mkString(" via "))) +
                    Position.here(info.pos))
              }
          }
      }
    new Tree(full_graph)
  }
}
|
216 |
||
217 |
/* Session tree: acyclic graph of session Infos keyed by name, with parent
   edges.  Construction happens via the companion object only. */
final class Tree private(val graph: Graph[String, Info])
  extends PartialFunction[String, Info]
{
  def apply(name: String): Info = graph.get_node(name)
  def isDefinedAt(name: String): Boolean = graph.defined(name)

  /* Select a subset of sessions by name/group (closed under descendants of
     exclusions and ancestors of the selection); returns the selected names
     and the restricted tree.  With requirements, yield only the ancestors
     required by the selection instead of the selection itself. */
  def selection(
    requirements: Boolean = false,
    all_sessions: Boolean = false,
    exclude_session_groups: List[String] = Nil,
    exclude_sessions: List[String] = Nil,
    session_groups: List[String] = Nil,
    sessions: List[String] = Nil): (List[String], Tree) =
  {
    val bad_sessions =
      SortedSet((exclude_sessions ::: sessions).filterNot(isDefinedAt(_)): _*).toList
    if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions))

    val excluded =
    {
      val exclude_group = exclude_session_groups.toSet
      val exclude_group_sessions =
        (for {
          (name, (info, _)) <- graph.iterator
          // use the bound info directly (was: redundant apply(name) lookup of the same node)
          if info.groups.exists(exclude_group)
        } yield name).toList
      // everything excluded drags its descendants along
      graph.all_succs(exclude_group_sessions ::: exclude_sessions).toSet
    }

    val pre_selected =
    {
      if (all_sessions) graph.keys
      else {
        val select_group = session_groups.toSet
        val select = sessions.toSet
        (for {
          (name, (info, _)) <- graph.iterator
          if info.select || select(name) || info.groups.exists(select_group)
        } yield name).toList
      }
    }.filterNot(excluded)

    val selected =
      if (requirements) (graph.all_preds(pre_selected).toSet -- pre_selected).toList
      else pre_selected

    // restrict to the selection plus all of its ancestors
    val graph1 = graph.restrict(graph.all_preds(selected).toSet)
    (selected, new Tree(graph1))
  }

  // strict ancestors of a session, parent-first
  def ancestors(name: String): List[String] =
    graph.all_preds(List(name)).tail.reverse

  def topological_order: List[(String, Info)] =
    graph.topological_order.map(name => (name, apply(name)))

  override def toString: String = graph.keys_iterator.mkString("Sessions.Tree(", ", ", ")")
}
|
275 |
||
276 |
||
277 |
/* parser */ |
|
278 |
||
62864 | 279 |
/* names of session catalog files */
val ROOT = Path.explode("ROOT")
val ROOTS = Path.explode("ROOTS")

/* keywords of the session ROOT language */
private val CHAPTER = "chapter"
private val SESSION = "session"
private val IN = "in"
private val DESCRIPTION = "description"
private val OPTIONS = "options"
private val GLOBAL_THEORIES = "global_theories"
private val THEORIES = "theories"
private val FILES = "files"
private val DOCUMENT_FILES = "document_files"

/* outer syntax of ROOT files: punctuation plus the keywords above */
lazy val root_syntax =
  Outer_Syntax.init() + "(" + ")" + "+" + "," + "=" + "[" + "]" + IN +
    (CHAPTER, Keyword.THY_DECL) +
    (SESSION, Keyword.THY_DECL) +
    (DESCRIPTION, Keyword.QUASI_COMMAND) +
    (OPTIONS, Keyword.QUASI_COMMAND) +
    (GLOBAL_THEORIES, Keyword.QUASI_COMMAND) +
    (THEORIES, Keyword.QUASI_COMMAND) +
    (FILES, Keyword.QUASI_COMMAND) +
    (DOCUMENT_FILES, Keyword.QUASI_COMMAND)
|
62631 | 302 |
|
62968 | 303 |
/* Parser for ROOT files: chapter and session entries. */
private object Parser extends Parse.Parser with Options.Parser
{
  private abstract class Entry
  private sealed case class Chapter(name: String) extends Entry
  private sealed case class Session_Entry(
    pos: Position.T,
    name: String,
    groups: List[String],
    path: String,
    parent: Option[String],
    description: String,
    options: List[Options.Spec],
    theories: List[(Boolean, List[Options.Spec], List[String])],
    files: List[String],
    document_files: List[(String, String)]) extends Entry

  // 'chapter' NAME
  private val chapter: Parser[Chapter] =
  {
    val chapter_name = atom("chapter name", _.is_name)

    command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) }
  }

  // 'session' NAME (GROUPS)? ('in' DIR)? '=' (PARENT '+')? body
  private val session_entry: Parser[Session_Entry] =
  {
    val session_name = atom("session name", _.is_name)

    val option =
      option_name ~ opt($$$("=") ~! option_value ^^
        { case _ ~ x => x }) ^^ { case x ~ y => (x, y) }
    val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]")

    val theories =
      ($$$(GLOBAL_THEORIES) | $$$(THEORIES)) ~!
        ((options | success(Nil)) ~ rep(theory_name)) ^^
        { case x ~ (y ~ z) => (x == GLOBAL_THEORIES, y, z) }

    val document_files =
      $$$(DOCUMENT_FILES) ~!
        (($$$("(") ~! ($$$(IN) ~! (path ~ $$$(")"))) ^^
          { case _ ~ (_ ~ (x ~ _)) => x } | success("document")) ~
          rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }

    command(SESSION) ~!
      (position(session_name) ~
        (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
        (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
        ($$$("=") ~!
          (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~
            (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
            (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
            rep1(theories) ~
            (($$$(FILES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
            (rep(document_files) ^^ (x => x.flatten))))) ^^
      { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i))) =>
          Session_Entry(pos, a, b, c, d, e, f, g, h, i) }
  }

  /* Parse the ROOT file of dir (if present) into session Infos. */
  def parse(options: Options, select: Boolean, dir: Path): List[(String, Info)] =
  {
    // turn one parsed entry into the externally visible Info
    def make_info(entry_chapter: String, entry: Session_Entry): (String, Info) =
    {
      try {
        val name = entry.name

        if (name == "") error("Bad session name")
        if (pure_name(name) && entry.parent.isDefined) error("Illegal parent session")
        if (!pure_name(name) && !entry.parent.isDefined) error("Missing parent session")

        val session_options = options ++ entry.options

        val theories =
          entry.theories.map({ case (global, opts, thys) =>
            (global, session_options ++ opts, thys.map(Path.explode(_))) })
        val files = entry.files.map(Path.explode(_))
        val document_files =
          entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })

        // digest of everything that identifies this entry's content
        val meta_digest =
          SHA1.digest((entry_chapter, name, entry.parent, entry.options,
            entry.theories, entry.files, entry.document_files).toString)

        val info =
          Info(entry_chapter, select, entry.pos, entry.groups, dir + Path.explode(entry.path),
            entry.parent, entry.description, session_options, theories, files,
            document_files, meta_digest)

        (name, info)
      }
      catch {
        case ERROR(msg) =>
          error(msg + "\nThe error(s) above occurred in session entry " +
            quote(entry.name) + Position.here(entry.pos))
      }
    }

    val root = dir + ROOT
    if (root.is_file) {
      val toks = Token.explode(root_syntax.keywords, File.read(root))
      val start = Token.Pos.file(root.implode)

      parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match {
        case Success(result, _) =>
          // a Chapter entry sets the chapter for all subsequent session entries
          val (_, rev_infos) =
            (("Unsorted", List.empty[(String, Info)]) /: result) {
              case ((_, acc), Chapter(name)) => (name, acc)
              case ((chapter_name, acc), entry: Session_Entry) =>
                (chapter_name, make_info(chapter_name, entry) :: acc)
            }
          rev_infos.reverse
        case bad => error(bad.toString)
      }
    }
    else Nil
  }
}
|
419 |
||
420 |
||
62635 | 421 |
/* load sessions from certain directories */ |
62631 | 422 |
|
423 |
// a session directory provides a ROOT file or a ROOTS catalog
private def is_session_dir(dir: Path): Boolean =
  List(ROOT, ROOTS).exists(file => (dir + file).is_file)
|
425 |
||
426 |
// yield dir unchanged, or fail for a directory without session information
private def check_session_dir(dir: Path): Path =
  if (is_session_dir(dir)) dir
  else error("Bad session root directory: " + dir.toString)
|
429 |
||
62635 | 430 |
/* Load all session entries reachable from the Isabelle components and the
   given directories; entries from select_dirs are marked as selected. */
def load(options: Options, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil): Tree =
{
  // entries of the directory's own ROOT file, then of its ROOTS catalog
  def load_dir(select: Boolean, dir: Path): List[(String, Info)] =
    load_root(select, dir) ::: load_roots(select, dir)

  def load_root(select: Boolean, dir: Path): List[(String, Info)] =
    Parser.parse(options, select, dir)

  // recurse into the directories listed in a ROOTS catalog file
  def load_roots(select: Boolean, dir: Path): List[(String, Info)] =
  {
    val roots = dir + ROOTS
    if (roots.is_file) {
      for {
        line <- split_lines(File.read(roots))
        if !(line == "" || line.startsWith("#"))  // skip blank lines and comments
        dir1 =
          try { check_session_dir(dir + Path.explode(line)) }
          catch {
            case ERROR(msg) =>
              error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
          }
        info <- load_dir(select, dir1)
      } yield info
    }
    else Nil
  }

  val default_dirs = Isabelle_System.components().filter(is_session_dir(_))
  dirs.foreach(check_session_dir(_))
  select_dirs.foreach(check_session_dir(_))

  Tree(
    for {
      (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _))
      info <- load_dir(select, dir)
    } yield info)
}
|
62632 | 467 |
|
468 |
||
62637
0189fe0f6452
support for Poly/ML heap hierarchy, which saves a lot of disk space;
wenzelm
parents:
62636
diff
changeset
|
469 |
|
62704
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
470 |
/** heap file with SHA1 digest **/ |
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
471 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
472 |
// marker preceding the SHA1 digest annex at the end of a heap file
private val sha1_prefix = "SHA1:"
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
473 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
474 |
/* Read the SHA1 digest annex from the end of a heap file, if present:
   the last bytes are sha1_prefix followed by the hex digest. */
def read_heap_digest(heap: Path): Option[String] =
{
  if (heap.is_file) {
    val file = FileChannel.open(heap.file.toPath, StandardOpenOption.READ)
    try {
      val len = file.size
      val n = sha1_prefix.length + SHA1.digest_length
      if (len >= n) {
        // the annex occupies exactly the last n bytes
        file.position(len - n)

        val buf = ByteBuffer.allocate(n)
        var count = 0
        var last = 0
        // FileChannel.read may deliver short counts: loop until full or EOF
        while (last != -1 && count < n) {
          last = file.read(buf)
          if (last != -1) count += last
        }

        if (count == n) {
          val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset)
          val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset)
          if (prefix == sha1_prefix) Some(s) else None
        }
        else None
      }
      else None
    }
    finally { file.close() }
  }
  else None
}
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
506 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
507 |
/* Ensure the heap file carries a digest annex, appending one if absent;
   return the digest either way. */
def write_heap_digest(heap: Path): String =
  read_heap_digest(heap) getOrElse {
    val digest = SHA1.digest(heap).rep
    File.append(heap, sha1_prefix + digest)
    digest
  }
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
515 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
516 |
|
478b49f0d726
proper SHA1 digest as annex to heap file: Poly/ML reads precise segment length;
wenzelm
parents:
62637
diff
changeset
|
517 |
|
62637
0189fe0f6452
support for Poly/ML heap hierarchy, which saves a lot of disk space;
wenzelm
parents:
62636
diff
changeset
|
518 |
/** persistent store **/ |
62632 | 519 |
|
520 |
/* relative locations of build log files */
def log(name: String): Path = Path.basic("log") + Path.basic(name)
def log_gz(name: String): Path = log(name).ext("gz")

/* make a persistent store, optionally in system mode */
def store(system_mode: Boolean = false): Store = new Store(system_mode)
|
524 |
||
63996 | 525 |
/* Persistent store for session heaps and logs.  In system mode everything
   lives inside the Isabelle distribution; otherwise in the user's output
   directory, with ISABELLE_PATH consulted for further input locations. */
class Store private[Sessions](system_mode: Boolean)
{
  /* output */

  val browser_info: Path =
    if (system_mode) Path.explode("~~/browser_info")
    else Path.explode("$ISABELLE_BROWSER_INFO")

  val output_dir: Path =
    if (system_mode) Path.explode("~~/heaps/$ML_IDENTIFIER")
    else Path.explode("$ISABELLE_OUTPUT")

  def prepare_output(): Unit = Isabelle_System.mkdirs(output_dir + Path.basic("log"))


  /* input */

  // output_dir always comes first; in user mode the ISABELLE_PATH entries follow
  private val input_dirs =
    if (system_mode) List(output_dir)
    else {
      val ml_ident = Path.explode("$ML_IDENTIFIER").expand
      output_dir :: Path.split(Isabelle_System.getenv_strict("ISABELLE_PATH")).map(_ + ml_ident)
    }

  // first input dir containing the file derived from name by make_path
  private def find_file(name: String, make_path: String => Path): Option[Path] =
    input_dirs.map(dir => dir + make_path(name)).find(_.is_file)

  // compressed session log together with the heap digest of the same dir, if any
  def find(name: String): Option[(Path, Option[String])] =
    input_dirs.find(dir => (dir + log_gz(name)).is_file).map(dir =>
      (dir + log_gz(name), read_heap_digest(dir + Path.basic(name))))

  def find_log(name: String): Option[Path] = find_file(name, log(_))

  def find_log_gz(name: String): Option[Path] = find_file(name, log_gz(_))

  def find_heap(name: String): Option[Path] = find_file(name, Path.basic(_))

  // heap file of a logic, or error listing every directory that was searched
  def heap(name: String): Path =
    find_heap(name) getOrElse
      error("Unknown logic " + quote(name) + " -- no heap file found in:\n" +
        cat_lines(input_dirs.map(dir => "  " + dir.expand.implode)))
}
62631 | 567 |
} |