author       wenzelm
date         Mon, 09 Jan 2017 20:47:45 +0100
changeset    64856:5e9bf964510a
parent       63996:3f47fec9edfc
child        65251:4b0a43afc3fb
permissions  -rw-r--r--
/*  Title:      Pure/Thy/sessions.scala
    Author:     Makarius

Isabelle session information.
*/

package isabelle

import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.StandardOpenOption

import scala.collection.SortedSet
import scala.collection.mutable


object Sessions
{
  /* Pure */

  def pure_name(name: String): Boolean = name == Thy_Header.PURE

  def pure_files(resources: Resources, syntax: Outer_Syntax, dir: Path): List[Path] =
  {
    val roots = Thy_Header.ml_roots.map(_._1)
    val loaded_files =
      roots.flatMap(root => resources.loaded_files(syntax, File.read(dir + Path.explode(root))))
    (roots ::: loaded_files).map(file => dir + Path.explode(file))
  }
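
  /* Usage sketch (hypothetical values: `resources` stands for some Resources
     instance and "~~/src/Pure" for the Pure source directory):

       val pure_dir = Path.explode("~~/src/Pure")
       val files: List[Path] = pure_files(resources, Thy_Header.bootstrap_syntax, pure_dir)
       // the ML root theories from Thy_Header.ml_roots plus every file they load,
       // all resolved relative to pure_dir
  */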


  /* base info */

  object Base
  {
    val empty: Base = Base()

    lazy val bootstrap: Base =
      Base(keywords = Thy_Header.bootstrap_header, syntax = Thy_Header.bootstrap_syntax)
  }

  sealed case class Base(
    loaded_theories: Set[String] = Set.empty,
    known_theories: Map[String, Document.Node.Name] = Map.empty,
    keywords: Thy_Header.Keywords = Nil,
    syntax: Outer_Syntax = Outer_Syntax.empty,
    sources: List[(Path, SHA1.Digest)] = Nil,
    session_graph: Graph_Display.Graph = Graph_Display.empty_graph)
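
  /* Usage sketch: every field of Base has a default, so callers may name only
     the parts they care about (the values below are illustrative only):

       val base0 = Base()                                   // same as Base.empty
       val base1 = Base(loaded_theories = Set("Pure"), keywords = Thy_Header.bootstrap_header)
  */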


  /* info */

  sealed case class Info(
    chapter: String,
    select: Boolean,
    pos: Position.T,
    groups: List[String],
    dir: Path,
    parent: Option[String],
    description: String,
    options: Options,
    theories: List[(Boolean, Options, List[Path])],
    files: List[Path],
    document_files: List[(Path, Path)],
    meta_digest: SHA1.Digest)
  {
    def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale"))
  }
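
  /* For example, with the (hypothetical) option values timeout = 3600.0 and
     timeout_scale = 2.0, the effective limit is Time.seconds(7200.0). */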


  /* session tree */

  object Tree
  {
    def apply(infos: Seq[(String, Info)]): Tree =
    {
      val graph1 =
        (Graph.string[Info] /: infos) {
          case (graph, (name, info)) =>
            if (graph.defined(name))
              error("Duplicate session " + quote(name) + Position.here(info.pos) +
                Position.here(graph.get_node(name).pos))
            else graph.new_node(name, info)
        }
      val graph2 =
        (graph1 /: graph1.iterator) {
          case (graph, (name, (info, _))) =>
            info.parent match {
              case None => graph
              case Some(parent) =>
                if (!graph.defined(parent))
                  error("Bad parent session " + quote(parent) + " for " +
                    quote(name) + Position.here(info.pos))

                try { graph.add_edge_acyclic(parent, name) }
                catch {
                  case exn: Graph.Cycles[_] =>
                    error(cat_lines(exn.cycles.map(cycle =>
                      "Cyclic session dependency of " +
                        cycle.map(c => quote(c.toString)).mkString(" via "))) +
                      Position.here(info.pos))
                }
            }
        }
      new Tree(graph2)
    }
  }

  final class Tree private(val graph: Graph[String, Info])
    extends PartialFunction[String, Info]
  {
    def apply(name: String): Info = graph.get_node(name)
    def isDefinedAt(name: String): Boolean = graph.defined(name)

    def selection(
      requirements: Boolean = false,
      all_sessions: Boolean = false,
      exclude_session_groups: List[String] = Nil,
      exclude_sessions: List[String] = Nil,
      session_groups: List[String] = Nil,
      sessions: List[String] = Nil): (List[String], Tree) =
    {
      val bad_sessions =
        SortedSet((exclude_sessions ::: sessions).filterNot(isDefinedAt(_)): _*).toList
      if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions))

      val excluded =
      {
        val exclude_group = exclude_session_groups.toSet
        val exclude_group_sessions =
          (for {
            (name, (info, _)) <- graph.iterator
            if apply(name).groups.exists(exclude_group)
          } yield name).toList
        graph.all_succs(exclude_group_sessions ::: exclude_sessions).toSet
      }

      val pre_selected =
      {
        if (all_sessions) graph.keys
        else {
          val select_group = session_groups.toSet
          val select = sessions.toSet
          (for {
            (name, (info, _)) <- graph.iterator
            if info.select || select(name) || apply(name).groups.exists(select_group)
          } yield name).toList
        }
      }.filterNot(excluded)

      val selected =
        if (requirements) (graph.all_preds(pre_selected).toSet -- pre_selected).toList
        else pre_selected

      val graph1 = graph.restrict(graph.all_preds(selected).toSet)
      (selected, new Tree(graph1))
    }
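
    /* Usage sketch (session names are illustrative, `tree` is some Tree):

         val (selected, tree1) = tree.selection(sessions = List("HOL-Library"))
         // selected: the chosen sessions; tree1: restricted to them and their ancestors

         val (preds, _) = tree.selection(requirements = true, sessions = List("HOL-Library"))
         // with requirements = true only the ancestors remain: the pre-selected
         // sessions themselves are removed from the result
    */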

    def ancestors(name: String): List[String] =
      graph.all_preds(List(name)).tail.reverse

    def topological_order: List[(String, Info)] =
      graph.topological_order.map(name => (name, apply(name)))

    override def toString: String = graph.keys_iterator.mkString("Sessions.Tree(", ", ", ")")
  }


  /* parser */

  val ROOT = Path.explode("ROOT")
  val ROOTS = Path.explode("ROOTS")

  private val CHAPTER = "chapter"
  private val SESSION = "session"
  private val IN = "in"
  private val DESCRIPTION = "description"
  private val OPTIONS = "options"
  private val GLOBAL_THEORIES = "global_theories"
  private val THEORIES = "theories"
  private val FILES = "files"
  private val DOCUMENT_FILES = "document_files"

  lazy val root_syntax =
    Outer_Syntax.init() + "(" + ")" + "+" + "," + "=" + "[" + "]" + IN +
      (CHAPTER, Keyword.THY_DECL) +
      (SESSION, Keyword.THY_DECL) +
      (DESCRIPTION, Keyword.QUASI_COMMAND) +
      (OPTIONS, Keyword.QUASI_COMMAND) +
      (GLOBAL_THEORIES, Keyword.QUASI_COMMAND) +
      (THEORIES, Keyword.QUASI_COMMAND) +
      (FILES, Keyword.QUASI_COMMAND) +
      (DOCUMENT_FILES, Keyword.QUASI_COMMAND)
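
  /* A hypothetical ROOT entry in the concrete syntax induced by root_syntax
     and the parser below (names and options are illustrative only):

       chapter "Examples"

       session "My_Session" (main) in "my_dir" = HOL +
         description "Illustrative entry only."
         options [timeout = 300]
         theories [document = false]
           A
           B
         files "extra.ML"
         document_files (in "doc") "root.tex"
  */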

  private object Parser extends Parse.Parser with Options.Parser
  {
    private abstract class Entry
    private sealed case class Chapter(name: String) extends Entry
    private sealed case class Session_Entry(
      pos: Position.T,
      name: String,
      groups: List[String],
      path: String,
      parent: Option[String],
      description: String,
      options: List[Options.Spec],
      theories: List[(Boolean, List[Options.Spec], List[String])],
      files: List[String],
      document_files: List[(String, String)]) extends Entry

    private val chapter: Parser[Chapter] =
    {
      val chapter_name = atom("chapter name", _.is_name)

      command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) }
    }

    private val session_entry: Parser[Session_Entry] =
    {
      val session_name = atom("session name", _.is_name)

      val option =
        option_name ~ opt($$$("=") ~! option_value ^^
          { case _ ~ x => x }) ^^ { case x ~ y => (x, y) }
      val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]")

      val theories =
        ($$$(GLOBAL_THEORIES) | $$$(THEORIES)) ~!
          ((options | success(Nil)) ~ rep(theory_name)) ^^
          { case x ~ (y ~ z) => (x == GLOBAL_THEORIES, y, z) }

      val document_files =
        $$$(DOCUMENT_FILES) ~!
          (($$$("(") ~! ($$$(IN) ~! (path ~ $$$(")"))) ^^
              { case _ ~ (_ ~ (x ~ _)) => x } | success("document")) ~
            rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) }

      command(SESSION) ~!
        (position(session_name) ~
          (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~
          (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~
          ($$$("=") ~!
            (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~
              (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~
              (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
              rep1(theories) ~
              (($$$(FILES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
              (rep(document_files) ^^ (x => x.flatten))))) ^^
        { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i))) =>
            Session_Entry(pos, a, b, c, d, e, f, g, h, i) }
    }

    def parse(options: Options, select: Boolean, dir: Path): List[(String, Info)] =
    {
      def make_info(entry_chapter: String, entry: Session_Entry): (String, Info) =
      {
        try {
          val name = entry.name

          if (name == "") error("Bad session name")
          if (pure_name(name) && entry.parent.isDefined) error("Illegal parent session")
          if (!pure_name(name) && !entry.parent.isDefined) error("Missing parent session")

          val session_options = options ++ entry.options

          val theories =
            entry.theories.map({ case (global, opts, thys) =>
              (global, session_options ++ opts, thys.map(Path.explode(_))) })
          val files = entry.files.map(Path.explode(_))
          val document_files =
            entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })

          val meta_digest =
            SHA1.digest((entry_chapter, name, entry.parent, entry.options,
              entry.theories, entry.files, entry.document_files).toString)

          val info =
            Info(entry_chapter, select, entry.pos, entry.groups, dir + Path.explode(entry.path),
              entry.parent, entry.description, session_options, theories, files,
              document_files, meta_digest)

          (name, info)
        }
        catch {
          case ERROR(msg) =>
            error(msg + "\nThe error(s) above occurred in session entry " +
              quote(entry.name) + Position.here(entry.pos))
        }
      }

      val root = dir + ROOT
      if (root.is_file) {
        val toks = Token.explode(root_syntax.keywords, File.read(root))
        val start = Token.Pos.file(root.implode)

        parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match {
          case Success(result, _) =>
            var entry_chapter = "Unsorted"
            val infos = new mutable.ListBuffer[(String, Info)]
            result.foreach {
              case Chapter(name) => entry_chapter = name
              case entry: Session_Entry => infos += make_info(entry_chapter, entry)
            }
            infos.toList
          case bad => error(bad.toString)
        }
      }
      else Nil
    }
  }


  /* load sessions from certain directories */

  private def is_session_dir(dir: Path): Boolean =
    (dir + ROOT).is_file || (dir + ROOTS).is_file

  private def check_session_dir(dir: Path): Path =
    if (is_session_dir(dir)) dir
    else error("Bad session root directory: " + dir.toString)

  def load(options: Options, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil): Tree =
  {
    def load_dir(select: Boolean, dir: Path): List[(String, Info)] =
      load_root(select, dir) ::: load_roots(select, dir)

    def load_root(select: Boolean, dir: Path): List[(String, Info)] =
      Parser.parse(options, select, dir)

    def load_roots(select: Boolean, dir: Path): List[(String, Info)] =
    {
      val roots = dir + ROOTS
      if (roots.is_file) {
        for {
          line <- split_lines(File.read(roots))
          if !(line == "" || line.startsWith("#"))
          dir1 =
            try { check_session_dir(dir + Path.explode(line)) }
            catch {
              case ERROR(msg) =>
                error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString)
            }
          info <- load_dir(select, dir1)
        } yield info
      }
      else Nil
    }

    val default_dirs = Isabelle_System.components().filter(is_session_dir(_))
    dirs.foreach(check_session_dir(_))
    select_dirs.foreach(check_session_dir(_))

    Tree(
      for {
        (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _))
        info <- load_dir(select, dir)
      } yield info)
  }
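
  /* Usage sketch of the overall flow (assuming `options` from Options.init();
     "some_dir" is a placeholder for a directory containing a ROOT or ROOTS file):

       val tree = Sessions.load(options, dirs = List(Path.explode("some_dir")))
       val (selected, tree1) = tree.selection(all_sessions = true)
       val build_order: List[(String, Info)] = tree1.topological_order
  */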


  /** heap file with SHA1 digest **/

  private val sha1_prefix = "SHA1:"

  def read_heap_digest(heap: Path): Option[String] =
  {
    if (heap.is_file) {
      val file = FileChannel.open(heap.file.toPath, StandardOpenOption.READ)
      try {
        val len = file.size
        val n = sha1_prefix.length + SHA1.digest_length
        if (len >= n) {
          file.position(len - n)

          val buf = ByteBuffer.allocate(n)
          var i = 0
          var m = 0
          do {
            m = file.read(buf)
            if (m != -1) i += m
          }
          while (m != -1 && n > i)

          if (i == n) {
            val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset)
            val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset)
            if (prefix == sha1_prefix) Some(s) else None
          }
          else None
        }
        else None
      }
      finally { file.close }
    }
    else None
  }

  def write_heap_digest(heap: Path): String =
    read_heap_digest(heap) match {
      case None =>
        val s = SHA1.digest(heap).rep
        File.append(heap, sha1_prefix + s)
        s
      case Some(s) => s
    }
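
  /* The annex written by write_heap_digest occupies the final
     sha1_prefix.length + SHA1.digest_length bytes of the heap file, e.g.
     (with a made-up digest):

       ...heap image bytes...SHA1:92429d82a41e930486c6de5ebda9602d55c39986

     read_heap_digest only re-reads that trailing window and checks the prefix,
     so the file is never hashed again once the annex is present.
  */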


  /** persistent store **/

  def log(name: String): Path = Path.basic("log") + Path.basic(name)
  def log_gz(name: String): Path = log(name).ext("gz")

  def store(system_mode: Boolean = false): Store = new Store(system_mode)

  class Store private[Sessions](system_mode: Boolean)
  {
    /* output */

    val browser_info: Path =
      if (system_mode) Path.explode("~~/browser_info")
      else Path.explode("$ISABELLE_BROWSER_INFO")

    val output_dir: Path =
      if (system_mode) Path.explode("~~/heaps/$ML_IDENTIFIER")
      else Path.explode("$ISABELLE_OUTPUT")

    def prepare_output() { Isabelle_System.mkdirs(output_dir + Path.basic("log")) }


    /* input */

    private val input_dirs =
      if (system_mode) List(output_dir)
      else {
        val ml_ident = Path.explode("$ML_IDENTIFIER").expand
        output_dir :: Path.split(Isabelle_System.getenv_strict("ISABELLE_PATH")).map(_ + ml_ident)
      }

    def find(name: String): Option[(Path, Option[String])] =
      input_dirs.find(dir => (dir + log_gz(name)).is_file).map(dir =>
        (dir + log_gz(name), read_heap_digest(dir + Path.basic(name))))

    def find_log(name: String): Option[Path] =
      input_dirs.map(_ + log(name)).find(_.is_file)

    def find_log_gz(name: String): Option[Path] =
      input_dirs.map(_ + log_gz(name)).find(_.is_file)

    def find_heap(name: String): Option[Path] =
      input_dirs.map(_ + Path.basic(name)).find(_.is_file)

    def heap(name: String): Path =
      find_heap(name) getOrElse
        error("Unknown logic " + quote(name) + " -- no heap file found in:\n" +
          cat_lines(input_dirs.map(dir => "  " + dir.expand.implode)))
  }
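
  /* Usage sketch (the session name "HOL" is illustrative):

       val store = Sessions.store(system_mode = false)
       val heap_path: Path = store.heap("HOL")             // error if no heap file is found
       val log_file: Option[Path] = store.find_log_gz("HOL")
  */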
}