1 /* Title: Pure/Thy/export.scala |
|
2 Author: Makarius |
|
3 |
|
4 Manage theory exports: compressed blobs. |
|
5 */ |
|
6 |
|
7 package isabelle |
|
8 |
|
9 |
|
10 import scala.annotation.tailrec |
|
11 import scala.util.matching.Regex |
|
12 import scala.collection.mutable |
|
13 |
|
14 |
|
15 object Export { |
|
16 /* artefact names */ |
|
17 |
|
// PIDE-internal export names (produced by the prover process)
val DOCUMENT_ID: String = "PIDE/document_id"
val FILES: String = "PIDE/files"
val MARKUP: String = "PIDE/markup"
val MESSAGES: String = "PIDE/messages"

// document preparation artefacts, e.g. generated LaTeX sources
val DOCUMENT_PREFIX: String = "document/"
val DOCUMENT_LATEX: String = DOCUMENT_PREFIX + "latex"

// prefixes for theory content and proof-term exports
val THEORY_PREFIX: String = "theory/"
val PROOFS_PREFIX: String = "proofs/"

// export names are slash-separated paths, e.g. "theory/parents"
def explode_name(s: String): List[String] = space_explode('/', s)
def implode_name(elems: Iterable[String]): String = elems.mkString("/")
|
29 |
|
30 |
|
31 /* SQL data model */ |
|
32 |
|
/* SQL data model: the "isabelle_exports" table with one row per export blob,
   keyed by (session_name, theory_name, name).  All operations here assume the
   caller holds the transaction lock (see the public wrappers below). */
object private_data extends SQL.Data() {
  override lazy val tables = SQL.Tables(Base.table)

  object Base {
    // composite primary key of an export entry
    val session_name = SQL.Column.string("session_name").make_primary_key
    val theory_name = SQL.Column.string("theory_name").make_primary_key
    val name = SQL.Column.string("name").make_primary_key
    val executable = SQL.Column.bool("executable")
    // whether "body" is stored compressed (see Entry.bytes for decompression)
    val compressed = SQL.Column.bool("compressed")
    val body = SQL.Column.bytes("body")

    val table =
      SQL.Table("isabelle_exports",
        List(session_name, theory_name, name, executable, compressed, body))
  }

  // WHERE clause for a session, optionally restricted to a theory and entry name
  def where_equal(session_name: String, theory_name: String = "", name: String = ""): SQL.Source =
    SQL.where_and(
      Base.session_name.equal(session_name),
      if_proper(theory_name, Base.theory_name.equal(theory_name)),
      if_proper(name, Base.name.equal(name)))

  // delete all exports of the given session
  def clean_session(db: SQL.Database, session_name: String): Unit =
    db.execute_statement(Base.table.delete(sql = where_equal(session_name)))

  // subset of entry_names that are already present in the database
  def known_entries(db: SQL.Database, entry_names: Iterable[Entry_Name]): Set[Entry_Name] = {
    val it = entry_names.iterator
    if (it.isEmpty) Set.empty[Entry_Name]
    else {
      // one disjunct per requested entry, matched exactly on the full key
      val sql_preds =
        List.from(
          for (entry_name <- it) yield {
            SQL.and(
              Base.session_name.equal(entry_name.session),
              Base.theory_name.equal(entry_name.theory),
              Base.name.equal(entry_name.name)
            )
          })
      db.execute_query_statement(
        Base.table.select(List(Base.session_name, Base.theory_name, Base.name),
          sql = SQL.where(SQL.OR(sql_preds))),
        Set.from[Entry_Name],
        { res =>
          val session_name = res.string(Base.session_name)
          val theory_name = res.string(Base.theory_name)
          val name = res.string(Base.name)
          Entry_Name(session_name, theory_name, name)
        })
    }
  }

  // read a single entry, if present; the body keeps its stored compression state
  def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
    db.execute_query_statementO[Entry](
      Base.table.select(List(Base.executable, Base.compressed, Base.body),
        sql = private_data.where_equal(entry_name.session, entry_name.theory, entry_name.name)),
      { res =>
        val executable = res.bool(Base.executable)
        val compressed = res.bool(Base.compressed)
        val bytes = res.bytes(Base.body)
        val body = Future.value(compressed, bytes)
        Entry(entry_name, executable, body, cache)
      }
    )

  // batch insert; forces each entry's (possibly still running) body future
  def write_entries(db: SQL.Database, entries: List[Entry]): Unit =
    db.execute_batch_statement(Base.table.insert(), batch =
      for (entry <- entries) yield { (stmt: SQL.Statement) =>
        val (compressed, bs) = entry.body.join
        stmt.string(1) = entry.session_name
        stmt.string(2) = entry.theory_name
        stmt.string(3) = entry.name
        stmt.bool(4) = entry.executable
        stmt.bool(5) = compressed
        stmt.bytes(6) = bs
      })

  // distinct theory names of a session, sorted
  def read_theory_names(db: SQL.Database, session_name: String): List[String] =
    db.execute_query_statement(
      Base.table.select(List(Base.theory_name), distinct = true,
        sql = private_data.where_equal(session_name) + SQL.order_by(List(Base.theory_name))),
      List.from[String], res => res.string(Base.theory_name))

  // all entry names of a session, sorted by (theory, name)
  // NB: ORDER BY is part of the select's sql argument, consistent with
  // read_theory_names (previously it was concatenated after the select,
  // which yields the same SQL string but reads inconsistently)
  def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
    db.execute_query_statement(
      Base.table.select(List(Base.theory_name, Base.name),
        sql = private_data.where_equal(session_name) +
          SQL.order_by(List(Base.theory_name, Base.name))),
      List.from[Entry_Name],
      { res =>
        Entry_Name(
          session = session_name,
          theory = res.string(Base.theory_name),
          name = res.string(Base.name))
      })
}
|
127 |
|
/* Join theory and export name as "a:b"; an empty first component yields just "b". */
def compound_name(a: String, b: String): String =
  if (a.nonEmpty) a + ":" + b else b
|
130 |
|
/* Identification of an export entry: session / theory / slash-separated name. */
sealed case class Entry_Name(session: String = "", theory: String = "", name: String = "") {
  // cached compound form "theory:name" (or just the name, if theory is empty)
  val compound_name: String = Export.compound_name(theory, name)

  // file-system path built from the theory plus the exploded export name,
  // with the first `prune` elements removed; fails if nothing would remain
  def make_path(prune: Int = 0): Path = {
    val elems = theory :: space_explode('/', name)
    if (prune < elems.length) Path.make(elems.drop(prune))
    else error("Cannot prune path by " + prune + " element(s): " + Path.make(elems))
  }
}
|
142 |
|
/* Diagnostic message about an export entry within its theory,
   e.g. message("Duplicate export", thy, nm). */
def message(msg: String, theory_name: String, name: String): String =
  List(msg, quote(name), "for theory", quote(theory_name)).mkString(" ")
|
145 |
|
object Entry {
  // plain constructor wrapper (class constructor itself is private)
  def apply(
    entry_name: Entry_Name,
    executable: Boolean,
    body: Future[(Boolean, Bytes)],
    cache: XML.Cache
  ): Entry = new Entry(entry_name, executable, body, cache)

  // placeholder entry with empty, uncompressed body and no cache
  def empty(theory_name: String, name: String): Entry =
    Entry(Entry_Name(theory = theory_name, name = name),
      false, Future.value(false, Bytes.empty), XML.Cache.none)

  // entry from a protocol message: compression (if requested) runs
  // asynchronously in a forked future
  def make(
    session_name: String,
    args: Protocol.Export.Args,
    bytes: Bytes,
    cache: XML.Cache
  ): Entry = {
    val body =
      if (args.compress) Future.fork(bytes.maybe_compress(cache = cache.compress))
      else Future.value((false, bytes))
    val entry_name =
      Entry_Name(session = session_name, theory = args.theory_name, name = args.name)
    Entry(entry_name, args.executable, body, cache)
  }
}
|
172 |
|
/* A single export entry: identification plus a (possibly still pending)
   body future of (compressed?, bytes). */
final class Entry private(
  val entry_name: Entry_Name,
  val executable: Boolean,
  val body: Future[(Boolean, Bytes)],
  val cache: XML.Cache
) {
  def session_name: String = entry_name.session
  def theory_name: String = entry_name.theory
  def name: String = entry_name.name
  override def toString: String = name

  // state of the asynchronous body (e.g. background compression)
  def is_finished: Boolean = body.is_finished
  def cancel(): Unit = body.cancel()

  def compound_name: String = entry_name.compound_name

  def name_has_prefix(s: String): Boolean = name.startsWith(s)
  val name_elems: List[String] = explode_name(name)

  // proper prefix check on exploded name elements (equality excluded)
  def name_extends(elems: List[String]): Boolean =
    name_elems.startsWith(elems) && name_elems != elems

  // uncompressed content; blocks on the body future and
  // decompresses on every call (result is not cached here)
  def bytes: Bytes = {
    val (compressed, bs) = body.join
    if (compressed) bs.uncompress(cache = cache.compress) else bs
  }

  def text: String = bytes.text

  // body parsed as YXML markup
  def yxml: XML.Body = YXML.parse_body(UTF8.decode_permissive(bytes), cache = cache)
}
|
204 |
|
/* Translate a shell-like glob pattern into a Regex:
   "*" matches within one path element (no ':' or '/'), "**" crosses '/'
   but not ':', "?" is a single such character, "{a,b}" is alternation,
   "\c" escapes c, and ".+()" are quoted; other characters (including
   "[...]" char classes) pass through as regex syntax. */
def make_regex(pattern: String): Regex = {
  @tailrec def build(acc: StringBuilder, depth: Int, cs: List[Char]): Regex =
    cs match {
      case '*' :: '*' :: rest => build(acc ++= "[^:]*", depth, rest)
      case '*' :: rest => build(acc ++= "[^:/]*", depth, rest)
      case '?' :: rest => build(acc ++= "[^:/]", depth, rest)
      case '\\' :: c :: rest => build(acc ++= ("\\" + c), depth, rest)
      case '{' :: rest => build(acc += '(', depth + 1, rest)
      case ',' :: rest if depth > 0 => build(acc += '|', depth, rest)
      case '}' :: rest if depth > 0 => build(acc += ')', depth - 1, rest)
      case c :: rest if ".+()".contains(c) => build(acc ++= ("\\" + c), depth, rest)
      case c :: rest => build(acc += c, depth, rest)
      case Nil => acc.toString.r
    }
  build(new StringBuilder, 0, pattern.toList)
}
|
221 |
|
/* Compile several glob patterns into one predicate on entry names:
   an entry matches if any pattern matches its compound "theory:name". */
def make_matcher(pats: List[String]): Entry_Name => Boolean = {
  val regexes = for (pat <- pats) yield make_regex(pat)
  (entry_name: Entry_Name) => {
    val subject = entry_name.compound_name
    regexes.exists(regex => regex.matches(subject))
  }
}
|
226 |
|
/* Public database operations: each one wraps the corresponding
   private_data routine in a transaction lock. */

// delete all exports of a session; create = true sets up tables if needed
def clean_session(db: SQL.Database, session_name: String): Unit =
  private_data.transaction_lock(db, create = true, label = "Export.clean_session") {
    private_data.clean_session(db, session_name)
  }

def read_theory_names(db: SQL.Database, session_name: String): List[String] =
  private_data.transaction_lock(db, label = "Export.read_theory_names") {
    private_data.read_theory_names(db, session_name)
  }

def read_entry_names(db: SQL.Database, session_name: String): List[Entry_Name] =
  private_data.transaction_lock(db, label = "Export.read_entry_names") {
    private_data.read_entry_names(db, session_name)
  }

def read_entry(db: SQL.Database, entry_name: Entry_Name, cache: XML.Cache): Option[Entry] =
  private_data.transaction_lock(db, label = "Export.read_entry") {
    private_data.read_entry(db, entry_name, cache)
  }
|
246 |
|
247 |
|
248 /* database consumer thread */ |
|
249 |
|
// background consumer that writes export entries to the database in batches
def consumer(db: SQL.Database, cache: XML.Cache, progress: Progress = new Progress): Consumer =
  new Consumer(db, cache, progress)

class Consumer private[Export](db: SQL.Database, cache: XML.Cache, progress: Progress) {
  // accumulated error messages, in reverse order of occurrence
  private val errors = Synchronized[List[String]](Nil)

  // write one batch of (entry, strict) pairs; returns one result per argument,
  // so the consumer thread can acknowledge each message
  private def consume(args: List[(Entry, Boolean)]): List[Exn.Result[Unit]] = {
    // force (or cancel, if stopped) all pending bodies before taking the db lock
    for ((entry, _) <- args) {
      if (progress.stopped) entry.cancel() else entry.body.join
    }
    private_data.transaction_lock(db, label = "Export.consumer(" + args.length + ")") {
      // entries already in the database; grows as this batch is processed
      // so that duplicates within the batch are also detected
      var known = private_data.known_entries(db, args.map(p => p._1.entry_name))
      val buffer = new mutable.ListBuffer[Option[Entry]]

      for ((entry, strict) <- args) {
        if (progress.stopped || known(entry.entry_name)) {
          buffer += None
          // duplicates are only an error in strict mode
          if (strict && known(entry.entry_name)) {
            val msg = message("Duplicate export", entry.theory_name, entry.name)
            errors.change(msg :: _)
          }
        }
        else {
          buffer += Some(entry)
          known += entry.entry_name
        }
      }

      val entries = buffer.toList
      try {
        private_data.write_entries(db, entries.flatten)
        val ok = Exn.Res[Unit](())
        entries.map(_ => ok)
      }
      catch {
        // report the same failure for every entry of the batch
        case exn: Throwable =>
          val err = Exn.Exn[Unit](exn)
          entries.map(_ => err)
      }
    }
  }

  // bulk-consuming thread: an element is ready for bulk processing once its
  // body future has finished; batches are written in groups of 20
  private val consumer =
    Consumer_Thread.fork_bulk[(Entry, Boolean)](name = "export")(
      bulk = { case (entry, _) => entry.is_finished },
      consume = args => (args.grouped(20).toList.flatMap(consume), true))

  // enqueue a new export from a protocol message; empty bodies are ignored
  def make_entry(session_name: String, args: Protocol.Export.Args, body: Bytes): Unit = {
    if (!progress.stopped && !body.is_empty) {
      consumer.send(Entry.make(session_name, args, body, cache) -> args.strict)
    }
  }

  // terminate the consumer thread and report collected errors (oldest first)
  def shutdown(close: Boolean = false): List[String] = {
    consumer.shutdown()
    if (close) db.close()
    errors.value.reverse ::: (if (progress.stopped) List("Export stopped") else Nil)
  }
}
|
309 |
|
310 |
|
311 /* context for database access */ |
|
312 |
|
// connect to the database server if configured, else fall back on local files
def open_database_context(store: Store, server: SSH.Server = SSH.no_server): Database_Context =
  new Database_Context(store, store.maybe_open_database_server(server = server))

// open a session context from just a session name (minimal background);
// closing the result also closes the database context
def open_session_context0(
  store: Store,
  session: String,
  server: SSH.Server = SSH.no_server
): Session_Context = {
  open_database_context(store, server = server)
    .open_session0(session, close_database_context = true)
}

// open a session context for a full session background, optionally overlaid
// with a PIDE document snapshot; closing it also closes the database context
def open_session_context(
  store: Store,
  session_background: Sessions.Background,
  document_snapshot: Option[Document.Snapshot] = None,
  server: SSH.Server = SSH.no_server
): Session_Context = {
  open_database_context(store, server = server).open_session(
    session_background, document_snapshot = document_snapshot, close_database_context = true)
}
|
334 |
|
/* Database access for a store: either one shared database-server connection,
   or individual per-session database files. */
class Database_Context private[Export](
  val store: Store,
  val database_server: Option[SQL.Database]
) extends AutoCloseable {
  database_context =>

  override def toString: String = {
    val s =
      database_server match {
        case Some(db) => db.toString
        case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ")
      }
    "Database_Context(" + s + ")"
  }

  def cache: Term.Cache = store.cache

  def close(): Unit = database_server.foreach(_.close())

  // with a database server the Session_Database shares the connection (its
  // close is a no-op); otherwise the per-session database file is opened here
  // and closed together with the Session_Database
  def open_database(session: String, output: Boolean = false): Session_Database =
    database_server match {
      case Some(db) => new Session_Database(session, db)
      case None =>
        new Session_Database(session, store.open_database(session, output = output)) {
          override def close(): Unit = db.close()
        }
    }

  def open_session0(session: String, close_database_context: Boolean = false): Session_Context =
    open_session(Sessions.background0(session), close_database_context = close_database_context)

  // open databases for the whole session hierarchy; if any of them is missing,
  // every database already opened is closed before the error is reported
  def open_session(
    session_background: Sessions.Background,
    document_snapshot: Option[Document.Snapshot] = None,
    close_database_context: Boolean = false
  ): Session_Context = {
    val session_name = session_background.check_errors.session_name
    val session_hierarchy = session_background.sessions_structure.build_hierarchy(session_name)
    val session_databases =
      database_server match {
        case Some(db) => session_hierarchy.map(name => new Session_Database(name, db))
        case None =>
          val attempts =
            for (name <- session_hierarchy)
              yield name -> store.try_open_database(name, server_mode = false)
          attempts.collectFirst({ case (name, None) => name }) match {
            case Some(bad) =>
              // at least one session database is unavailable: clean up and fail
              for (case (_, Some(db)) <- attempts) db.close()
              store.error_database(bad)
            case None =>
              for (case (name, Some(db)) <- attempts) yield {
                new Session_Database(name, db) { override def close(): Unit = this.db.close() }
              }
          }
      }
    new Session_Context(
      database_context, session_background, session_databases, document_snapshot) {
      override def close(): Unit = {
        session_databases.foreach(_.close())
        if (close_database_context) database_context.close()
      }
    }
  }
}
|
399 |
|
/* Database of one session; close() is a no-op by default and is overridden
   by Database_Context when the underlying connection is owned here. */
class Session_Database private[Export](val session: String, val db: SQL.Database)
extends AutoCloseable {
  def close(): Unit = ()

  // cached directory information, read on first demand
  lazy private[Export] val theory_names: List[String] = read_theory_names(db, session)
  lazy private[Export] val entry_names: List[Entry_Name] = read_entry_names(db, session)
}
|
407 |
|
/* Access to exports of a session hierarchy, optionally overlaid with a PIDE
   document snapshot (which takes precedence over database content). */
class Session_Context private[Export](
  val database_context: Database_Context,
  session_background: Sessions.Background,
  db_hierarchy: List[Session_Database],
  val document_snapshot: Option[Document.Snapshot]
) extends AutoCloseable {
  session_context =>

  // overridden by Database_Context.open_session to close the databases
  def close(): Unit = ()

  def cache: Term.Cache = database_context.cache

  def sessions_structure: Sessions.Structure = session_background.sessions_structure

  def session_base: Sessions.Base = session_background.base

  // with a document snapshot, the context acts as session Sessions.DRAFT
  def session_name: String =
    if (document_snapshot.isDefined) Sessions.DRAFT
    else session_base.session_name

  def session_database(session: String = session_name): Option[Session_Database] =
    db_hierarchy.find(_.session == session)

  def session_db(session: String = session_name): Option[SQL.Database] =
    session_database(session = session).map(_.db)

  // sessions from base to top, with the draft session last if present
  def session_stack: List[String] =
    ((if (document_snapshot.isDefined) List(session_name) else Nil) :::
      db_hierarchy.map(_.session)).reverse

  // generic lookup: draft session reads from the snapshot (deduplicated and
  // sorted), other sessions from the cached database directory; an empty
  // session argument means the whole session stack
  private def select[A](
    session: String,
    select: Session_Database => List[A],
    project: Entry_Name => A,
    sort_key: A => String
  ): List[A] = {
    def result(name: String): List[A] =
      if (name == Sessions.DRAFT) {
        (for {
          snapshot <- document_snapshot.iterator
          entry_name <- snapshot.all_exports.keysIterator
        } yield project(entry_name)).toSet.toList.sortBy(sort_key)
      }
      else session_database(name).map(select).getOrElse(Nil)

    if (session.nonEmpty) result(session) else session_stack.flatMap(result)
  }

  def entry_names(session: String = session_name): List[Entry_Name] =
    select(session, _.entry_names, identity, _.compound_name)

  def theory_names(session: String = session_name): List[String] =
    select(session, _.theory_names, _.theory, identity)

  // look up an entry: the document snapshot wins over the databases,
  // which are consulted lazily along the hierarchy (first hit wins)
  def get(theory: String, name: String): Option[Entry] =
  {
    def snapshot_entry: Option[Entry] =
      for {
        snapshot <- document_snapshot
        entry_name = Entry_Name(session = Sessions.DRAFT, theory = theory, name = name)
        entry <- snapshot.all_exports.get(entry_name)
      } yield entry
    def db_entry: Option[Entry] =
      db_hierarchy.view.map { database =>
        val entry_name = Export.Entry_Name(session = database.session, theory = theory, name = name)
        read_entry(database.db, entry_name, cache)
      }.collectFirst({ case Some(entry) => entry })

    snapshot_entry orElse db_entry
  }

  // strict lookup; permissive = true yields an empty placeholder instead of error
  def apply(theory: String, name: String, permissive: Boolean = false): Entry =
    get(theory, name) match {
      case None if permissive => Entry.empty(theory, name)
      case None => error("Missing export entry " + quote(compound_name(theory, name)))
      case Some(entry) => entry
    }

  def theory(theory: String, other_cache: Option[Term.Cache] = None): Theory_Context =
    new Theory_Context(session_context, theory, other_cache)

  // first matching source file along the database hierarchy
  def get_source_file(name: String): Option[Store.Source_File] = {
    val store = database_context.store
    (for {
      database <- db_hierarchy.iterator
      file <- store.read_sources(database.db, database.session, name = name).iterator
    } yield file).nextOption()
  }

  def source_file(name: String): Store.Source_File =
    get_source_file(name).getOrElse(error("Missing session source file " + quote(name)))

  // theory source text: prefer the (non-empty) snapshot version, then the
  // first file recorded in the theory's FILES export; "" if neither exists
  def theory_source(theory: String, unicode_symbols: Boolean = false): String = {
    def snapshot_source: Option[String] =
      for {
        snapshot <- document_snapshot
        text <- snapshot.version.nodes.iterator.collectFirst(
          { case (name, node) if name.theory == theory => node.source })
        if text.nonEmpty
      } yield Symbol.output(unicode_symbols, text)

    def db_source: Option[String] = {
      val theory_context = session_context.theory(theory)
      for {
        name <- theory_context.files0(permissive = true).headOption
        file <- get_source_file(name)
      } yield Symbol.output(unicode_symbols, file.bytes.text)
    }

    snapshot_source orElse db_source getOrElse ""
  }

  // file contents declared via the sessions' export_classpath patterns
  def classpath(): List[File.Content] = {
    (for {
      session <- session_stack.iterator
      info <- sessions_structure.get(session).iterator
      if info.export_classpath.nonEmpty
      matcher = make_matcher(info.export_classpath)
      entry_name <- entry_names(session = session).iterator
      if matcher(entry_name)
      entry <- get(entry_name.theory, entry_name.name).iterator
    } yield File.content(entry.entry_name.make_path(), entry.bytes)).toList
  }

  override def toString: String =
    "Export.Session_Context(" + commas_quote(session_stack) + ")"
}
|
535 |
|
/* Exports of one theory within a Session_Context, with optional cache override. */
class Theory_Context private[Export](
  val session_context: Session_Context,
  val theory: String,
  other_cache: Option[Term.Cache]
) {
  def cache: Term.Cache = other_cache getOrElse session_context.cache

  def get(name: String): Option[Entry] = session_context.get(theory, name)
  def apply(name: String, permissive: Boolean = false): Entry =
    session_context.apply(theory, name, permissive = permissive)

  // parsed YXML body of an export; Nil if the entry is absent
  def yxml(name: String): XML.Body =
    get(name) match {
      case Some(entry) => entry.yxml
      case None => Nil
    }

  // PIDE document id, if recorded and numeric
  def document_id(): Option[Long] =
    apply(DOCUMENT_ID, permissive = true).text match {
      case Value.Long(id) => Some(id)
      case _ => None
    }

  // source files of the theory, as recorded in the FILES export (one per line)
  def files0(permissive: Boolean = false): List[String] =
    split_lines(apply(FILES, permissive = permissive).text)

  // main theory file and auxiliary files, if any are recorded
  def files(permissive: Boolean = false): Option[(String, List[String])] =
    files0(permissive = permissive) match {
      case Nil => None
      case a :: bs => Some((a, bs))
    }

  override def toString: String = "Export.Theory_Context(" + quote(theory) + ")"
}
|
570 |
|
571 |
|
572 /* export to file-system */ |
|
573 |
|
/* List and/or write theory exports of a session to the file system.
   export_prune drops leading path elements; export_patterns selects
   entries via glob patterns on "theory:name" (see make_matcher). */
def export_files(
  store: Store,
  session_name: String,
  export_dir: Path,
  progress: Progress = new Progress,
  export_prune: Int = 0,
  export_list: Boolean = false,
  export_patterns: List[String] = Nil
): Unit = {
  using(store.open_database(session_name)) { db =>
    val entry_names = read_entry_names(db, session_name)

    // list
    if (export_list) {
      for (entry_name <- entry_names) progress.echo(entry_name.compound_name)
    }

    // export
    if (export_patterns.nonEmpty) {
      val matcher = make_matcher(export_patterns)
      for {
        entry_name <- entry_names if matcher(entry_name)
        entry <- read_entry(db, entry_name, store.cache)
      } {
        val path = export_dir + entry_name.make_path(prune = export_prune)
        progress.echo("export " + path + (if (entry.executable) " (executable)" else ""))
        Isabelle_System.make_directory(path.dir)
        // avoid rewriting unchanged files (keeps timestamps stable)
        val bytes = entry.bytes
        if (!path.is_file || Bytes.read(path) != bytes) Bytes.write(path, bytes)
        File.set_executable(path, reset = !entry.executable)
      }
    }
  }
}
|
608 |
|
609 |
|
610 /* Isabelle tool wrapper */ |
|
611 |
|
val default_export_dir: Path = Path.explode("export")

// command-line tool "isabelle export": list and/or extract theory exports
val isabelle_tool =
  Isabelle_Tool("export", "retrieve theory exports", Scala_Project.here,
    { args =>
      /* arguments */

      var export_dir = default_export_dir
      var dirs: List[Path] = Nil
      var export_list = false
      var no_build = false
      var options = Options.init()
      var export_prune = 0
      var export_patterns: List[String] = Nil

      val getopts = Getopts("""
Usage: isabelle export [OPTIONS] SESSION

Options are:
-O DIR output directory for exported files (default: """ + default_export_dir + """)
-d DIR include session directory
-l list exports
-n no build of session
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-p NUM prune path of exported files by NUM elements
-x PATTERN extract files matching pattern (e.g. "*:**" for all)

List or export theory exports for SESSION: named blobs produced by
isabelle build. Option -l or -x is required; option -x may be repeated.

The PATTERN language resembles glob patterns in the shell, with ? and *
(both excluding ":" and "/"), ** (excluding ":"), and [abc] or [^abc],
and variants {pattern1,pattern2,pattern3}.
""",
        "O:" -> (arg => export_dir = Path.explode(arg)),
        "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))),
        "l" -> (_ => export_list = true),
        "n" -> (_ => no_build = true),
        "o:" -> (arg => options = options + arg),
        "p:" -> (arg => export_prune = Value.Int.parse(arg)),
        "x:" -> (arg => export_patterns ::= arg))

      // exactly one session, and at least one of -l / -x is required
      val more_args = getopts(args)
      val session_name =
        more_args match {
          case List(session_name) if export_list || export_patterns.nonEmpty => session_name
          case _ => getopts.usage()
        }

      val progress = new Console_Progress()


      /* build */

      // ensure the session has been built, unless -n was given
      if (!no_build) {
        val rc =
          progress.interrupt_handler {
            Build.build_logic(options, session_name, progress = progress, dirs = dirs)
          }
        if (rc != Process_Result.RC.ok) sys.exit(rc)
      }


      /* export files */

      val store = Store(options)
      export_files(store, session_name, export_dir, progress = progress, export_prune = export_prune,
        export_list = export_list, export_patterns = export_patterns)
    })
|
681 } |
|