
    override def cancel(): Unit = future_result.cancel()
    override def is_finished: Boolean = future_result.is_finished
    override def join: (Process_Result, SHA1.Shasum) = future_result.join
  }


  /* theory markup/messages from session database */

  def read_theory(
    theory_context: Export.Theory_Context,
    unicode_symbols: Boolean = false
  ): Option[Document.Snapshot] = {
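    // Helpers for reading export entries from the session database and
    // decoding their bytes (optionally rendered as Unicode symbols).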
    def decode_bytes(bytes: Bytes): String =
      Symbol.output(unicode_symbols, UTF8.decode_permissive(bytes))

    def read(name: String): Export.Entry = theory_context(name, permissive = true)

    def read_xml(name: String): XML.Body =
      YXML.parse_body(decode_bytes(read(name).bytes), cache = theory_context.cache)

    def read_source_file(name: String): Store.Source_File =
      theory_context.session_context.source_file(name)

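    // A snapshot is produced only if the theory has a document id and
    // associated file names; otherwise the result is None.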
    for {
      id <- theory_context.document_id()
      (thy_file, blobs_files) <- theory_context.files(permissive = true)
    }
    yield {
      val master_dir =
        Path.explode(Url.strip_base_name(thy_file).getOrElse(
          error("Cannot determine theory master directory: " + quote(thy_file))))

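      // Reconstruct the auxiliary files ("blobs") loaded by the theory,
      // together with their decoded text and symbol chunks.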
      val blobs =
        blobs_files.map { name =>
          val path = Path.explode(name)
          val src_path = File.relative_path(master_dir, path).getOrElse(path)

          val file = read_source_file(name)
          val bytes = file.bytes
          val text = decode_bytes(bytes)
          val chunk = Symbol.Text_Chunk(text)

          Command.Blob(Document.Node.Name(name), src_path, Some((file.digest, chunk))) ->
            Document.Blobs.Item(bytes, text, chunk, changed = false)
        }

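      // Theory source and exported markup: one entry for the theory itself
      // and one per blob.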
      val thy_source = decode_bytes(read_source_file(thy_file).bytes)
      val thy_xml = read_xml(Export.MARKUP)
      val blobs_xml =
        for (i <- (1 to blobs.length).toList)
          yield read_xml(Export.MARKUP + i)

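      // Combine the markup bodies into markup trees, indexed per theory/blob.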
      val markups_index = Command.Markup_Index.make(blobs.map(_._1))
      val markups =
        Command.Markups.make(
          for ((index, xml) <- markups_index.zip(thy_xml :: blobs_xml))
            yield index -> Markup_Tree.from_XML(xml))

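      // Output messages from the exported MESSAGES entry, keyed by serial number.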
      val results =
        Command.Results.make(
          for (elem @ XML.Elem(Markup(_, Markup.Serial(i)), _) <- read_xml(Export.MESSAGES))
            yield i -> elem)

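      // Assemble source, blobs, markup and messages into a single unparsed command.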
      val command =
        Command.unparsed(thy_source, theory = true, id = id,
          node_name = Document.Node.Name(thy_file, theory = theory_context.theory),
          blobs_info = Command.Blobs_Info.make(blobs),
          markups = markups, results = results)

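      // Present the command and its blobs as a document snapshot.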
      val doc_blobs = Document.Blobs.make(blobs)

      Document.State.init.snippet(command, doc_blobs)
    }
  }
}