18 |
18 |
/**
 * A loaded file ("blob") attached to a command.
 *
 * @param name     node name of the blob within the document model
 * @param src_path source path as written in the load command
 * @param content  digest and symbol-aware text chunk, if the file content
 *                 is available (None for unresolved/missing blobs)
 */
sealed case class Blob(
  name: Document.Node.Name,
  src_path: Path,
  content: Option[(SHA1.Digest, Symbol.Text_Chunk)])
{
  /** Chunk identifier for this blob, keyed by its node name. */
  def chunk_file: Symbol.Text_Chunk.File =
    Symbol.Text_Chunk.File(name.node)
}
23 |
27 |
24 object Blobs_Info |
28 object Blobs_Info |
25 { |
29 { |
26 val none: Blobs_Info = Blobs_Info(Nil, -1) |
30 val none: Blobs_Info = Blobs_Info(Nil, -1) |
27 |
31 |
372 def unparsed( |
377 def unparsed( |
373 source: String, |
378 source: String, |
374 theory: Boolean = false, |
379 theory: Boolean = false, |
375 id: Document_ID.Command = Document_ID.none, |
380 id: Document_ID.Command = Document_ID.none, |
376 node_name: Document.Node.Name = Document.Node.Name.empty, |
381 node_name: Document.Node.Name = Document.Node.Name.empty, |
|
382 blobs_info: Blobs_Info = Blobs_Info.none, |
377 results: Results = Results.empty, |
383 results: Results = Results.empty, |
378 markups: Markups = Markups.empty): Command = |
384 markups: Markups = Markups.empty): Command = |
379 { |
385 { |
380 val (source1, span1) = Command_Span.unparsed(source, theory).compact_source |
386 val (source1, span1) = Command_Span.unparsed(source, theory).compact_source |
381 new Command(id, node_name, Blobs_Info.none, span1, source1, results, markups) |
387 new Command(id, node_name, blobs_info, span1, source1, results, markups) |
382 } |
388 } |
383 |
389 |
384 def text(source: String): Command = unparsed(source) |
390 def text(source: String): Command = unparsed(source) |
385 |
391 |
386 def rich_text(id: Document_ID.Command, results: Results, body: XML.Body): Command = |
392 def rich_text(id: Document_ID.Command, results: Results, body: XML.Body): Command = |
459 Blob(name, src_path, content) |
465 Blob(name, src_path, content) |
460 }).user_error) |
466 }).user_error) |
461 Blobs_Info(blobs, loaded_files.index) |
467 Blobs_Info(blobs, loaded_files.index) |
462 } |
468 } |
463 } |
469 } |
|
470 |
|
471 def build_blobs_info( |
|
472 syntax: Outer_Syntax, |
|
473 node_name: Document.Node.Name, |
|
474 load_commands: List[Command_Span.Span]): Blobs_Info = |
|
475 { |
|
476 val blobs = |
|
477 for { |
|
478 span <- load_commands |
|
479 file <- span.loaded_files(syntax).files |
|
480 } yield { |
|
481 (Exn.capture { |
|
482 val dir = Path.explode(node_name.master_dir) |
|
483 val src_path = Path.explode(file) |
|
484 val name = Document.Node.Name((dir + src_path).expand.implode_symbolic) |
|
485 val bytes = Bytes.read(name.path) |
|
486 val chunk = Symbol.Text_Chunk(bytes.text) |
|
487 Blob(name, src_path, Some((bytes.sha1_digest, chunk))) |
|
488 }).user_error |
|
489 } |
|
490 Blobs_Info(blobs, -1) |
|
491 } |
464 } |
492 } |
465 |
493 |
466 |
494 |
467 final class Command private( |
495 final class Command private( |
468 val id: Document_ID.Command, |
496 val id: Document_ID.Command, |
514 val chunk: Symbol.Text_Chunk = Symbol.Text_Chunk(source) |
542 val chunk: Symbol.Text_Chunk = Symbol.Text_Chunk(source) |
515 |
543 |
516 val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] = |
544 val chunks: Map[Symbol.Text_Chunk.Name, Symbol.Text_Chunk] = |
517 ((Symbol.Text_Chunk.Default -> chunk) :: |
545 ((Symbol.Text_Chunk.Default -> chunk) :: |
518 (for (Exn.Res(blob) <- blobs; (_, file) <- blob.content) |
546 (for (Exn.Res(blob) <- blobs; (_, file) <- blob.content) |
519 yield Symbol.Text_Chunk.File(blob.name.node) -> file)).toMap |
547 yield blob.chunk_file -> file)).toMap |
520 |
548 |
521 def length: Int = source.length |
549 def length: Int = source.length |
522 def range: Text.Range = chunk.range |
550 def range: Text.Range = chunk.range |
523 |
551 |
524 val core_range: Text.Range = |
552 val core_range: Text.Range = |