@@ -159,17 +160,21 @@
       // commit: must not block, must not fail
       commit: Option[(Document.Snapshot, Document_Status.Node_Status) => Unit] = None,
       commit_cleanup_delay: Time = default_commit_cleanup_delay,
       progress: Progress = No_Progress): Use_Theories_Result =
     {
-      val dep_theories =
+      val dependencies =
       {
         val import_names =
           theories.map(thy =>
             resources.import_name(qualifier, master_directory(master_dir), thy) -> Position.none)
-        resources.dependencies(import_names, progress = progress).check_errors.theories
+        resources.dependencies(import_names, progress = progress).check_errors
       }
+      val dep_theories = dependencies.theories
+      val dep_files =
+        dependencies.loaded_files(false).flatMap(_._2).
+          map(path => Document.Node.Name(resources.append("", path)))

       val use_theories_state = Synchronized(Use_Theories_State())

       def check_result(beyond_limit: Boolean = false)
       {
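
The dependency computation is reshaped here so that the checked `dependencies` value stays available: the theory list is still taken from it, but the auxiliary files loaded by those theories (`loaded_files`) are now also turned into document node names. A minimal standalone sketch of the same factoring, using hypothetical `Dependencies` and `NodeName` stand-ins for the Isabelle types:

  // Hypothetical stand-ins for resources.dependencies(...) and Document.Node.Name.
  final case class NodeName(node: String)

  final case class Dependencies(theories: List[String], loadedFiles: List[String]) {
    def checkErrors: Dependencies = this  // assume no dependency errors in this sketch
  }

  object DepSketch {
    def main(args: Array[String]): Unit = {
      // compute once, then derive both views from the same checked value
      val dependencies = Dependencies(List("A", "B"), List("a.ML", "b.ML")).checkErrors
      val depTheories = dependencies.theories
      val depFiles = dependencies.loadedFiles.map(NodeName.apply)
      println((depTheories, depFiles))
    }
  }
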
@@ -256,11 +261,11 @@
         }
       }

       try {
         session.commands_changed += consumer
-        resources.load_theories(session, id, dep_theories, progress)
+        resources.load_theories(session, id, dep_theories, dep_files, progress)
         use_theories_state.value.await_result
         check_progress.cancel
       }
       finally {
         session.commands_changed -= consumer
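
`load_theories` now receives `dep_files` alongside `dep_theories`, so the prover session learns about auxiliary files as well. The surrounding `try`/`finally` is the usual listener discipline: the `consumer` attached to `session.commands_changed` is detached even if loading or `await_result` fails. The same pattern in a self-contained sketch, with a hypothetical `Event` bus standing in for `session.commands_changed`:

  import scala.collection.mutable

  // Minimal event bus standing in for session.commands_changed.
  final class Event[A] {
    private val consumers = mutable.ListBuffer[A => Unit]()
    def +=(c: A => Unit): Unit = consumers += c
    def -=(c: A => Unit): Unit = consumers -= c
    def post(a: A): Unit = consumers.foreach(_(a))
  }

  object ListenerSketch {
    def main(args: Array[String]): Unit = {
      val commandsChanged = new Event[String]
      val consumer: String => Unit = msg => println("changed: " + msg)
      try {
        commandsChanged += consumer
        commandsChanged.post("Theory A")   // work in between may also throw
      }
      finally commandsChanged -= consumer  // always detach, even on failure
    }
  }
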
@@ -339,13 +344,55 @@
       if (required == node_required) this
       else new Theory(node_name, node_header, text, required)
     }

     sealed case class State(
-      required: Multi_Map[Document.Node.Name, UUID.T] = Multi_Map.empty,
-      theories: Map[Document.Node.Name, Theory] = Map.empty)
+      blobs: Map[Document.Node.Name, Document.Blob] = Map.empty,
+      theories: Map[Document.Node.Name, Theory] = Map.empty,
+      required: Multi_Map[Document.Node.Name, UUID.T] = Multi_Map.empty)
     {
+      /* blobs */
+
+      def doc_blobs: Document.Blobs = Document.Blobs(blobs)
+
+      def update_blobs(names: List[Document.Node.Name]): (Document.Blobs, State) =
+      {
+        val new_blobs =
+          names.flatMap(name =>
+            {
+              val bytes = Bytes.read(name.path)
+              def new_blob: Document.Blob =
+              {
+                val text = bytes.text
+                Document.Blob(bytes, text, Symbol.Text_Chunk(text), changed = true)
+              }
+              blobs.get(name) match {
+                case Some(blob) => if (blob.bytes == bytes) None else Some(name -> new_blob)
+                case None => Some(name -> new_blob)
+              }
+            })
+        val blobs1 = (blobs /: new_blobs)(_ + _)
+        val blobs2 = (blobs /: new_blobs)({ case (map, (a, b)) => map + (a -> b.unchanged) })
+        (Document.Blobs(blobs1), copy(blobs = blobs2))
+      }
+
+      def blob_edits(name: Document.Node.Name, old_blob: Option[Document.Blob])
+        : List[Document.Edit_Text] =
+      {
+        val blob = blobs.getOrElse(name, error("Missing blob " + quote(name.toString)))
+        val text_edits =
+          old_blob match {
+            case None => List(Text.Edit.insert(0, blob.source))
+            case Some(blob0) => Text.Edit.replace(0, blob0.source, blob.source)
+          }
+        if (text_edits.isEmpty) Nil
+        else List(name -> Document.Node.Blob(blob), name -> Document.Node.Edits(text_edits))
+      }
+
+
+      /* theories */
+
       lazy val theory_graph: Graph[Document.Node.Name, Unit] =
       {
         val entries =
           for ((name, theory) <- theories.toList)
             yield ((name, ()), theory.node_header.imports.map(_._1).filter(theories.isDefinedAt(_)))
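
`update_blobs` re-reads the given files and does double bookkeeping over the same fold: `blobs1` keeps the `changed = true` blobs that are handed to the prover, while `blobs2` stores `unchanged` copies in the new `State`, so the next round compares against clean content and only files whose bytes actually differ produce edits. (`(m /: xs)(f)` is Scala 2 shorthand for `xs.foldLeft(m)(f)`.) The same idea in a standalone sketch with a hypothetical `Blob` type:

  // Hypothetical blob: content plus a dirty flag, loosely modelled on Document.Blob.
  final case class Blob(content: String, changed: Boolean) {
    def unchanged: Blob = copy(changed = false)
  }

  object BlobSketch {
    // Returns (blobs to send, blobs to remember): the sent map flags changes,
    // the remembered map is marked clean for the next comparison round.
    def updateBlobs(
        old: Map[String, Blob],
        read: Map[String, String]): (Map[String, Blob], Map[String, Blob]) = {
      val newBlobs =
        read.toList.flatMap { case (name, content) =>
          old.get(name) match {
            case Some(b) if b.content == content => None  // unchanged: no edit
            case _ => Some(name -> Blob(content, changed = true))
          }
        }
      (old ++ newBlobs, old ++ newBlobs.map { case (a, b) => a -> b.unchanged })
    }

    def main(args: Array[String]): Unit = {
      val old = Map("a.ML" -> Blob("x", changed = false))
      val (send, remember) = updateBlobs(old, Map("a.ML" -> "y", "b.ML" -> "z"))
      println(send)      // new entries with changed = true
      println(remember)  // same entries, marked clean
    }
  }
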
@@ -387,11 +434,11 @@
           yield {
             val theory1 = theory.required(st1.is_required(node_name))
             val edits = theory1.node_edits(Some(theory))
             (edits, (node_name, theory1))
           }
-        session.update(Document.Blobs.empty, theory_edits.flatMap(_._1))
+        session.update(doc_blobs, theory_edits.flatMap(_._1))
         st1.update_theories(theory_edits.map(_._2))
       }

     def purge_theories(session: Session, nodes: List[Document.Node.Name])
       : ((List[Document.Node.Name], List[Document.Node.Name]), State) =
@@ -401,11 +448,11 @@

       val retain = theory_graph.all_preds(all_nodes.filterNot(purge)).toSet
       val (retained, purged) = all_nodes.partition(retain)

       val purge_edits = purged.flatMap(name => theories(name).purge_edits)
-      session.update(Document.Blobs.empty, purge_edits)
+      session.update(doc_blobs, purge_edits)

       ((purged, retained), remove_theories(purged))
     }

     def frontier_theories(clean: Set[Document.Node.Name]): Set[Document.Node.Name] =
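
Both `update_theories` and `purge_theories` previously sent `Document.Blobs.empty` with their edits; they now pass the state's `doc_blobs`, so the prover's view of auxiliary files survives theory updates and purges. A toy model of why the accumulated map matters, with a hypothetical `update` that resolves blob references against whatever map it is given:

  object DocBlobsSketch {
    type Blobs = Map[String, String]

    // Hypothetical session update: resolves each referenced file
    // against the blobs map passed along with the edits.
    def update(blobs: Blobs, referenced: List[String]): List[String] =
      referenced.map(name => blobs.getOrElse(name, name + ": MISSING"))

    def main(args: Array[String]): Unit = {
      val docBlobs: Blobs = Map("a.ML" -> "val x = 1")
      println(update(Map.empty, List("a.ML")))  // empty map: file content lost
      println(update(docBlobs, List("a.ML")))   // accumulated map: content retained
    }
  }
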
@@ -492,20 +540,24 @@
     val loaded = loaded_theories.length
     if (loaded > 1) progress.echo("Loading " + loaded + " theories ...")

     state.change(st =>
       {
-        val st1 = st.insert_required(id, dep_theories)
+        val (doc_blobs1, st1) = st.insert_required(id, dep_theories).update_blobs(dep_files)
         val theory_edits =
           for (theory <- loaded_theories)
           yield {
             val node_name = theory.node_name
             val theory1 = theory.required(st1.is_required(node_name))
             val edits = theory1.node_edits(st1.theories.get(node_name))
             (edits, (node_name, theory1))
           }
-        session.update(Document.Blobs.empty, theory_edits.flatMap(_._1))
+        val file_edits =
+          for { node_name <- dep_files if doc_blobs1.changed(node_name) }
+            yield st1.blob_edits(node_name, st.blobs.get(node_name))
+
+        session.update(doc_blobs1, theory_edits.flatMap(_._1) ::: file_edits.flatten)
         st1.update_theories(theory_edits.map(_._2))
       })
   }

   def unload_theories(session: Session, id: UUID.T, dep_theories: List[Document.Node.Name])
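
`load_theories` now threads the freshly read files through `update_blobs` and emits `file_edits` only for nodes whose content actually changed (`doc_blobs1.changed(node_name)`), sending theory and file edits to the prover in a single `session.update`. The per-file edit computation is a whole-text insert-or-replace, roughly as in this standalone sketch (hypothetical `TextEdit` types; the real `Text.Edit.replace` computes a finer diff):

  // Hypothetical whole-text edits, loosely modelled on Text.Edit.
  sealed trait TextEdit
  final case class Insert(offset: Int, text: String) extends TextEdit
  final case class Remove(offset: Int, text: String) extends TextEdit

  object BlobEditsSketch {
    // No previous blob: insert everything; changed content: replace old by new;
    // identical content: no edits at all.
    def blobEdits(oldSource: Option[String], newSource: String): List[TextEdit] =
      oldSource match {
        case None => List(Insert(0, newSource))
        case Some(old) if old == newSource => Nil
        case Some(old) => List(Remove(0, old), Insert(0, newSource))
      }

    def main(args: Array[String]): Unit = {
      println(blobEdits(None, "val x = 1"))              // initial load
      println(blobEdits(Some("val x = 1"), "val x = 1")) // unchanged: Nil
      println(blobEdits(Some("val x = 1"), "val x = 2")) // changed: replace
    }
  }
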