/*  Title:      Pure/PIDE/resources.scala
    Author:     Makarius

Resources for theories and auxiliary files.
*/

package isabelle


import scala.annotation.tailrec
import scala.util.parsing.input.Reader

import java.io.{File => JFile}


class Resources(
  val session_base: Sessions.Base,
  val log: Logger = No_Logger)
{
  resources =>


  /* file formats */

  val file_formats: File_Format.Environment = File_Format.environment()

  def make_theory_name(name: Document.Node.Name): Option[Document.Node.Name] =
    file_formats.get(name).flatMap(_.make_theory_name(resources, name))

  def make_theory_content(thy_name: Document.Node.Name): Option[String] =
    file_formats.get_theory(thy_name).flatMap(_.make_theory_content(resources, thy_name))

  def make_preview(snapshot: Document.Snapshot): Option[Present.Preview] =
    file_formats.get(snapshot.node_name).flatMap(_.make_preview(snapshot))

  def is_hidden(name: Document.Node.Name): Boolean =
    !name.is_theory || name.theory == Sessions.root_name || file_formats.is_theory(name)

  def thy_path(path: Path): Path = path.ext("thy")


  /* file-system operations */

  def append(dir: String, source_path: Path): String =
    (Path.explode(dir) + source_path).expand.implode

  def append(node_name: Document.Node.Name, source_path: Path): String =
    append(node_name.master_dir, source_path)


  /* source files of Isabelle/ML bootstrap */

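  // Locate a source file of the Isabelle/ML bootstrap: absolute or current-directory
  // paths are checked directly, other relative paths are tried under ~~/src/Pure and
  // then under $ML_SOURCES (if set); raw names that are not well-formed Isabelle
  // paths are passed through unchanged.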
  def source_file(raw_name: String): Option[String] =
  {
    if (Path.is_wellformed(raw_name)) {
      if (Path.is_valid(raw_name)) {
        def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None

        val path = Path.explode(raw_name)
        val path1 =
          if (path.is_absolute || path.is_current) check(path)
          else {
            check(Path.explode("~~/src/Pure") + path) orElse
              (if (Isabelle_System.getenv("ML_SOURCES") == "") None
               else check(Path.explode("$ML_SOURCES") + path))
          }
        Some(File.platform_path(path1 getOrElse path))
      }
      else None
    }
    else Some(raw_name)
  }


  /* theory files */

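  // Determine the auxiliary files referenced by load commands of a theory: the theory
  // text is read eagerly, but parsing and path resolution happen only when the
  // returned closure is applied (cf. the parallel evaluation in
  // Dependencies.loaded_files below).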
  def loaded_files(syntax: Outer_Syntax, name: Document.Node.Name): () => List[Path] =
  {
    val (is_utf8, raw_text) =
      with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString))
    () => {
      if (syntax.load_commands_in(raw_text)) {
        val text = Symbol.decode(Scan.reader_decode_utf8(is_utf8, raw_text))
        val spans = syntax.parse_spans(text)
        val dir = Path.explode(name.master_dir)
        spans.iterator.map(Command.span_files(syntax, _)._1).flatten.
          map(a => (dir + Path.explode(a)).expand).toList
      }
      else Nil
    }
  }

  def pure_files(syntax: Outer_Syntax): List[Path] =
  {
    val pure_dir = Path.explode("~~/src/Pure")
    val roots =
      for { (name, _) <- Thy_Header.ml_roots }
      yield (pure_dir + Path.explode(name)).expand
    val files =
      for {
        (path, (_, theory)) <- roots zip Thy_Header.ml_roots
        file <- loaded_files(syntax, Document.Node.Name(path.implode, path.dir.implode, theory))()
      } yield file
    roots ::: files
  }

  def theory_name(qualifier: String, theory: String): String =
    if (Long_Name.is_qualified(theory) || session_base.global_theories.isDefinedAt(theory))
      theory
    else Long_Name.qualify(qualifier, theory)

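  // Resolve a theory import: the name is session-qualified (unless already long or
  // global), then looked up among loaded and known theories of the session base;
  // otherwise a fresh node name is derived from the prospective .thy file relative
  // to dir.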
  def import_name(qualifier: String, dir: String, s: String): Document.Node.Name =
  {
    val theory = theory_name(qualifier, Thy_Header.import_name(s))
    if (session_base.loaded_theory(theory)) Document.Node.Name.loaded_theory(theory)
    else {
      session_base.known_theory(theory) match {
        case Some(node_name) => node_name
        case None =>
          if (Thy_Header.is_base_name(s) && Long_Name.is_qualified(s))
            Document.Node.Name.loaded_theory(theory)
          else {
            val path = Path.explode(s)
            val node = append(dir, thy_path(path))
            val master_dir = append(dir, path.dir)
            Document.Node.Name(node, master_dir, theory)
          }
      }
    }
  }

  def import_name(name: Document.Node.Name, s: String): Document.Node.Name =
    import_name(session_base.theory_qualifier(name), name.master_dir, s)

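  // Produce a standardized import specification: prefer the plain theory name for
  // loaded theories, or the (base) name of a known theory file, but keep the
  // original string s unless the candidate resolves to the same node.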
  def standard_import(base: Sessions.Base, qualifier: String, dir: String, s: String): String =
  {
    val name = import_name(qualifier, dir, s)
    val s1 =
      if (session_base.loaded_theory(name)) name.theory
      else {
        (try { Some(name.path) } catch { case ERROR(_) => None }) match {
          case None => s
          case Some(path) =>
            session_base.known.get_file(path.file) match {
              case Some(name1) if base.theory_qualifier(name1) != qualifier =>
                name1.theory
              case Some(name1) if Thy_Header.is_base_name(s) =>
                name1.theory_base_name
              case _ => s
            }
        }
      }
    val name2 = import_name(qualifier, dir, s1)
    if (name.node == name2.node) s1 else s
  }

  def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A =
  {
    val path = name.path
    if (path.is_file) using(Scan.byte_reader(path.file))(f)
    else if (name.node == name.theory)
      error("Cannot load theory " + quote(name.theory))
    else error("Cannot load theory file " + path)
  }

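  // Read and check a theory header: the declared theory name must match the file's
  // base name and imports must not use excluded names; any failure is recorded as a
  // "bad header" rather than being propagated as an exception.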
  def check_thy_reader(node_name: Document.Node.Name, reader: Reader[Char],
    start: Token.Pos = Token.Pos.command, strict: Boolean = true): Document.Node.Header =
  {
    if (node_name.is_theory && reader.source.length > 0) {
      try {
        val header = Thy_Header.read(reader, start, strict)

        val base_name = node_name.theory_base_name
        val (name, pos) = header.name
        if (base_name != name)
          error("Bad theory name " + quote(name) +
            " for file " + thy_path(Path.basic(base_name)) + Position.here(pos) +
            Completion.report_theories(pos, List(base_name)))

        val imports =
          header.imports.map({ case (s, pos) =>
            val name = import_name(node_name, s)
            if (Sessions.exclude_theory(name.theory_base_name))
              error("Bad theory name " + quote(name.theory_base_name) + Position.here(pos))
            (name, pos)
          })
        Document.Node.Header(imports, header.keywords, header.abbrevs)
      }
      catch { case exn: Throwable => Document.Node.bad_header(Exn.message(exn)) }
    }
    else Document.Node.no_header
  }

  def check_thy(name: Document.Node.Name, start: Token.Pos = Token.Pos.command,
      strict: Boolean = true): Document.Node.Header =
    with_thy_reader(name, check_thy_reader(name, _, start, strict))


  /* special header */

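  // Synthetic headers for special nodes: session ROOT files, ML root files and
  // bootstrap theories receive fixed imports instead of a parsed theory header.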
  def special_header(name: Document.Node.Name): Option[Document.Node.Header] =
  {
    val imports =
      if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_name))
      else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP))
      else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE))
      else Nil
    if (imports.isEmpty) None
    else Some(Document.Node.Header(imports.map((_, Position.none))))
  }


  /* blobs */

  def undefined_blobs(nodes: Document.Nodes): List[Document.Node.Name] =
    (for {
      (node_name, node) <- nodes.iterator
      if !session_base.loaded_theory(node_name)
      cmd <- node.load_commands.iterator
      name <- cmd.blobs_undefined.iterator
    } yield name).toList


  /* document changes */

  def parse_change(
      reparse_limit: Int,
      previous: Document.Version,
      doc_blobs: Document.Blobs,
      edits: List[Document.Edit_Text],
      consolidate: List[Document.Node.Name]): Session.Change =
    Thy_Syntax.parse_change(resources, reparse_limit, previous, doc_blobs, edits, consolidate)

  def commit(change: Session.Change) { }


  /* theory and file dependencies */

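  // Compute the transitive closure of theory dependencies, either for an explicit
  // list of theories or for all theories of a session (keeping their options as
  // adjunct data).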
  def dependencies(
      thys: List[(Document.Node.Name, Position.T)],
      progress: Progress = No_Progress): Dependencies[Unit] =
    Dependencies.empty[Unit].require_thys((), thys, progress = progress)

  def session_dependencies(info: Sessions.Info, progress: Progress = No_Progress)
    : Dependencies[Options] =
  {
    val qualifier = info.name
    val dir = info.dir.implode

    (Dependencies.empty[Options] /: info.theories)({ case (dependencies, (options, thys)) =>
      dependencies.require_thys(options,
        for { (thy, pos) <- thys } yield (import_name(qualifier, dir, thy), pos),
        progress = progress)
    })
  }
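
  /* Minimal usage sketch, assuming a suitable Sessions.Base is available as "base";
     the qualifier "Draft", directory "." and theory name "A" are illustrative only:

       val resources = new Resources(base)
       val thy = resources.import_name("Draft", ".", "A")
       val deps = resources.dependencies(List((thy, Position.none)))
       deps.check_errors.theories.foreach(name => println(name.theory))
  */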

  object Dependencies
  {
    def empty[A]: Dependencies[A] = new Dependencies[A](Nil, Map.empty)

    private def show_path(names: List[Document.Node.Name]): String =
      names.map(name => quote(name.theory)).mkString(" via ")

    private def cycle_msg(names: List[Document.Node.Name]): String =
      "Cyclic dependency of " + show_path(names)

    private def required_by(initiators: List[Document.Node.Name]): String =
      if (initiators.isEmpty) ""
      else "\n(required by " + show_path(initiators.reverse) + ")"
  }

  final class Dependencies[A] private(
    rev_entries: List[Document.Node.Entry],
    seen: Map[Document.Node.Name, A])
  {
    private def cons(entry: Document.Node.Entry): Dependencies[A] =
      new Dependencies[A](entry :: rev_entries, seen)

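    // Depth-first traversal of theory imports: theories already seen or already loaded
    // in the session base are skipped, cycles along the "initiators" path are reported,
    // and header errors are recorded as bad-header entries instead of aborting the
    // traversal.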
    def require_thy(adjunct: A,
      thy: (Document.Node.Name, Position.T),
      initiators: List[Document.Node.Name] = Nil,
      progress: Progress = No_Progress): Dependencies[A] =
    {
      val (name, pos) = thy

      def message: String =
        "The error(s) above occurred for theory " + quote(name.theory) +
          Dependencies.required_by(initiators) + Position.here(pos)

      if (seen.isDefinedAt(name)) this
      else {
        val dependencies1 = new Dependencies[A](rev_entries, seen + (name -> adjunct))
        if (session_base.loaded_theory(name)) dependencies1
        else {
          try {
            if (initiators.contains(name)) error(Dependencies.cycle_msg(initiators))

            progress.expose_interrupt()
            val header =
              try { check_thy(name, Token.Pos.file(name.node)).cat_errors(message) }
              catch { case ERROR(msg) => cat_error(msg, message) }
            val entry = Document.Node.Entry(name, header)
            dependencies1.require_thys(adjunct, header.imports,
              initiators = name :: initiators, progress = progress).cons(entry)
          }
          catch {
            case e: Throwable =>
              dependencies1.cons(Document.Node.Entry(name, Document.Node.bad_header(Exn.message(e))))
          }
        }
      }
    }

    def require_thys(adjunct: A,
        thys: List[(Document.Node.Name, Position.T)],
        progress: Progress = No_Progress,
        initiators: List[Document.Node.Name] = Nil): Dependencies[A] =
      (this /: thys)(_.require_thy(adjunct, _, progress = progress, initiators = initiators))

    def entries: List[Document.Node.Entry] = rev_entries.reverse

    def theories: List[Document.Node.Name] = entries.map(_.name)
    def adjunct_theories: List[(A, Document.Node.Name)] = theories.map(name => (seen(name), name))

    def errors: List[String] = entries.flatMap(_.header.errors)

    def check_errors: Dependencies[A] =
      errors match {
        case Nil => this
        case errs => error(cat_lines(errs))
      }

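    // Extend the theory graph of the session base with the new entries: each theory
    // node carries the outer syntax merged from its imports and its own header
    // declarations (Pure additionally includes the bootstrap syntax).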
    lazy val loaded_theories: Graph[String, Outer_Syntax] =
      (session_base.loaded_theories /: entries)({ case (graph, entry) =>
        val name = entry.name.theory
        val imports = entry.header.imports.map(p => p._1.theory)

        val graph1 = (graph /: (name :: imports))(_.default_node(_, Outer_Syntax.empty))
        val graph2 = (graph1 /: imports)(_.add_edge(_, name))

        val syntax0 = if (name == Thy_Header.PURE) List(Thy_Header.bootstrap_syntax) else Nil
        val syntax1 = (name :: graph2.imm_preds(name).toList).map(graph2.get_node(_))
        val syntax = Outer_Syntax.merge(syntax0 ::: syntax1) + entry.header

        graph2.map_node(name, _ => syntax)
      })

    def loaded_files(pure: Boolean): List[(String, List[Path])] =
    {
      val loaded_files =
        theories.map(_.theory) zip
          Par_List.map((e: () => List[Path]) => e(),
            theories.map(name =>
              resources.loaded_files(loaded_theories.get_node(name.theory), name)))

      if (pure) {
        val pure_files = resources.pure_files(overall_syntax)
        loaded_files.map({ case (name, files) =>
          (name, if (name == Thy_Header.PURE) pure_files ::: files else files) })
      }
      else loaded_files
    }

    def imported_files: List[Path] =
    {
      val base_theories =
        loaded_theories.all_preds(theories.map(_.theory)).
          filter(session_base.loaded_theories.defined(_))

      base_theories.map(theory => session_base.known.theories(theory).name.path) :::
      base_theories.flatMap(session_base.known.loaded_files(_))
    }

    lazy val overall_syntax: Outer_Syntax =
      Outer_Syntax.merge(loaded_theories.maximals.map(loaded_theories.get_node(_)))

    override def toString: String = entries.toString
  }
}