10 import scala.util.parsing.input.Reader |
10 import scala.util.parsing.input.Reader |
11 |
11 |
12 import java.io.{File => JFile} |
12 import java.io.{File => JFile} |
13 |
13 |
14 |
14 |
15 object Resources |
15 object Resources { |
16 { |
|
/* Empty resources: the empty session structure together with its bootstrap base. */
def empty: Resources =
  new Resources(Sessions.Structure.empty, Sessions.Structure.empty.bootstrap)
19 |
18 |
/* Make a document node name for a plain file, via the empty resources context. */
def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name =
  empty.file_node(file, dir = dir, theory = theory)
29 |
28 |
30 class Resources( |
29 class Resources( |
31 val sessions_structure: Sessions.Structure, |
30 val sessions_structure: Sessions.Structure, |
32 val session_base: Sessions.Base, |
31 val session_base: Sessions.Base, |
33 val log: Logger = No_Logger, |
32 val log: Logger = No_Logger, |
34 command_timings: List[Properties.T] = Nil) |
33 command_timings: List[Properties.T] = Nil |
35 { |
34 ) { |
36 resources => |
35 resources => |
37 |
36 |
38 |
37 |
39 /* init session */ |
38 /* init session */ |
40 |
39 |
41 def init_session_yxml: String = |
40 def init_session_yxml: String = { |
42 { |
|
43 import XML.Encode._ |
41 import XML.Encode._ |
44 |
42 |
45 YXML.string_of_body( |
43 YXML.string_of_body( |
46 pair(list(pair(string, properties)), |
44 pair(list(pair(string, properties)), |
47 pair(list(pair(string, string)), |
45 pair(list(pair(string, string)), |
76 (Path.explode(dir) + source_path).expand.implode |
74 (Path.explode(dir) + source_path).expand.implode |
77 |
75 |
/* Resolve source_path relative to the master directory of the given node. */
def append(node_name: Document.Node.Name, source_path: Path): String =
  append(node_name.master_dir, source_path)
80 |
78 |
/* Make a document node name for a physical file: both the node path and its
   master directory are resolved relative to dir. */
def file_node(file: Path, dir: String = "", theory: String = ""): Document.Node.Name = {
  val node = append(dir, file)
  val master_dir = append(dir, file.dir)
  Document.Node.Name(node, master_dir, theory)
}
87 |
84 |
89 Document.Node.Name(theory, "", theory) |
86 Document.Node.Name(theory, "", theory) |
90 |
87 |
91 |
88 |
92 /* source files of Isabelle/ML bootstrap */ |
89 /* source files of Isabelle/ML bootstrap */ |
93 |
90 |
94 def source_file(raw_name: String): Option[String] = |
91 def source_file(raw_name: String): Option[String] = { |
95 { |
|
96 if (Path.is_wellformed(raw_name)) { |
92 if (Path.is_wellformed(raw_name)) { |
97 if (Path.is_valid(raw_name)) { |
93 if (Path.is_valid(raw_name)) { |
98 def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None |
94 def check(p: Path): Option[Path] = if (p.is_file) Some(p) else None |
99 |
95 |
100 val path = Path.explode(raw_name) |
96 val path = Path.explode(raw_name) |
113 } |
109 } |
114 |
110 |
115 |
111 |
116 /* theory files */ |
112 /* theory files */ |
117 |
113 |
118 def load_commands(syntax: Outer_Syntax, name: Document.Node.Name) |
114 def load_commands( |
119 : () => List[Command_Span.Span] = |
115 syntax: Outer_Syntax, |
120 { |
116 name: Document.Node.Name |
|
117 ) : () => List[Command_Span.Span] = { |
121 val (is_utf8, raw_text) = |
118 val (is_utf8, raw_text) = |
122 with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString)) |
119 with_thy_reader(name, reader => (Scan.reader_is_utf8(reader), reader.source.toString)) |
123 () => |
120 () => |
124 { |
121 { |
125 if (syntax.has_load_commands(raw_text)) { |
122 if (syntax.has_load_commands(raw_text)) { |
128 } |
125 } |
129 else Nil |
126 else Nil |
130 } |
127 } |
131 } |
128 } |
132 |
129 |
/* Files referenced by load commands within the given spans, resolved
   relative to the node's master directory and expanded. */
def loaded_files(
  syntax: Outer_Syntax,
  name: Document.Node.Name,
  spans: List[Command_Span.Span]
) : List[Path] = {
  val dir = name.master_dir_path
  for { span <- spans; file <- span.loaded_files(syntax).files }
  yield (dir + Path.explode(file)).expand
}
140 |
139 |
141 def pure_files(syntax: Outer_Syntax): List[Path] = |
140 def pure_files(syntax: Outer_Syntax): List[Path] = { |
142 { |
|
143 val pure_dir = Path.explode("~~/src/Pure") |
141 val pure_dir = Path.explode("~~/src/Pure") |
144 for { |
142 for { |
145 (name, theory) <- Thy_Header.ml_roots |
143 (name, theory) <- Thy_Header.ml_roots |
146 path = (pure_dir + Path.explode(name)).expand |
144 path = (pure_dir + Path.explode(name)).expand |
147 node_name = Document.Node.Name(path.implode, path.dir.implode, theory) |
145 node_name = Document.Node.Name(path.implode, path.dir.implode, theory) |
/* Qualify a theory name by its session qualifier, unless it is already
   long-qualified or registered as a global theory. */
def theory_name(qualifier: String, theory: String): String =
  if (Long_Name.is_qualified(theory) || session_base.global_theories.isDefinedAt(theory))
    theory
  else Long_Name.qualify(qualifier, theory)
156 |
154 |
157 def find_theory_node(theory: String): Option[Document.Node.Name] = |
155 def find_theory_node(theory: String): Option[Document.Node.Name] = { |
158 { |
|
159 val thy_file = Path.basic(Long_Name.base_name(theory)).thy |
156 val thy_file = Path.basic(Long_Name.base_name(theory)).thy |
160 val session = session_base.theory_qualifier(theory) |
157 val session = session_base.theory_qualifier(theory) |
161 val dirs = |
158 val dirs = |
162 sessions_structure.get(session) match { |
159 sessions_structure.get(session) match { |
163 case Some(info) => info.dirs |
160 case Some(info) => info.dirs |
165 } |
162 } |
166 dirs.collectFirst({ |
163 dirs.collectFirst({ |
167 case dir if (dir + thy_file).is_file => file_node(dir + thy_file, theory = theory) }) |
164 case dir if (dir + thy_file).is_file => file_node(dir + thy_file, theory = theory) }) |
168 } |
165 } |
169 |
166 |
170 def import_name(qualifier: String, dir: String, s: String): Document.Node.Name = |
167 def import_name(qualifier: String, dir: String, s: String): Document.Node.Name = { |
171 { |
|
172 val theory = theory_name(qualifier, Thy_Header.import_name(s)) |
168 val theory = theory_name(qualifier, Thy_Header.import_name(s)) |
173 def theory_node = file_node(Path.explode(s).thy, dir = dir, theory = theory) |
169 def theory_node = file_node(Path.explode(s).thy, dir = dir, theory = theory) |
174 |
170 |
175 if (!Thy_Header.is_base_name(s)) theory_node |
171 if (!Thy_Header.is_base_name(s)) theory_node |
176 else if (session_base.loaded_theory(theory)) loaded_theory_node(theory) |
172 else if (session_base.loaded_theory(theory)) loaded_theory_node(theory) |
186 import_name(session_base.theory_qualifier(name), name.master_dir, s) |
182 import_name(session_base.theory_qualifier(name), name.master_dir, s) |
187 |
183 |
/* Resolve a theory import within the context of a session info entry. */
def import_name(info: Sessions.Info, s: String): Document.Node.Name =
  import_name(info.name, info.dir.implode, s)
190 |
186 |
/* Find the theory node for a physical file: locate its session via the
   canonical parent directory, derive the qualified theory name, and check
   that the resolved node really denotes the same file. */
def find_theory(file: JFile): Option[Document.Node.Name] = {
  for {
    qualifier <- session_base.session_directories.get(File.canonical(file).getParentFile)
    theory_base <- proper_string(Thy_Header.theory_name(file.getName))
    theory = theory_name(qualifier, theory_base)
    theory_node <- find_theory_node(theory)
    if File.eq(theory_node.path.file, file)
  } yield theory_node
}
201 |
196 |
202 def complete_import_name(context_name: Document.Node.Name, s: String): List[String] = |
197 def complete_import_name(context_name: Document.Node.Name, s: String): List[String] = { |
203 { |
|
204 val context_session = session_base.theory_qualifier(context_name) |
198 val context_session = session_base.theory_qualifier(context_name) |
205 val context_dir = |
199 val context_dir = |
206 try { Some(context_name.master_dir_path) } |
200 try { Some(context_name.master_dir_path) } |
207 catch { case ERROR(_) => None } |
201 catch { case ERROR(_) => None } |
208 (for { |
202 (for { |
214 if (session == context_session || session_base.global_theories.isDefinedAt(theory)) theory |
208 if (session == context_session || session_base.global_theories.isDefinedAt(theory)) theory |
215 else Long_Name.qualify(session, theory) |
209 else Long_Name.qualify(session, theory) |
216 }).toList.sorted |
210 }).toList.sorted |
217 } |
211 } |
218 |
212 |
/* Apply f to a managed byte reader over the theory file; error if the
   file does not exist (with a message depending on node vs. theory name). */
def with_thy_reader[A](name: Document.Node.Name, f: Reader[Char] => A): A = {
  val path = name.path
  if (path.is_file) using(Scan.byte_reader(path.file))(f)
  else if (name.node == name.theory)
    error("Cannot load theory " + quote(name.theory))
  else error("Cannot load theory file " + path)
}
227 |
220 |
228 def check_thy(node_name: Document.Node.Name, reader: Reader[Char], |
221 def check_thy( |
229 command: Boolean = true, strict: Boolean = true): Document.Node.Header = |
222 node_name: Document.Node.Name, |
230 { |
223 reader: Reader[Char], |
|
224 command: Boolean = true, |
|
225 strict: Boolean = true |
|
226 ): Document.Node.Header = { |
231 if (node_name.is_theory && reader.source.length > 0) { |
227 if (node_name.is_theory && reader.source.length > 0) { |
232 try { |
228 try { |
233 val header = Thy_Header.read(node_name, reader, command = command, strict = strict) |
229 val header = Thy_Header.read(node_name, reader, command = command, strict = strict) |
234 val imports = |
230 val imports = |
235 header.imports.map({ case (s, pos) => |
231 header.imports.map({ case (s, pos) => |
246 } |
242 } |
247 |
243 |
248 |
244 |
249 /* special header */ |
245 /* special header */ |
250 |
246 |
251 def special_header(name: Document.Node.Name): Option[Document.Node.Header] = |
247 def special_header(name: Document.Node.Name): Option[Document.Node.Header] = { |
252 { |
|
253 val imports = |
248 val imports = |
254 if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_name)) |
249 if (name.theory == Sessions.root_name) List(import_name(name, Sessions.theory_name)) |
255 else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP)) |
250 else if (Thy_Header.is_ml_root(name.theory)) List(import_name(name, Thy_Header.ML_BOOTSTRAP)) |
256 else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE)) |
251 else if (Thy_Header.is_bootstrap(name.theory)) List(import_name(name, Thy_Header.PURE)) |
257 else Nil |
252 else Nil |
/* Dependencies of the given theories, without any adjunct payload. */
def dependencies(
  thys: List[(Document.Node.Name, Position.T)],
  progress: Progress = new Progress): Dependencies[Unit] =
  Dependencies.empty[Unit].require_thys((), thys, progress = progress)
293 |
288 |
/* Dependencies of all theory groups of a session, accumulating each
   group's options as the adjunct payload. */
def session_dependencies(
  info: Sessions.Info,
  progress: Progress = new Progress
) : Dependencies[Options] = {
  info.theories.foldLeft(Dependencies.empty[Options]) {
    case (dependencies, (options, thys)) =>
      dependencies.require_thys(options,
        for { (thy, pos) <- thys } yield (import_name(info, thy), pos),
        progress = progress)
  }
}
304 |
300 |
305 object Dependencies |
301 object Dependencies { |
306 { |
|
/* Empty dependencies: no entries, nothing seen. */
def empty[A]: Dependencies[A] = new Dependencies[A](Nil, Map.empty)
308 |
303 |
/* Render a chain of theory names for error messages, e.g. "A" via "B". */
private def show_path(names: List[Document.Node.Name]): String =
  names.map(name => quote(name.theory)).mkString(" via ")
311 |
306 |
317 else "\n(required by " + show_path(initiators.reverse) + ")" |
312 else "\n(required by " + show_path(initiators.reverse) + ")" |
318 } |
313 } |
319 |
314 |
320 final class Dependencies[A] private( |
315 final class Dependencies[A] private( |
321 rev_entries: List[Document.Node.Entry], |
316 rev_entries: List[Document.Node.Entry], |
322 seen: Map[Document.Node.Name, A]) |
317 seen: Map[Document.Node.Name, A] |
323 { |
318 ) { |
/* Prepend an entry, keeping the map of seen theories unchanged. */
private def cons(entry: Document.Node.Entry): Dependencies[A] =
  new Dependencies[A](entry :: rev_entries, seen)
326 |
321 |
327 def require_thy(adjunct: A, |
322 def require_thy(adjunct: A, |
328 thy: (Document.Node.Name, Position.T), |
323 thy: (Document.Node.Name, Position.T), |
329 initiators: List[Document.Node.Name] = Nil, |
324 initiators: List[Document.Node.Name] = Nil, |
330 progress: Progress = new Progress): Dependencies[A] = |
325 progress: Progress = new Progress): Dependencies[A] = { |
331 { |
|
332 val (name, pos) = thy |
326 val (name, pos) = thy |
333 |
327 |
334 def message: String = |
328 def message: String = |
335 "The error(s) above occurred for theory " + quote(name.theory) + |
329 "The error(s) above occurred for theory " + quote(name.theory) + |
336 Dependencies.required_by(initiators) + Position.here(pos) |
330 Dependencies.required_by(initiators) + Position.here(pos) |
378 errors match { |
372 errors match { |
379 case Nil => this |
373 case Nil => this |
380 case errs => error(cat_lines(errs)) |
374 case errs => error(cat_lines(errs)) |
381 } |
375 } |
382 |
376 |
383 lazy val theory_graph: Document.Node.Name.Graph[Unit] = |
377 lazy val theory_graph: Document.Node.Name.Graph[Unit] = { |
384 { |
|
385 val regular = theories.toSet |
378 val regular = theories.toSet |
386 val irregular = |
379 val irregular = |
387 (for { |
380 (for { |
388 entry <- entries.iterator |
381 entry <- entries.iterator |
389 imp <- entry.header.imports |
382 imp <- entry.header.imports |
418 theories.zip( |
411 theories.zip( |
419 Par_List.map((e: () => List[Command_Span.Span]) => e(), |
412 Par_List.map((e: () => List[Command_Span.Span]) => e(), |
420 theories.map(name => resources.load_commands(get_syntax(name), name)))) |
413 theories.map(name => resources.load_commands(get_syntax(name), name)))) |
421 .filter(p => p._2.nonEmpty) |
414 .filter(p => p._2.nonEmpty) |
422 |
415 |
423 def loaded_files(name: Document.Node.Name, spans: List[Command_Span.Span]) |
416 def loaded_files( |
424 : (String, List[Path]) = |
417 name: Document.Node.Name, |
425 { |
418 spans: List[Command_Span.Span] |
|
419 ) : (String, List[Path]) = { |
426 val theory = name.theory |
420 val theory = name.theory |
427 val syntax = get_syntax(name) |
421 val syntax = get_syntax(name) |
428 val files1 = resources.loaded_files(syntax, name, spans) |
422 val files1 = resources.loaded_files(syntax, name, spans) |
429 val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil |
423 val files2 = if (theory == Thy_Header.PURE) pure_files(syntax) else Nil |
430 (theory, files1 ::: files2) |
424 (theory, files1 ::: files2) |
434 for { |
428 for { |
435 (name, spans) <- load_commands |
429 (name, spans) <- load_commands |
436 file <- loaded_files(name, spans)._2 |
430 file <- loaded_files(name, spans)._2 |
437 } yield file |
431 } yield file |
438 |
432 |
439 def imported_files: List[Path] = |
433 def imported_files: List[Path] = { |
440 { |
|
441 val base_theories = |
434 val base_theories = |
442 loaded_theories.all_preds(theories.map(_.theory)). |
435 loaded_theories.all_preds(theories.map(_.theory)). |
443 filter(session_base.loaded_theories.defined) |
436 filter(session_base.loaded_theories.defined) |
444 |
437 |
445 base_theories.map(theory => session_base.known_theories(theory).name.path) ::: |
438 base_theories.map(theory => session_base.known_theories(theory).name.path) ::: |