--- a/doc-src/System/Thy/Sessions.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/doc-src/System/Thy/Sessions.thy Thu Aug 23 15:44:47 2012 +0200
@@ -189,6 +189,7 @@
-d DIR include session directory
-g NAME select session group NAME
-j INT maximum number of parallel jobs (default 1)
+ -l list session source files
-n no build -- test dependencies only
-o OPTION override session configuration OPTION
(via NAME=VAL or NAME)
@@ -265,7 +266,9 @@
@{setting ISABELLE_OUTPUT} (which is normally in @{setting
ISABELLE_HOME_USER}, i.e.\ the user's home directory).
- \medskip Option @{verbatim "-v"} enables verbose mode.
+ \medskip Option @{verbatim "-v"} increases the general level of
+ verbosity. Option @{verbatim "-l"} lists the source files that
+ contribute to a session.
*}
subsubsection {* Examples *}
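A plausible use of the new option combines it with -n, so that the session sources are listed without building anything; the session name here is assumed for illustration:

  isabelle build -n -l HOL-SPARK-Examples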
--- a/doc-src/System/Thy/document/Sessions.tex Thu Aug 23 15:32:22 2012 +0200
+++ b/doc-src/System/Thy/document/Sessions.tex Thu Aug 23 15:44:47 2012 +0200
@@ -302,6 +302,7 @@
-d DIR include session directory
-g NAME select session group NAME
-j INT maximum number of parallel jobs (default 1)
+ -l list session source files
-n no build -- test dependencies only
-o OPTION override session configuration OPTION
(via NAME=VAL or NAME)
@@ -373,7 +374,9 @@
\verb|$ISABELLE_HOME/heaps| instead of the default location
\hyperlink{setting.ISABELLE-OUTPUT}{\mbox{\isa{\isatt{ISABELLE{\isaliteral{5F}{\isacharunderscore}}OUTPUT}}}} (which is normally in \hyperlink{setting.ISABELLE-HOME-USER}{\mbox{\isa{\isatt{ISABELLE{\isaliteral{5F}{\isacharunderscore}}HOME{\isaliteral{5F}{\isacharunderscore}}USER}}}}, i.e.\ the user's home directory).
- \medskip Option \verb|-v| enables verbose mode.%
+ \medskip Option \verb|-v| increases the general level of
+ verbosity. Option \verb|-l| lists the source files that
+ contribute to a session.%
\end{isamarkuptext}%
\isamarkuptrue%
%
--- a/doc-src/antiquote_setup.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/doc-src/antiquote_setup.ML Thu Aug 23 15:44:47 2012 +0200
@@ -135,7 +135,7 @@
val thy_file_setup =
Thy_Output.antiquotation @{binding thy_file} (Scan.lift Args.name)
(fn {context = ctxt, ...} =>
- fn name => (Thy_Load.check_thy [] Path.current name; Thy_Output.output ctxt [Pretty.str name]));
+ fn name => (Thy_Load.check_thy Path.current name; Thy_Output.output ctxt [Pretty.str name]));
(* Isabelle/Isar entities (with index) *)
--- a/etc/isar-keywords.el Thu Aug 23 15:32:22 2012 +0200
+++ b/etc/isar-keywords.el Thu Aug 23 15:44:47 2012 +0200
@@ -238,6 +238,8 @@
"sorry"
"spark_end"
"spark_open"
+ "spark_open_siv"
+ "spark_open_vcg"
"spark_proof_functions"
"spark_status"
"spark_types"
@@ -543,6 +545,8 @@
"sledgehammer_params"
"spark_end"
"spark_open"
+ "spark_open_siv"
+ "spark_open_vcg"
"spark_proof_functions"
"spark_types"
"statespace"
--- a/lib/Tools/build Thu Aug 23 15:32:22 2012 +0200
+++ b/lib/Tools/build Thu Aug 23 15:44:47 2012 +0200
@@ -33,6 +33,7 @@
echo " -d DIR include session directory"
echo " -g NAME select session group NAME"
echo " -j INT maximum number of parallel jobs (default 1)"
+ echo " -l list session source files"
echo " -n no build -- test dependencies only"
echo " -o OPTION override session configuration OPTION (via NAME=VAL or NAME)"
echo " -s system build mode: produce output in ISABELLE_HOME"
@@ -65,12 +66,13 @@
declare -a INCLUDE_DIRS=()
declare -a SESSION_GROUPS=()
MAX_JOBS=1
+LIST_FILES=false
NO_BUILD=false
eval "declare -a BUILD_OPTIONS=($ISABELLE_BUILD_OPTIONS)"
SYSTEM_MODE=false
VERBOSE=false
-while getopts "D:abcd:g:j:no:sv" OPT
+while getopts "D:abcd:g:j:lno:sv" OPT
do
case "$OPT" in
D)
@@ -95,6 +97,9 @@
check_number "$OPTARG"
MAX_JOBS="$OPTARG"
;;
+ l)
+ LIST_FILES="true"
+ ;;
n)
NO_BUILD="true"
;;
@@ -131,7 +136,7 @@
"$ISABELLE_TOOL" java isabelle.Build \
"$ALL_SESSIONS" "$BUILD_HEAP" "$CLEAN_BUILD" "$MAX_JOBS" \
- "$NO_BUILD" "$SYSTEM_MODE" "$VERBOSE" \
+ "$LIST_FILES" "$NO_BUILD" "$SYSTEM_MODE" "$VERBOSE" \
"${SELECT_DIRS[@]}" $'\n' "${INCLUDE_DIRS[@]}" $'\n' \
"${SESSION_GROUPS[@]}" $'\n' "${BUILD_OPTIONS[@]}" $'\n' "$@"
RC="$?"
--- a/src/HOL/Boogie/Boogie.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Boogie/Boogie.thy Thu Aug 23 15:44:47 2012 +0200
@@ -7,7 +7,10 @@
theory Boogie
imports Word
keywords
- "boogie_open" "boogie_end" :: thy_decl and "boogie_vc" :: thy_goal and "boogie_status" :: diag
+ "boogie_open" :: thy_load and
+ "boogie_end" :: thy_decl and
+ "boogie_vc" :: thy_goal and
+ "boogie_status" :: diag
begin
text {*
@@ -64,7 +67,7 @@
Proving Boogie-generated verification conditions happens inside
a Boogie environment:
- boogie_open "b2i file without extension"
+ boogie_open "b2i file with extension"
boogie_vc "verification condition 1" ...
boogie_vc "verification condition 2" ...
boogie_vc "verification condition 3" ...
--- a/src/HOL/Boogie/Examples/Boogie_Dijkstra.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Boogie/Examples/Boogie_Dijkstra.thy Thu Aug 23 15:44:47 2012 +0200
@@ -6,7 +6,6 @@
theory Boogie_Dijkstra
imports Boogie
-uses ("Boogie_Dijkstra.b2i")
begin
text {*
--- a/src/HOL/Boogie/Examples/Boogie_Max.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Boogie/Examples/Boogie_Max.thy Thu Aug 23 15:44:47 2012 +0200
@@ -6,7 +6,6 @@
theory Boogie_Max
imports Boogie
-uses ("Boogie_Max.b2i")
begin
text {*
--- a/src/HOL/Boogie/Examples/Boogie_Max_Stepwise.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Boogie/Examples/Boogie_Max_Stepwise.thy Thu Aug 23 15:44:47 2012 +0200
@@ -6,7 +6,6 @@
theory Boogie_Max_Stepwise
imports Boogie
-uses ("Boogie_Max.b2i")
begin
text {*
--- a/src/HOL/Boogie/Examples/VCC_Max.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Boogie/Examples/VCC_Max.thy Thu Aug 23 15:44:47 2012 +0200
@@ -6,7 +6,6 @@
theory VCC_Max
imports Boogie
-uses ("VCC_Max.b2i")
begin
text {*
--- a/src/HOL/Boogie/Tools/boogie_commands.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Boogie/Tools/boogie_commands.ML Thu Aug 23 15:44:47 2012 +0200
@@ -14,17 +14,16 @@
(* commands *)
-fun boogie_open ((quiet, base_name), offsets) thy =
+fun boogie_open ((quiet, files), offsets) thy =
let
+ val ([{src_path = path, text, ...}], thy') = files thy
+
val ext = "b2i"
- fun check_ext path = snd (Path.split_ext path) = ext orelse
+ val _ = snd (Path.split_ext path) = ext orelse
error ("Bad file ending of file " ^ Path.print path ^ ", " ^
"expected file ending " ^ quote ext)
- val base_path = Path.explode base_name |> tap check_ext
- val (text, thy') = Thy_Load.use_file base_path thy
-
- val _ = Boogie_VCs.is_closed thy' orelse
+ val _ = Boogie_VCs.is_closed thy orelse
error ("Found the beginning of a new Boogie environment, " ^
"but another Boogie environment is still open.")
in
@@ -44,7 +43,7 @@
fun get_vc thy vc_name =
(case Boogie_VCs.lookup thy vc_name of
SOME vc => vc
- | NONE =>
+ | NONE =>
(case AList.lookup (op =) (Boogie_VCs.state_of thy) vc_name of
SOME Boogie_VCs.Proved => error ("The verification condition " ^
quote vc_name ^ " has already been proved.")
@@ -258,7 +257,7 @@
val unproved = map_filter not_proved (Boogie_VCs.state_of thy)
in
if null unproved then Boogie_VCs.close thy
- else error (Pretty.string_of (Pretty.big_list
+ else error (Pretty.string_of (Pretty.big_list
"The following verification conditions have not been proved:"
(map Pretty.str unproved)))
end
@@ -277,7 +276,7 @@
val _ =
Outer_Syntax.command @{command_spec "boogie_open"}
"open a new Boogie environment and load a Boogie-generated .b2i file"
- (scan_opt "quiet" -- Parse.name -- vc_offsets >>
+ (scan_opt "quiet" -- Thy_Load.provide_parse_files "boogie_open" -- vc_offsets >>
(Toplevel.theory o boogie_open))
@@ -350,7 +349,7 @@
val setup = Theory.at_end (fn thy =>
let
val _ = Boogie_VCs.is_closed thy
- orelse error ("Found the end of the theory, " ^
+ orelse error ("Found the end of the theory, " ^
"but the last Boogie environment is still open.")
in NONE end)
--- a/src/HOL/Quickcheck_Narrowing.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Quickcheck_Narrowing.thy Thu Aug 23 15:44:47 2012 +0200
@@ -5,9 +5,6 @@
theory Quickcheck_Narrowing
imports Quickcheck_Exhaustive
keywords "find_unused_assms" :: diag
-uses (* FIXME session files *)
- ("Tools/Quickcheck/PNF_Narrowing_Engine.hs")
- ("Tools/Quickcheck/Narrowing_Engine.hs")
begin
subsection {* Counterexample generator *}
--- a/src/HOL/ROOT Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/ROOT Thu Aug 23 15:44:47 2012 +0200
@@ -2,7 +2,11 @@
description {* Classical Higher-order Logic *}
options [document_graph]
theories Complex_Main
- files "document/root.bib" "document/root.tex"
+ files
+ "Tools/Quickcheck/Narrowing_Engine.hs"
+ "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
+ "document/root.bib"
+ "document/root.tex"
session "HOL-Base" = Pure +
description {* Raw HOL base, with minimal tools *}
@@ -18,11 +22,17 @@
description {* HOL side-entry for Main only, without Complex_Main *}
options [document = false]
theories Main
+ files
+ "Tools/Quickcheck/Narrowing_Engine.hs"
+ "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
session "HOL-Proofs" = Pure +
description {* HOL-Main with explicit proof terms *}
options [document = false, proofs = 2, parallel_proofs = 0]
theories Main
+ files
+ "Tools/Quickcheck/Narrowing_Engine.hs"
+ "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
session "HOL-Library" in Library = HOL +
description {* Classical Higher-order Logic -- batteries included *}
--- a/src/HOL/SPARK/SPARK_Setup.thy Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/SPARK/SPARK_Setup.thy Thu Aug 23 15:44:47 2012 +0200
@@ -8,6 +8,8 @@
theory SPARK_Setup
imports Word
keywords
+ "spark_open_vcg" :: thy_load ("vcg", "fdl", "rls") and
+ "spark_open_siv" :: thy_load ("siv", "fdl", "rls") and
"spark_open" "spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
"spark_vc" :: thy_goal and "spark_status" :: diag
begin
--- a/src/HOL/SPARK/Tools/spark_commands.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/SPARK/Tools/spark_commands.ML Thu Aug 23 15:44:47 2012 +0200
@@ -13,8 +13,22 @@
structure SPARK_Commands: SPARK_COMMANDS =
struct
-(* FIXME proper Thy_Load.use_file, also for fdl_path and rls_path (!?) *)
-fun spark_open (vc_name, prfx) thy =
+fun spark_open header (prfx, files) thy =
+ let
+ val ([{src_path, text = vc_text, pos = vc_pos, ...},
+ {text = fdl_text, pos = fdl_pos, ...},
+ {text = rls_text, pos = rls_pos, ...}], thy') = files thy;
+ val base = fst (Path.split_ext (File.full_path (Thy_Load.master_directory thy') src_path));
+ in
+ SPARK_VCs.set_vcs
+ (snd (Fdl_Parser.parse_declarations fdl_pos fdl_text))
+ (Fdl_Parser.parse_rules rls_pos rls_text)
+ (snd (Fdl_Parser.parse_vcs header vc_pos vc_text))
+ base prfx thy'
+ end;
+
+(* FIXME *)
+fun spark_open_old (vc_name, prfx) thy =
let
val ((vc_path, vc_id), vc_text) = Thy_Load.load_file thy (Path.explode vc_name);
val (base, header) =
@@ -107,7 +121,19 @@
val _ =
Outer_Syntax.command @{command_spec "spark_open"}
"open a new SPARK environment and load a SPARK-generated .vcg or .siv file"
- (Parse.name -- Parse.parname >> (Toplevel.theory o spark_open));
+ (Parse.name -- Parse.parname >> (Toplevel.theory o spark_open_old));
+
+val _ =
+ Outer_Syntax.command @{command_spec "spark_open_vcg"}
+ "open a new SPARK environment and load a SPARK-generated .vcg file"
+ (Parse.parname -- Thy_Load.provide_parse_files "spark_open_vcg"
+ >> (Toplevel.theory o spark_open Fdl_Lexer.vcg_header));
+
+val _ =
+ Outer_Syntax.command @{command_spec "spark_open_siv"}
+ "open a new SPARK environment and load a SPARK-generated .siv file"
+ (Parse.parname -- Thy_Load.provide_parse_files "spark_open_siv"
+ >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
val pfun_type = Scan.option
(Args.parens (Parse.list1 Parse.name) --| Args.colon -- Parse.name);
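A sketch of the resulting concrete syntax, with an assumed base name: since the keywords are declared as thy_load ("vcg", "fdl", "rls") in SPARK_Setup.thy above, the command appends each extension to the given path and loads all three files relative to the theory's master directory, recording them via Thy_Load.provide_parse_files; an optional parenthesized prefix (Parse.parname) may precede the name:

  spark_open_vcg "greatest_common_divisor/g_c_d"

This would read g_c_d.vcg, g_c_d.fdl and g_c_d.rls in one step.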
--- a/src/HOL/Tools/ATP/atp_util.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Tools/ATP/atp_util.ML Thu Aug 23 15:44:47 2012 +0200
@@ -84,7 +84,7 @@
| strip_c_style_comment (_ :: cs) accum = strip_c_style_comment cs accum
and strip_spaces_in_list _ [] accum = accum
| strip_spaces_in_list true (#"%" :: cs) accum =
- strip_spaces_in_list true (cs |> chop_while (not_equal #"\n") |> snd)
+ strip_spaces_in_list true (cs |> take_prefix (not_equal #"\n") |> snd)
accum
| strip_spaces_in_list true (#"/" :: #"*" :: cs) accum =
strip_c_style_comment cs accum
--- a/src/HOL/Tools/Quickcheck/narrowing_generators.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Tools/Quickcheck/narrowing_generators.ML Thu Aug 23 15:44:47 2012 +0200
@@ -202,12 +202,10 @@
(** invocation of Haskell interpreter **)
val narrowing_engine =
- Context.>>> (Context.map_theory_result
- (Thy_Load.use_file (Path.explode "Tools/Quickcheck/Narrowing_Engine.hs")))
+ File.read (Path.explode "~~/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs")
val pnf_narrowing_engine =
- Context.>>> (Context.map_theory_result
- (Thy_Load.use_file (Path.explode "Tools/Quickcheck/PNF_Narrowing_Engine.hs")))
+ File.read (Path.explode "~~/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs")
fun exec verbose code =
ML_Context.exec (fn () => Secure.use_text ML_Env.local_context (0, "generated code") verbose code)
--- a/src/HOL/Tools/SMT/smt_setup_solvers.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Tools/SMT/smt_setup_solvers.ML Thu Aug 23 15:44:47 2012 +0200
@@ -51,7 +51,7 @@
let
val empty_line = (fn "" => true | _ => false)
val split_first = (fn [] => ("", []) | l :: ls => (l, ls))
- val (l, ls) = split_first (snd (chop_while empty_line lines))
+ val (l, ls) = split_first (snd (take_prefix empty_line lines))
in (test_outcome solver_name l, ls) end
--- a/src/HOL/Tools/SMT/smt_solver.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Tools/SMT/smt_solver.ML Thu Aug 23 15:44:47 2012 +0200
@@ -96,7 +96,7 @@
SMT_Config.with_timeout ctxt (run ctxt name mk_cmd) input
val _ = SMT_Config.trace_msg ctxt (pretty "Solver:") err
- val ls = rev (snd (chop_while (equal "") (rev res)))
+ val ls = fst (take_suffix (equal "") res)
val _ = SMT_Config.trace_msg ctxt (pretty "Result:") ls
val _ = return_code <> 0 andalso
--- a/src/HOL/Tools/refute.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/HOL/Tools/refute.ML Thu Aug 23 15:44:47 2012 +0200
@@ -2772,7 +2772,7 @@
(* go back up the stack until we find a level where we can go *)
(* to the next sibling node *)
let
- val offsets' = snd (chop_while
+ val offsets' = snd (take_prefix
(fn off' => off' mod size_elem = size_elem - 1) offsets)
in
case offsets' of
--- a/src/Pure/Isar/code.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/Isar/code.ML Thu Aug 23 15:44:47 2012 +0200
@@ -1017,7 +1017,7 @@
val c = const_eqn thy thm;
fun update_subsume thy (thm, proper) eqns =
let
- val args_of = snd o chop_while is_Var o rev o snd o strip_comb
+ val args_of = snd o take_prefix is_Var o rev o snd o strip_comb
o map_types Type.strip_sorts o fst o Logic.dest_equals o Thm.plain_prop_of;
val args = args_of thm;
val incr_idx = Logic.incr_indexes ([], Thm.maxidx_of thm + 1);
--- a/src/Pure/Isar/keyword.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/Isar/keyword.ML Thu Aug 23 15:44:47 2012 +0200
@@ -47,9 +47,6 @@
type spec = (string * string list) * string list
val spec: spec -> T
val command_spec: (string * spec) * Position.T -> (string * T) * Position.T
- type keywords
- val lexicons_of: keywords -> Scan.lexicon * Scan.lexicon
- val get_keywords: unit -> keywords
val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
val is_keyword: string -> bool
val command_keyword: string -> T option
@@ -57,8 +54,6 @@
val command_tags: string -> string list
val dest: unit -> string list * string list
val status: unit -> unit
- val thy_load_commands: keywords -> (string * string list) list
- val define_keywords: string * T option -> keywords -> keywords
val define: string * T option -> unit
val is_diag: string -> bool
val is_control: string -> bool
@@ -172,11 +167,6 @@
fun make_keywords (lexicons, commands) =
Keywords {lexicons = lexicons, commands = commands};
-fun map_keywords f (Keywords {lexicons, commands}) =
- make_keywords (f (lexicons, commands));
-
-fun lexicons_of (Keywords {lexicons, ...}) = lexicons;
-
local
val global_keywords =
@@ -186,11 +176,13 @@
fun get_keywords () = ! global_keywords;
-fun change_keywords f = CRITICAL (fn () => Unsynchronized.change global_keywords f);
+fun change_keywords f = CRITICAL (fn () =>
+ Unsynchronized.change global_keywords
+ (fn Keywords {lexicons, commands} => make_keywords (f (lexicons, commands))));
end;
-fun get_lexicons () = lexicons_of (get_keywords ());
+fun get_lexicons () = get_keywords () |> (fn Keywords {lexicons, ...} => lexicons);
fun get_commands () = get_keywords () |> (fn Keywords {commands, ...} => commands);
@@ -221,14 +213,9 @@
in () end;
-fun thy_load_commands (Keywords {commands, ...}) =
- Symtab.fold (fn (name, Keyword {kind, files, ...}) =>
- kind = kind_of thy_load ? cons (name, files)) commands [];
-
-
(* define *)
-fun define_keywords (name, opt_kind) = map_keywords (fn ((minor, major), commands) =>
+fun define (name, opt_kind) = change_keywords (fn ((minor, major), commands) =>
(case opt_kind of
NONE =>
let
@@ -240,8 +227,6 @@
val commands' = Symtab.update (name, kind) commands;
in ((minor, major'), commands') end));
-val define = change_keywords o define_keywords;
-
(* command categories *)
--- a/src/Pure/Isar/token.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/Isar/token.ML Thu Aug 23 15:44:47 2012 +0200
@@ -10,10 +10,10 @@
Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
Nat | Float | String | AltString | Verbatim | Space | Comment | InternalValue |
Error of string | Sync | EOF
- type files = Path.T * (string * Position.T) list
+ type file = {src_path: Path.T, text: string, pos: Position.T}
datatype value =
Text of string | Typ of typ | Term of term | Fact of thm list |
- Attribute of morphism -> attribute | Files of files
+ Attribute of morphism -> attribute | Files of file list
type T
val str_of_kind: kind -> string
val position_of: T -> Position.T
@@ -44,8 +44,8 @@
val content_of: T -> string
val unparse: T -> string
val text_of: T -> string * string
- val get_files: T -> files option
- val put_files: files -> T -> T
+ val get_files: T -> file list option
+ val put_files: file list -> T -> T
val get_value: T -> value option
val map_value: (value -> value) -> T -> T
val mk_text: string -> T
@@ -78,7 +78,7 @@
args.ML). Note that an assignable ref designates an intermediate
state of internalization -- it is NOT meant to persist.*)
-type files = Path.T * (string * Position.T) list; (*master dir, multiple file content*)
+type file = {src_path: Path.T, text: string, pos: Position.T};
datatype value =
Text of string |
@@ -86,7 +86,7 @@
Term of term |
Fact of thm list |
Attribute of morphism -> attribute |
- Files of files;
+ Files of file list;
datatype slot =
Slot |
--- a/src/Pure/ProofGeneral/pgip_isabelle.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/ProofGeneral/pgip_isabelle.ML Thu Aug 23 15:44:47 2012 +0200
@@ -79,7 +79,7 @@
NONE => (NONE, NONE)
| SOME fname =>
let val path = Path.explode fname in
- if can (Thy_Load.check_file Path.current) path
+ if File.exists path
then (SOME (PgipTypes.pgipurl_of_path path), NONE)
else (NONE, SOME fname)
end);
--- a/src/Pure/ProofGeneral/proof_general_pgip.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/ProofGeneral/proof_general_pgip.ML Thu Aug 23 15:44:47 2012 +0200
@@ -594,14 +594,14 @@
fun filerefs f =
let val thy = thy_name f
- val filerefs = map #1 (#uses (Thy_Load.check_thy [] (Path.dir f) thy))
+ val filerefs = map #1 (#uses (Thy_Load.check_thy (Path.dir f) thy))
in
issue_pgip (Setrefs {url=url, thyname=NONE, objtype=SOME PgipTypes.ObjFile,
name=NONE, idtables=[], fileurls=filerefs})
end
fun thyrefs thy =
- let val thyrefs = #imports (Thy_Load.check_thy [] Path.current thy)
+ let val thyrefs = #imports (Thy_Load.check_thy Path.current thy)
in
issue_pgip (Setrefs {url=url, thyname=thyname, objtype=SOME PgipTypes.ObjTheory,
name=NONE, idtables=[{context=NONE, objtype=PgipTypes.ObjTheory,
--- a/src/Pure/System/build.scala Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/System/build.scala Thu Aug 23 15:44:47 2012 +0200
@@ -328,7 +328,7 @@
def sources(name: String): List[SHA1.Digest] = deps(name).sources.map(_._2)
}
- def dependencies(verbose: Boolean, tree: Session_Tree): Deps =
+ def dependencies(verbose: Boolean, list_files: Boolean, tree: Session_Tree): Deps =
Deps((Map.empty[String, Session_Content] /: tree.topological_order)(
{ case (deps, (name, info)) =>
val (preloaded, parent_syntax, parent_errors) =
@@ -341,7 +341,7 @@
}
val thy_info = new Thy_Info(new Thy_Load(preloaded, parent_syntax))
- if (verbose) {
+ if (verbose || list_files) {
val groups =
if (info.groups.isEmpty) ""
else info.groups.mkString(" (", " ", ")")
@@ -357,11 +357,15 @@
val syntax = thy_deps.make_syntax
val all_files =
- thy_deps.deps.map({ case (n, h) =>
- val thy = Path.explode(n.node).expand
- val uses = h.uses.map(p => (Path.explode(n.dir) + Path.explode(p._1)).expand)
+ (thy_deps.deps.map({ case (n, h) =>
+ val thy = Path.explode(n.node)
+ val uses = h.uses.map(p => Path.explode(n.dir) + Path.explode(p._1))
thy :: uses
- }).flatten ::: info.files.map(file => info.dir + file)
+ }).flatten ::: info.files.map(file => info.dir + file)).map(_.expand)
+
+ if (list_files)
+ echo(cat_lines(all_files.map(_.implode).sorted.map(" " + _)))
+
val sources =
try { all_files.map(p => (p, SHA1.digest(p))) }
catch {
@@ -378,7 +382,7 @@
{
val (_, tree) =
find_sessions(Options.init(), dirs.map((false, _))).required(false, Nil, List(session))
- dependencies(false, tree)(session)
+ dependencies(false, false, tree)(session)
}
def outer_syntax(session: String): Outer_Syntax =
@@ -531,6 +535,7 @@
more_dirs: List[(Boolean, Path)] = Nil,
session_groups: List[String] = Nil,
max_jobs: Int = 1,
+ list_files: Boolean = false,
no_build: Boolean = false,
build_options: List[String] = Nil,
system_mode: Boolean = false,
@@ -540,7 +545,7 @@
val options = (Options.init() /: build_options)(_ + _)
val (descendants, tree) =
find_sessions(options, more_dirs).required(all_sessions, session_groups, sessions)
- val deps = dependencies(verbose, tree)
+ val deps = dependencies(verbose, list_files, tree)
val queue = Queue(tree)
def make_stamp(name: String): String =
@@ -692,6 +697,7 @@
Properties.Value.Boolean(build_heap) ::
Properties.Value.Boolean(clean_build) ::
Properties.Value.Int(max_jobs) ::
+ Properties.Value.Boolean(list_files) ::
Properties.Value.Boolean(no_build) ::
Properties.Value.Boolean(system_mode) ::
Properties.Value.Boolean(verbose) ::
@@ -700,7 +706,7 @@
select_dirs.map(d => (true, Path.explode(d))) :::
include_dirs.map(d => (false, Path.explode(d)))
build(all_sessions, build_heap, clean_build, dirs, session_groups,
- max_jobs, no_build, build_options, system_mode, verbose, sessions)
+ max_jobs, list_files, no_build, build_options, system_mode, verbose, sessions)
case _ => error("Bad arguments:\n" + cat_lines(args))
}
}
--- a/src/Pure/Thy/thy_info.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/Thy/thy_info.ML Thu Aug 23 15:44:47 2012 +0200
@@ -236,16 +236,16 @@
fun commit () = update_thy deps theory;
in (theory, present, commit) end;
-fun check_deps base_keywords dir name =
+fun check_deps dir name =
(case lookup_deps name of
SOME NONE => (true, NONE, get_imports name, [], [])
| NONE =>
- let val {master, text, imports, keywords, uses} = Thy_Load.check_thy base_keywords dir name
+ let val {master, text, imports, keywords, uses} = Thy_Load.check_thy dir name
in (false, SOME (make_deps master imports, text), imports, uses, keywords) end
| SOME (SOME {master, ...}) =>
let
val {master = master', text = text', imports = imports', uses = uses', keywords = keywords'}
- = Thy_Load.check_thy base_keywords dir name;
+ = Thy_Load.check_thy dir name;
val deps' = SOME (make_deps master' imports', text');
val current =
#2 master = #2 master' andalso
@@ -256,29 +256,29 @@
in
-fun require_thys initiators dir strs result =
- fold_map (require_thy initiators dir) strs result |>> forall I
-and require_thy initiators dir str (base_keywords, tasks) =
+fun require_thys initiators dir strs tasks =
+ fold_map (require_thy initiators dir) strs tasks |>> forall I
+and require_thy initiators dir str tasks =
let
val path = Path.expand (Path.explode str);
val name = Path.implode (Path.base path);
in
(case try (Graph.get_node tasks) name of
- SOME task => (task_finished task, (base_keywords, tasks))
+ SOME task => (task_finished task, tasks)
| NONE =>
let
val dir' = Path.append dir (Path.dir path);
val _ = member (op =) initiators name andalso error (cycle_msg initiators);
- val (current, deps, imports, uses, keywords) = check_deps base_keywords dir' name
+ val (current, deps, imports, uses, keywords) = check_deps dir' name
handle ERROR msg => cat_error msg
(loader_msg "the error(s) above occurred while examining theory" [name] ^
required_by "\n" initiators);
val parents = map base_name imports;
- val (parents_current, (base_keywords', tasks')) =
+ val (parents_current, tasks') =
require_thys (name :: initiators)
- (Path.append dir (master_dir (Option.map #1 deps))) imports (base_keywords, tasks);
+ (Path.append dir (master_dir (Option.map #1 deps))) imports tasks;
val all_current = current andalso parents_current;
val task =
@@ -292,9 +292,8 @@
val load = load_thy initiators update_time dep text name uses keywords;
in Task (parents, load) end);
- val base_keywords'' = keywords @ base_keywords';
val tasks'' = new_entry name parents task tasks';
- in (all_current, (base_keywords'', tasks'')) end)
+ in (all_current, tasks'') end)
end;
end;
@@ -303,7 +302,7 @@
(* use_thy *)
fun use_thys_wrt dir arg =
- schedule_tasks (snd (snd (require_thys [] dir arg ([], Graph.empty))));
+ schedule_tasks (snd (require_thys [] dir arg Graph.empty));
val use_thys = use_thys_wrt Path.current;
val use_thy = use_thys o single;
@@ -326,7 +325,7 @@
fun register_thy theory =
let
val name = Context.theory_name theory;
- val {master, ...} = Thy_Load.check_thy [] (Thy_Load.master_directory theory) name;
+ val {master, ...} = Thy_Load.check_thy (Thy_Load.master_directory theory) name;
val imports = Thy_Load.imports_of theory;
in
NAMED_CRITICAL "Thy_Info" (fn () =>
--- a/src/Pure/Thy/thy_load.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/Thy/thy_load.ML Thu Aug 23 15:44:47 2012 +0200
@@ -8,12 +8,13 @@
sig
val master_directory: theory -> Path.T
val imports_of: theory -> string list
- val provide: Path.T * (Path.T * SHA1.digest) -> theory -> theory
- val check_file: Path.T -> Path.T -> Path.T
val thy_path: Path.T -> Path.T
- val check_thy: Thy_Header.keywords -> Path.T -> string ->
+ val parse_files: string -> (theory -> Token.file list) parser
+ val check_thy: Path.T -> string ->
{master: Path.T * SHA1.digest, text: string, imports: string list,
uses: (Path.T * bool) list, keywords: Thy_Header.keywords}
+ val provide: Path.T * SHA1.digest -> theory -> theory
+ val provide_parse_files: string -> (theory -> Token.file list * theory) parser
val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
val use_file: Path.T -> theory -> string * theory
val loaded_files: theory -> Path.T list
@@ -23,7 +24,6 @@
val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
val load_thy: int -> Path.T -> Thy_Header.header -> Position.T -> string ->
theory list -> theory * unit future
- val parse_files: string -> (theory -> Token.files) parser
val set_master_path: Path.T -> unit
val get_master_path: unit -> Path.T
end;
@@ -36,7 +36,7 @@
type files =
{master_dir: Path.T, (*master directory of theory source*)
imports: string list, (*source specification of imports*)
- provided: (Path.T * (Path.T * SHA1.digest)) list}; (*source path, physical path, digest*)
+ provided: (Path.T * SHA1.digest) list}; (*source path, digest*)
fun make_files (master_dir, imports, provided): files =
{master_dir = master_dir, imports = imports, provided = provided};
@@ -59,17 +59,29 @@
fun put_deps master_dir imports = map_files (fn _ => (master_dir, imports, []));
-fun provide (src_path, path_id) =
- map_files (fn (master_dir, imports, provided) =>
- if AList.defined (op =) provided src_path then
- error ("Duplicate use of source file: " ^ Path.print src_path)
- else (master_dir, imports, (src_path, path_id) :: provided));
-
(* inlined files *)
fun check_file dir file = File.check_file (File.full_path dir file);
+fun make_file dir file =
+ let val full_path = check_file dir file
+ in {src_path = file, text = File.read full_path, pos = Path.position full_path} end;
+
+fun read_files cmd dir path =
+ let
+ val paths =
+ (case Keyword.command_files cmd of
+ [] => [path]
+ | exts => map (fn ext => Path.ext ext path) exts);
+ in map (make_file dir) paths end;
+
+fun parse_files cmd =
+ Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
+ (case Token.get_files tok of
+ SOME files => files
+ | NONE => read_files cmd (master_directory thy) (Path.explode name)));
+
local
fun clean ((i1, t1) :: (i2, t2) :: toks) =
@@ -89,42 +101,8 @@
handle ERROR msg => error (msg ^ Token.pos_of tok)
else NONE);
-fun command_files path exts =
- if null exts then [path]
- else map (fn ext => Path.ext ext path) exts;
-
in
-fun find_files syntax text =
- let val thy_load_commands = Keyword.thy_load_commands syntax in
- if exists (fn (cmd, _) => String.isSubstring cmd text) thy_load_commands then
- Thy_Syntax.parse_tokens (Keyword.lexicons_of syntax) Position.none text
- |> Thy_Syntax.parse_spans
- |> maps
- (fn Thy_Syntax.Span (Thy_Syntax.Command (cmd, _), toks) =>
- (case AList.lookup (op =) thy_load_commands cmd of
- SOME exts =>
- (case find_file toks of
- SOME (_, path) => command_files path exts
- | NONE => [])
- | NONE => [])
- | _ => [])
- else []
- end;
-
-fun read_files cmd dir path =
- let
- val files =
- command_files path (Keyword.command_files cmd)
- |> map (check_file dir #> (fn p => (File.read p, Path.position p)));
- in (dir, files) end;
-
-fun parse_files cmd =
- Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
- (case Token.get_files tok of
- SOME files => files
- | NONE => read_files cmd (master_directory thy) (Path.explode name)));
-
fun resolve_files master_dir span =
(case span of
Thy_Syntax.Span (Thy_Syntax.Command (cmd, pos), toks) =>
@@ -147,7 +125,7 @@
val thy_path = Path.ext "thy";
-fun check_thy base_keywords dir thy_name =
+fun check_thy dir thy_name =
let
val path = thy_path (Path.basic thy_name);
val master_file = check_file dir path;
@@ -156,19 +134,27 @@
val {name, imports, uses, keywords} = Thy_Header.read (Path.position master_file) text;
val _ = thy_name <> name andalso
error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name);
-
- val syntax =
- fold (Keyword.define_keywords o apsnd (Option.map Keyword.spec))
- (keywords @ base_keywords) (Keyword.get_keywords ());
- val more_uses = map (rpair false) (find_files syntax text);
in
{master = (master_file, SHA1.digest text), text = text,
- imports = imports, uses = uses @ more_uses, keywords = keywords}
+ imports = imports, uses = uses, keywords = keywords}
end;
(* load files *)
+fun provide (src_path, id) =
+ map_files (fn (master_dir, imports, provided) =>
+ if AList.defined (op =) provided src_path then
+ error ("Duplicate use of source file: " ^ Path.print src_path)
+ else (master_dir, imports, (src_path, id) :: provided));
+
+fun provide_parse_files cmd =
+ parse_files cmd >> (fn files => fn thy =>
+ let
+ val fs = files thy;
+ val thy' = fold (fn {src_path, text, ...} => provide (src_path, SHA1.digest text)) fs thy;
+ in (fs, thy') end);
+
fun load_file thy src_path =
let
val full_path = check_file (master_directory thy) src_path;
@@ -178,15 +164,17 @@
fun use_file src_path thy =
let
- val (path_id, text) = load_file thy src_path;
- val thy' = provide (src_path, path_id) thy;
+ val ((_, id), text) = load_file thy src_path;
+ val thy' = provide (src_path, id) thy;
in (text, thy') end;
-val loaded_files = map (#1 o #2) o #provided o Files.get;
+fun loaded_files thy =
+ let val {master_dir, provided, ...} = Files.get thy
+ in map (File.full_path master_dir o #1) provided end;
fun load_current thy =
#provided (Files.get thy) |>
- forall (fn (src_path, (_, id)) =>
+ forall (fn (src_path, id) =>
(case try (load_file thy) src_path of
NONE => false
| SOME ((_, id'), _) => id = id'));
@@ -208,7 +196,7 @@
val _ = eval_file path text;
val _ = Context.>> Local_Theory.propagate_ml_env;
- val provide = provide (src_path, (path, id));
+ val provide = provide (src_path, id);
val _ = Context.>> (Context.mapping provide (Local_Theory.background_theory provide));
in () end;
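For tool authors, a minimal sketch of the new Thy_Load interface; the command name "my_file" and the elided processing of the text are assumptions, not part of this changeset. The keyword has to be declared as :: thy_load in the enclosing theory header; parse_files first consults Token.get_files (content inlined into the token by the document model via resolve_files) and falls back to reading from the master directory, while provide_parse_files additionally records the loaded file in the theory:

  (* hypothetical thy_load command; assumes
       keywords "my_file" :: thy_load
     in the theory header *)
  val _ =
    Outer_Syntax.command @{command_spec "my_file"} "load an auxiliary file"
      (Thy_Load.provide_parse_files "my_file" >> (fn files =>
        Toplevel.theory (fn thy =>
          let
            val ([{src_path, text, pos}], thy') = files thy;
            (* ... process text here; src_path and pos serve error reporting ... *)
          in thy' end)));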
--- a/src/Pure/library.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/library.ML Thu Aug 23 15:44:47 2012 +0200
@@ -74,7 +74,6 @@
val take: int -> 'a list -> 'a list
val drop: int -> 'a list -> 'a list
val chop: int -> 'a list -> 'a list * 'a list
- val chop_while: ('a -> bool) -> 'a list -> 'a list * 'a list
val chop_groups: int -> 'a list -> 'a list list
val nth: 'a list -> int -> 'a
val nth_list: 'a list list -> int -> 'a list
@@ -413,10 +412,6 @@
| chop _ [] = ([], [])
| chop n (x :: xs) = chop (n - 1) xs |>> cons x;
-fun chop_while P [] = ([], [])
- | chop_while P (ys as x :: xs) =
- if P x then chop_while P xs |>> cons x else ([], ys);
-
fun chop_groups n list =
(case chop (Int.max (n, 1)) list of
([], _) => []
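The removed chop_while duplicated the pre-existing take_prefix, which is what motivates the call-site replacements elsewhere in this changeset; a small illustration (values made up):

  take_prefix (fn x => x < 3) [1, 2, 5, 1];          (* ([1, 2], [5, 1]) *)
  take_suffix (fn s => s = "") ["a", "b", "", ""];   (* (["a", "b"], ["", ""]) *)

Both split a list around the longest run satisfying the predicate, at the front and at the end respectively.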
--- a/src/Pure/pure_syn.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Pure/pure_syn.ML Thu Aug 23 15:44:47 2012 +0200
@@ -18,15 +18,13 @@
val _ =
Outer_Syntax.command
(("ML_file", Keyword.tag_ml Keyword.thy_load), Position.none) "ML text from file"
- (Scan.ahead Parse.path -- Thy_Load.parse_files "ML_file"
- >> (fn (name, files) => Toplevel.generic_theory (fn gthy =>
+ (Thy_Load.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
let
- val src_path = Path.explode name;
- val (dir, [(txt, pos)]) = files (Context.theory_of gthy);
- val provide = Thy_Load.provide (src_path, (File.full_path dir src_path, SHA1.digest txt));
+ val [{src_path, text, pos}] = files (Context.theory_of gthy);
+ val provide = Thy_Load.provide (src_path, SHA1.digest text);
in
gthy
- |> ML_Context.exec (fn () => ML_Context.eval_text true pos txt)
+ |> ML_Context.exec (fn () => ML_Context.eval_text true pos text)
|> Local_Theory.propagate_ml_env
|> Context.mapping provide (Local_Theory.background_theory provide)
end)));
--- a/src/Tools/case_product.ML Thu Aug 23 15:32:22 2012 +0200
+++ b/src/Tools/case_product.ML Thu Aug 23 15:44:47 2012 +0200
@@ -48,8 +48,8 @@
val concl = Thm.concl_of thm1
fun is_consumes t = not (Logic.strip_assums_concl t aconv concl)
- val (p_cons1, p_cases1) = chop_while is_consumes (Thm.prems_of thm1)
- val (p_cons2, p_cases2) = chop_while is_consumes (Thm.prems_of thm2)
+ val (p_cons1, p_cases1) = take_prefix is_consumes (Thm.prems_of thm1)
+ val (p_cons2, p_cases2) = take_prefix is_consumes (Thm.prems_of thm2)
val p_cases_prod = map (fn p1 => map (fn p2 =>
let