author wenzelm
date Thu Aug 23 15:44:47 2012 +0200 (2012-08-23)
changeset 48910 1c8c15c30356
parent 48909 b2dea007e55e
parent 48908 713f24d7a40f
child 48911 5debc3e4fa81
merged
     1.1 --- a/doc-src/System/Thy/Sessions.thy	Thu Aug 23 15:32:22 2012 +0200
     1.2 +++ b/doc-src/System/Thy/Sessions.thy	Thu Aug 23 15:44:47 2012 +0200
     1.3 @@ -189,6 +189,7 @@
     1.4      -d DIR       include session directory
     1.5      -g NAME      select session group NAME
     1.6      -j INT       maximum number of parallel jobs (default 1)
     1.7 +    -l           list session source files
     1.8      -n           no build -- test dependencies only
     1.9      -o OPTION    override session configuration OPTION
    1.10                   (via NAME=VAL or NAME)
    1.11 @@ -265,7 +266,9 @@
    1.12    @{setting ISABELLE_OUTPUT} (which is normally in @{setting
    1.13    ISABELLE_HOME_USER}, i.e.\ the user's home directory).
    1.14  
    1.15 -  \medskip Option @{verbatim "-v"} enables verbose mode.
    1.16 +  \medskip Option @{verbatim "-v"} increases the general level of
    1.17 +  verbosity.  Option @{verbatim "-l"} lists the source files that
    1.18 +  contribute to a session.
    1.19  *}
    1.20  
    1.21  subsubsection {* Examples *}
     2.1 --- a/doc-src/System/Thy/document/Sessions.tex	Thu Aug 23 15:32:22 2012 +0200
     2.2 +++ b/doc-src/System/Thy/document/Sessions.tex	Thu Aug 23 15:44:47 2012 +0200
     2.3 @@ -302,6 +302,7 @@
     2.4      -d DIR       include session directory
     2.5      -g NAME      select session group NAME
     2.6      -j INT       maximum number of parallel jobs (default 1)
     2.7 +    -l           list session source files
     2.8      -n           no build -- test dependencies only
     2.9      -o OPTION    override session configuration OPTION
    2.10                   (via NAME=VAL or NAME)
    2.11 @@ -373,7 +374,9 @@
    2.12    \verb|$ISABELLE_HOME/heaps| instead of the default location
    2.13    \hyperlink{setting.ISABELLE-OUTPUT}{\mbox{\isa{\isatt{ISABELLE{\isaliteral{5F}{\isacharunderscore}}OUTPUT}}}} (which is normally in \hyperlink{setting.ISABELLE-HOME-USER}{\mbox{\isa{\isatt{ISABELLE{\isaliteral{5F}{\isacharunderscore}}HOME{\isaliteral{5F}{\isacharunderscore}}USER}}}}, i.e.\ the user's home directory).
    2.14  
    2.15 -  \medskip Option \verb|-v| enables verbose mode.%
    2.16 +  \medskip Option \verb|-v| increases the general level of
    2.17 +  verbosity.  Option \verb|-l| lists the source files that
    2.18 +  contribute to a session.%
    2.19  \end{isamarkuptext}%
    2.20  \isamarkuptrue%
    2.21  %
     3.1 --- a/doc-src/antiquote_setup.ML	Thu Aug 23 15:32:22 2012 +0200
     3.2 +++ b/doc-src/antiquote_setup.ML	Thu Aug 23 15:44:47 2012 +0200
     3.3 @@ -135,7 +135,7 @@
     3.4  val thy_file_setup =
     3.5    Thy_Output.antiquotation @{binding thy_file} (Scan.lift Args.name)
     3.6      (fn {context = ctxt, ...} =>
     3.7 -      fn name => (Thy_Load.check_thy [] Path.current name; Thy_Output.output ctxt [Pretty.str name]));
     3.8 +      fn name => (Thy_Load.check_thy Path.current name; Thy_Output.output ctxt [Pretty.str name]));
     3.9  
    3.10  
    3.11  (* Isabelle/Isar entities (with index) *)
     4.1 --- a/etc/isar-keywords.el	Thu Aug 23 15:32:22 2012 +0200
     4.2 +++ b/etc/isar-keywords.el	Thu Aug 23 15:44:47 2012 +0200
     4.3 @@ -238,6 +238,8 @@
     4.4      "sorry"
     4.5      "spark_end"
     4.6      "spark_open"
     4.7 +    "spark_open_siv"
     4.8 +    "spark_open_vcg"
     4.9      "spark_proof_functions"
    4.10      "spark_status"
    4.11      "spark_types"
    4.12 @@ -543,6 +545,8 @@
    4.13      "sledgehammer_params"
    4.14      "spark_end"
    4.15      "spark_open"
    4.16 +    "spark_open_siv"
    4.17 +    "spark_open_vcg"
    4.18      "spark_proof_functions"
    4.19      "spark_types"
    4.20      "statespace"
     5.1 --- a/lib/Tools/build	Thu Aug 23 15:32:22 2012 +0200
     5.2 +++ b/lib/Tools/build	Thu Aug 23 15:44:47 2012 +0200
     5.3 @@ -33,6 +33,7 @@
     5.4    echo "    -d DIR       include session directory"
     5.5    echo "    -g NAME      select session group NAME"
     5.6    echo "    -j INT       maximum number of parallel jobs (default 1)"
     5.7 +  echo "    -l           list session source files"
     5.8    echo "    -n           no build -- test dependencies only"
     5.9    echo "    -o OPTION    override session configuration OPTION (via NAME=VAL or NAME)"
    5.10    echo "    -s           system build mode: produce output in ISABELLE_HOME"
    5.11 @@ -65,12 +66,13 @@
    5.12  declare -a INCLUDE_DIRS=()
    5.13  declare -a SESSION_GROUPS=()
    5.14  MAX_JOBS=1
    5.15 +LIST_FILES=false
    5.16  NO_BUILD=false
    5.17  eval "declare -a BUILD_OPTIONS=($ISABELLE_BUILD_OPTIONS)"
    5.18  SYSTEM_MODE=false
    5.19  VERBOSE=false
    5.20  
    5.21 -while getopts "D:abcd:g:j:no:sv" OPT
    5.22 +while getopts "D:abcd:g:j:lno:sv" OPT
    5.23  do
    5.24    case "$OPT" in
    5.25      D)
    5.26 @@ -95,6 +97,9 @@
    5.27        check_number "$OPTARG"
    5.28        MAX_JOBS="$OPTARG"
    5.29        ;;
    5.30 +    l)
    5.31 +      LIST_FILES="true"
    5.32 +      ;;
    5.33      n)
    5.34        NO_BUILD="true"
    5.35        ;;
    5.36 @@ -131,7 +136,7 @@
    5.37  
    5.38  "$ISABELLE_TOOL" java isabelle.Build \
    5.39    "$ALL_SESSIONS" "$BUILD_HEAP" "$CLEAN_BUILD" "$MAX_JOBS" \
    5.40 -  "$NO_BUILD" "$SYSTEM_MODE" "$VERBOSE" \
    5.41 +  "$LIST_FILES" "$NO_BUILD" "$SYSTEM_MODE" "$VERBOSE" \
    5.42    "${SELECT_DIRS[@]}" $'\n' "${INCLUDE_DIRS[@]}" $'\n' \
    5.43    "${SESSION_GROUPS[@]}" $'\n' "${BUILD_OPTIONS[@]}" $'\n' "$@"
    5.44  RC="$?"
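On the shell side the new switch is simply threaded through to isabelle.Build as an additional positional argument; combined with -n it gives a cheap way to inspect a session's sources, e.g. an invocation like "isabelle build -n -l HOL" should print the files contributing to session HOL without building anything (the actual listing is produced in build.scala below).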
     6.1 --- a/src/HOL/Boogie/Boogie.thy	Thu Aug 23 15:32:22 2012 +0200
     6.2 +++ b/src/HOL/Boogie/Boogie.thy	Thu Aug 23 15:44:47 2012 +0200
     6.3 @@ -7,7 +7,10 @@
     6.4  theory Boogie
     6.5  imports Word
     6.6  keywords
     6.7 -  "boogie_open" "boogie_end" :: thy_decl and "boogie_vc" :: thy_goal and "boogie_status" :: diag
     6.8 +  "boogie_open" :: thy_load and
     6.9 +  "boogie_end" :: thy_decl and
    6.10 +  "boogie_vc" :: thy_goal and
    6.11 +  "boogie_status" :: diag
    6.12  begin
    6.13  
    6.14  text {*
    6.15 @@ -64,7 +67,7 @@
    6.16  Proving Boogie-generated verification conditions happens inside
    6.17  a Boogie environment:
    6.18  
    6.19 -  boogie_open "b2i file without extension"
    6.20 +  boogie_open "b2i file with extension"
    6.21    boogie_vc "verification condition 1" ...
    6.22    boogie_vc "verification condition 2" ...
    6.23    boogie_vc "verification condition 3" ...
     7.1 --- a/src/HOL/Boogie/Examples/Boogie_Dijkstra.thy	Thu Aug 23 15:32:22 2012 +0200
     7.2 +++ b/src/HOL/Boogie/Examples/Boogie_Dijkstra.thy	Thu Aug 23 15:44:47 2012 +0200
     7.3 @@ -6,7 +6,6 @@
     7.4  
     7.5  theory Boogie_Dijkstra
     7.6  imports Boogie
     7.7 -uses ("Boogie_Dijkstra.b2i")
     7.8  begin
     7.9  
    7.10  text {*
     8.1 --- a/src/HOL/Boogie/Examples/Boogie_Max.thy	Thu Aug 23 15:32:22 2012 +0200
     8.2 +++ b/src/HOL/Boogie/Examples/Boogie_Max.thy	Thu Aug 23 15:44:47 2012 +0200
     8.3 @@ -6,7 +6,6 @@
     8.4  
     8.5  theory Boogie_Max
     8.6  imports Boogie
     8.7 -uses ("Boogie_Max.b2i")
     8.8  begin
     8.9  
    8.10  text {*
     9.1 --- a/src/HOL/Boogie/Examples/Boogie_Max_Stepwise.thy	Thu Aug 23 15:32:22 2012 +0200
     9.2 +++ b/src/HOL/Boogie/Examples/Boogie_Max_Stepwise.thy	Thu Aug 23 15:44:47 2012 +0200
     9.3 @@ -6,7 +6,6 @@
     9.4  
     9.5  theory Boogie_Max_Stepwise
     9.6  imports Boogie
     9.7 -uses ("Boogie_Max.b2i")
     9.8  begin
     9.9  
    9.10  text {*
    10.1 --- a/src/HOL/Boogie/Examples/VCC_Max.thy	Thu Aug 23 15:32:22 2012 +0200
    10.2 +++ b/src/HOL/Boogie/Examples/VCC_Max.thy	Thu Aug 23 15:44:47 2012 +0200
    10.3 @@ -6,7 +6,6 @@
    10.4  
    10.5  theory VCC_Max
    10.6  imports Boogie
    10.7 -uses ("VCC_Max.b2i")
    10.8  begin
    10.9  
   10.10  text {*
    11.1 --- a/src/HOL/Boogie/Tools/boogie_commands.ML	Thu Aug 23 15:32:22 2012 +0200
    11.2 +++ b/src/HOL/Boogie/Tools/boogie_commands.ML	Thu Aug 23 15:44:47 2012 +0200
    11.3 @@ -14,17 +14,16 @@
    11.4  
    11.5  (* commands *)
    11.6  
    11.7 -fun boogie_open ((quiet, base_name), offsets) thy =
    11.8 +fun boogie_open ((quiet, files), offsets) thy =
    11.9    let
   11.10 +    val ([{src_path = path, text, ...}], thy') = files thy
   11.11 +
   11.12      val ext = "b2i"
   11.13 -    fun check_ext path = snd (Path.split_ext path) = ext orelse
   11.14 +    val _ = snd (Path.split_ext path) = ext orelse
   11.15        error ("Bad file ending of file " ^ Path.print path ^ ", " ^
   11.16          "expected file ending " ^ quote ext)
   11.17  
   11.18 -    val base_path = Path.explode base_name |> tap check_ext
   11.19 -    val (text, thy') = Thy_Load.use_file base_path thy
   11.20 -
   11.21 -    val _ = Boogie_VCs.is_closed thy' orelse
   11.22 +    val _ = Boogie_VCs.is_closed thy orelse
   11.23        error ("Found the beginning of a new Boogie environment, " ^
   11.24          "but another Boogie environment is still open.")
   11.25    in
   11.26 @@ -44,7 +43,7 @@
   11.27  fun get_vc thy vc_name =
   11.28    (case Boogie_VCs.lookup thy vc_name of
   11.29      SOME vc => vc
   11.30 -  | NONE => 
   11.31 +  | NONE =>
   11.32        (case AList.lookup (op =) (Boogie_VCs.state_of thy) vc_name of
   11.33          SOME Boogie_VCs.Proved => error ("The verification condition " ^
   11.34            quote vc_name ^ " has already been proved.")
   11.35 @@ -258,7 +257,7 @@
   11.36      val unproved = map_filter not_proved (Boogie_VCs.state_of thy)
   11.37    in
   11.38      if null unproved then Boogie_VCs.close thy
   11.39 -    else error (Pretty.string_of (Pretty.big_list 
   11.40 +    else error (Pretty.string_of (Pretty.big_list
   11.41        "The following verification conditions have not been proved:"
   11.42        (map Pretty.str unproved)))
   11.43    end
   11.44 @@ -277,7 +276,7 @@
   11.45  val _ =
   11.46    Outer_Syntax.command @{command_spec "boogie_open"}
   11.47      "open a new Boogie environment and load a Boogie-generated .b2i file"
   11.48 -    (scan_opt "quiet" -- Parse.name -- vc_offsets >> 
   11.49 +    (scan_opt "quiet" -- Thy_Load.provide_parse_files "boogie_open" -- vc_offsets >>
   11.50        (Toplevel.theory o boogie_open))
   11.51  
   11.52  
   11.53 @@ -350,7 +349,7 @@
   11.54  val setup = Theory.at_end (fn thy =>
   11.55    let
   11.56      val _ = Boogie_VCs.is_closed thy
   11.57 -      orelse error ("Found the end of the theory, " ^ 
   11.58 +      orelse error ("Found the end of the theory, " ^
   11.59          "but the last Boogie environment is still open.")
   11.60    in NONE end)
   11.61  
    12.1 --- a/src/HOL/Quickcheck_Narrowing.thy	Thu Aug 23 15:32:22 2012 +0200
    12.2 +++ b/src/HOL/Quickcheck_Narrowing.thy	Thu Aug 23 15:44:47 2012 +0200
    12.3 @@ -5,9 +5,6 @@
    12.4  theory Quickcheck_Narrowing
    12.5  imports Quickcheck_Exhaustive
    12.6  keywords "find_unused_assms" :: diag
    12.7 -uses  (* FIXME session files *)
    12.8 -  ("Tools/Quickcheck/PNF_Narrowing_Engine.hs")
    12.9 -  ("Tools/Quickcheck/Narrowing_Engine.hs")
   12.10  begin
   12.11  
   12.12  subsection {* Counterexample generator *}
    13.1 --- a/src/HOL/ROOT	Thu Aug 23 15:32:22 2012 +0200
    13.2 +++ b/src/HOL/ROOT	Thu Aug 23 15:44:47 2012 +0200
    13.3 @@ -2,7 +2,11 @@
    13.4    description {* Classical Higher-order Logic *}
    13.5    options [document_graph]
    13.6    theories Complex_Main
    13.7 -  files "document/root.bib" "document/root.tex"
    13.8 +  files
    13.9 +    "Tools/Quickcheck/Narrowing_Engine.hs"
   13.10 +    "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
   13.11 +    "document/root.bib"
   13.12 +    "document/root.tex"
   13.13  
   13.14  session "HOL-Base" = Pure +
   13.15    description {* Raw HOL base, with minimal tools *}
   13.16 @@ -18,11 +22,17 @@
   13.17    description {* HOL side-entry for Main only, without Complex_Main *}
   13.18    options [document = false]
   13.19    theories Main
   13.20 +  files
   13.21 +    "Tools/Quickcheck/Narrowing_Engine.hs"
   13.22 +    "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
   13.23  
   13.24  session "HOL-Proofs" = Pure +
   13.25    description {* HOL-Main with explicit proof terms *}
   13.26    options [document = false, proofs = 2, parallel_proofs = 0]
   13.27    theories Main
   13.28 +  files
   13.29 +    "Tools/Quickcheck/Narrowing_Engine.hs"
   13.30 +    "Tools/Quickcheck/PNF_Narrowing_Engine.hs"
   13.31  
   13.32  session "HOL-Library" in Library = HOL +
   13.33    description {* Classical Higher-order Logic -- batteries included *}
    14.1 --- a/src/HOL/SPARK/SPARK_Setup.thy	Thu Aug 23 15:32:22 2012 +0200
    14.2 +++ b/src/HOL/SPARK/SPARK_Setup.thy	Thu Aug 23 15:44:47 2012 +0200
    14.3 @@ -8,6 +8,8 @@
    14.4  theory SPARK_Setup
    14.5  imports Word
    14.6  keywords
    14.7 +  "spark_open_vcg" :: thy_load ("vcg", "fdl", "rls") and
    14.8 +  "spark_open_siv" :: thy_load ("siv", "fdl", "rls") and
    14.9    "spark_open" "spark_proof_functions" "spark_types" "spark_end" :: thy_decl and
   14.10    "spark_vc" :: thy_goal and "spark_status" :: diag
   14.11  begin
    15.1 --- a/src/HOL/SPARK/Tools/spark_commands.ML	Thu Aug 23 15:32:22 2012 +0200
    15.2 +++ b/src/HOL/SPARK/Tools/spark_commands.ML	Thu Aug 23 15:44:47 2012 +0200
    15.3 @@ -13,8 +13,22 @@
    15.4  structure SPARK_Commands: SPARK_COMMANDS =
    15.5  struct
    15.6  
    15.7 -(* FIXME proper Thy_Load.use_file, also for fdl_path and rls_path (!?) *)
    15.8 -fun spark_open (vc_name, prfx) thy =
    15.9 +fun spark_open header (prfx, files) thy =
   15.10 +  let
   15.11 +    val ([{src_path, text = vc_text, pos = vc_pos, ...},
   15.12 +      {text = fdl_text, pos = fdl_pos, ...},
   15.13 +      {text = rls_text, pos = rls_pos, ...}], thy') = files thy;
   15.14 +    val base = fst (Path.split_ext (File.full_path (Thy_Load.master_directory thy') src_path));
   15.15 +  in
   15.16 +    SPARK_VCs.set_vcs
   15.17 +      (snd (Fdl_Parser.parse_declarations fdl_pos fdl_text))
   15.18 +      (Fdl_Parser.parse_rules rls_pos rls_text)
   15.19 +      (snd (Fdl_Parser.parse_vcs header vc_pos vc_text))
   15.20 +      base prfx thy'
   15.21 +  end;
   15.22 +
   15.23 +(* FIXME *)
   15.24 +fun spark_open_old (vc_name, prfx) thy =
   15.25    let
   15.26      val ((vc_path, vc_id), vc_text) = Thy_Load.load_file thy (Path.explode vc_name);
   15.27      val (base, header) =
   15.28 @@ -107,7 +121,19 @@
   15.29  val _ =
   15.30    Outer_Syntax.command @{command_spec "spark_open"}
   15.31      "open a new SPARK environment and load a SPARK-generated .vcg or .siv file"
   15.32 -    (Parse.name -- Parse.parname >> (Toplevel.theory o spark_open));
   15.33 +    (Parse.name -- Parse.parname >> (Toplevel.theory o spark_open_old));
   15.34 +
   15.35 +val _ =
   15.36 +  Outer_Syntax.command @{command_spec "spark_open_vcg"}
   15.37 +    "open a new SPARK environment and load a SPARK-generated .vcg file"
   15.38 +    (Parse.parname -- Thy_Load.provide_parse_files "spark_open_vcg"
   15.39 +      >> (Toplevel.theory o spark_open Fdl_Lexer.vcg_header));
   15.40 +
   15.41 +val _ =
   15.42 +  Outer_Syntax.command @{command_spec "spark_open_siv"}
   15.43 +    "open a new SPARK environment and load a SPARK-generated .siv file"
   15.44 +    (Parse.parname -- Thy_Load.provide_parse_files "spark_open_siv"
   15.45 +      >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
   15.46  
   15.47  val pfun_type = Scan.option
   15.48    (Args.parens (Parse.list1 Parse.name) --| Args.colon -- Parse.name);
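The new spark_open_vcg/spark_open_siv commands rely on the extension lists ("vcg", "fdl", "rls") resp. ("siv", "fdl", "rls") declared for them in SPARK_Setup.thy above: Thy_Load.read_files (see thy_load.ML below) expands the single name given after the command into one file per declared extension, which is exactly the three-element list consumed by the pattern match in spark_open. A minimal sketch of that expansion, with "g_c_d" as an illustrative base name:

    (* one path per declared extension, as produced by read_files *)
    map (fn ext => Path.ext ext (Path.explode "g_c_d")) ["vcg", "fdl", "rls"];
    (* = [g_c_d.vcg, g_c_d.fdl, g_c_d.rls], each then read into a Token.file record *)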
    16.1 --- a/src/HOL/Tools/ATP/atp_util.ML	Thu Aug 23 15:32:22 2012 +0200
    16.2 +++ b/src/HOL/Tools/ATP/atp_util.ML	Thu Aug 23 15:44:47 2012 +0200
    16.3 @@ -84,7 +84,7 @@
    16.4        | strip_c_style_comment (_ :: cs) accum = strip_c_style_comment cs accum
    16.5      and strip_spaces_in_list _ [] accum = accum
    16.6        | strip_spaces_in_list true (#"%" :: cs) accum =
    16.7 -        strip_spaces_in_list true (cs |> chop_while (not_equal #"\n") |> snd)
    16.8 +        strip_spaces_in_list true (cs |> take_prefix (not_equal #"\n") |> snd)
    16.9                               accum
   16.10        | strip_spaces_in_list true (#"/" :: #"*" :: cs) accum =
   16.11          strip_c_style_comment cs accum
    17.1 --- a/src/HOL/Tools/Quickcheck/narrowing_generators.ML	Thu Aug 23 15:32:22 2012 +0200
    17.2 +++ b/src/HOL/Tools/Quickcheck/narrowing_generators.ML	Thu Aug 23 15:44:47 2012 +0200
    17.3 @@ -202,12 +202,10 @@
    17.4  (** invocation of Haskell interpreter **)
    17.5  
    17.6  val narrowing_engine =
    17.7 -  Context.>>> (Context.map_theory_result
    17.8 -    (Thy_Load.use_file (Path.explode "Tools/Quickcheck/Narrowing_Engine.hs")))
    17.9 +  File.read (Path.explode "~~/src/HOL/Tools/Quickcheck/Narrowing_Engine.hs")
   17.10  
   17.11  val pnf_narrowing_engine =
   17.12 -  Context.>>> (Context.map_theory_result
   17.13 -    (Thy_Load.use_file (Path.explode "Tools/Quickcheck/PNF_Narrowing_Engine.hs")))
   17.14 +  File.read (Path.explode "~~/src/HOL/Tools/Quickcheck/PNF_Narrowing_Engine.hs")
   17.15  
   17.16  fun exec verbose code =
   17.17    ML_Context.exec (fn () => Secure.use_text ML_Env.local_context (0, "generated code") verbose code)
    18.1 --- a/src/HOL/Tools/SMT/smt_setup_solvers.ML	Thu Aug 23 15:32:22 2012 +0200
    18.2 +++ b/src/HOL/Tools/SMT/smt_setup_solvers.ML	Thu Aug 23 15:44:47 2012 +0200
    18.3 @@ -51,7 +51,7 @@
    18.4    let
    18.5      val empty_line = (fn "" => true | _ => false)
    18.6      val split_first = (fn [] => ("", []) | l :: ls => (l, ls))
    18.7 -    val (l, ls) = split_first (snd (chop_while empty_line lines))
    18.8 +    val (l, ls) = split_first (snd (take_prefix empty_line lines))
    18.9    in (test_outcome solver_name l, ls) end
   18.10  
   18.11  
    19.1 --- a/src/HOL/Tools/SMT/smt_solver.ML	Thu Aug 23 15:32:22 2012 +0200
    19.2 +++ b/src/HOL/Tools/SMT/smt_solver.ML	Thu Aug 23 15:44:47 2012 +0200
    19.3 @@ -96,7 +96,7 @@
    19.4        SMT_Config.with_timeout ctxt (run ctxt name mk_cmd) input
    19.5      val _ = SMT_Config.trace_msg ctxt (pretty "Solver:") err
    19.6  
    19.7 -    val ls = rev (snd (chop_while (equal "") (rev res)))
    19.8 +    val ls = fst (take_suffix (equal "") res)
    19.9      val _ = SMT_Config.trace_msg ctxt (pretty "Result:") ls
   19.10  
   19.11      val _ = return_code <> 0 andalso
    20.1 --- a/src/HOL/Tools/refute.ML	Thu Aug 23 15:32:22 2012 +0200
    20.2 +++ b/src/HOL/Tools/refute.ML	Thu Aug 23 15:44:47 2012 +0200
    20.3 @@ -2772,7 +2772,7 @@
    20.4                (* go back up the stack until we find a level where we can go *)
    20.5                (* to the next sibling node                                   *)
    20.6                let
    20.7 -                val offsets' = snd (chop_while
    20.8 +                val offsets' = snd (take_prefix
    20.9                    (fn off' => off' mod size_elem = size_elem - 1) offsets)
   20.10                in
   20.11                  case offsets' of
    21.1 --- a/src/Pure/Isar/code.ML	Thu Aug 23 15:32:22 2012 +0200
    21.2 +++ b/src/Pure/Isar/code.ML	Thu Aug 23 15:44:47 2012 +0200
    21.3 @@ -1017,7 +1017,7 @@
    21.4      val c = const_eqn thy thm;
    21.5      fun update_subsume thy (thm, proper) eqns = 
    21.6        let
    21.7 -        val args_of = snd o chop_while is_Var o rev o snd o strip_comb
    21.8 +        val args_of = snd o take_prefix is_Var o rev o snd o strip_comb
    21.9            o map_types Type.strip_sorts o fst o Logic.dest_equals o Thm.plain_prop_of;
   21.10          val args = args_of thm;
   21.11          val incr_idx = Logic.incr_indexes ([], Thm.maxidx_of thm + 1);
    22.1 --- a/src/Pure/Isar/keyword.ML	Thu Aug 23 15:32:22 2012 +0200
    22.2 +++ b/src/Pure/Isar/keyword.ML	Thu Aug 23 15:44:47 2012 +0200
    22.3 @@ -47,9 +47,6 @@
    22.4    type spec = (string * string list) * string list
    22.5    val spec: spec -> T
    22.6    val command_spec: (string * spec) * Position.T -> (string * T) * Position.T
    22.7 -  type keywords
    22.8 -  val lexicons_of: keywords -> Scan.lexicon * Scan.lexicon
    22.9 -  val get_keywords: unit -> keywords
   22.10    val get_lexicons: unit -> Scan.lexicon * Scan.lexicon
   22.11    val is_keyword: string -> bool
   22.12    val command_keyword: string -> T option
   22.13 @@ -57,8 +54,6 @@
   22.14    val command_tags: string -> string list
   22.15    val dest: unit -> string list * string list
   22.16    val status: unit -> unit
   22.17 -  val thy_load_commands: keywords -> (string * string list) list
   22.18 -  val define_keywords: string * T option -> keywords -> keywords
   22.19    val define: string * T option -> unit
   22.20    val is_diag: string -> bool
   22.21    val is_control: string -> bool
   22.22 @@ -172,11 +167,6 @@
   22.23  fun make_keywords (lexicons, commands) =
   22.24    Keywords {lexicons = lexicons, commands = commands};
   22.25  
   22.26 -fun map_keywords f (Keywords {lexicons, commands}) =
   22.27 -  make_keywords (f (lexicons, commands));
   22.28 -
   22.29 -fun lexicons_of (Keywords {lexicons, ...}) = lexicons;
   22.30 -
   22.31  local
   22.32  
   22.33  val global_keywords =
   22.34 @@ -186,11 +176,13 @@
   22.35  
   22.36  fun get_keywords () = ! global_keywords;
   22.37  
   22.38 -fun change_keywords f = CRITICAL (fn () => Unsynchronized.change global_keywords f);
   22.39 +fun change_keywords f = CRITICAL (fn () =>
   22.40 +  Unsynchronized.change global_keywords
   22.41 +    (fn Keywords {lexicons, commands} => make_keywords (f (lexicons, commands))));
   22.42  
   22.43  end;
   22.44  
   22.45 -fun get_lexicons () = lexicons_of (get_keywords ());
   22.46 +fun get_lexicons () = get_keywords () |> (fn Keywords {lexicons, ...} => lexicons);
   22.47  fun get_commands () = get_keywords () |> (fn Keywords {commands, ...} => commands);
   22.48  
   22.49  
   22.50 @@ -221,14 +213,9 @@
   22.51    in () end;
   22.52  
   22.53  
   22.54 -fun thy_load_commands (Keywords {commands, ...}) =
   22.55 -  Symtab.fold (fn (name, Keyword {kind, files, ...}) =>
   22.56 -      kind = kind_of thy_load ? cons (name, files)) commands [];
   22.57 -
   22.58 -
   22.59  (* define *)
   22.60  
   22.61 -fun define_keywords (name, opt_kind) = map_keywords (fn ((minor, major), commands) =>
   22.62 +fun define (name, opt_kind) = change_keywords (fn ((minor, major), commands) =>
   22.63    (case opt_kind of
   22.64      NONE =>
   22.65        let
   22.66 @@ -240,8 +227,6 @@
   22.67          val commands' = Symtab.update (name, kind) commands;
   22.68        in ((minor, major'), commands') end));
   22.69  
   22.70 -val define = change_keywords o define_keywords;
   22.71 -
   22.72  
   22.73  (* command categories *)
   22.74  
    23.1 --- a/src/Pure/Isar/token.ML	Thu Aug 23 15:32:22 2012 +0200
    23.2 +++ b/src/Pure/Isar/token.ML	Thu Aug 23 15:44:47 2012 +0200
    23.3 @@ -10,10 +10,10 @@
    23.4      Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
    23.5      Nat | Float | String | AltString | Verbatim | Space | Comment | InternalValue |
    23.6      Error of string | Sync | EOF
    23.7 -  type files = Path.T * (string * Position.T) list
    23.8 +  type file = {src_path: Path.T, text: string, pos: Position.T}
    23.9    datatype value =
   23.10      Text of string | Typ of typ | Term of term | Fact of thm list |
   23.11 -    Attribute of morphism -> attribute | Files of files
   23.12 +    Attribute of morphism -> attribute | Files of file list
   23.13    type T
   23.14    val str_of_kind: kind -> string
   23.15    val position_of: T -> Position.T
   23.16 @@ -44,8 +44,8 @@
   23.17    val content_of: T -> string
   23.18    val unparse: T -> string
   23.19    val text_of: T -> string * string
   23.20 -  val get_files: T -> files option
   23.21 -  val put_files: files -> T -> T
   23.22 +  val get_files: T -> file list option
   23.23 +  val put_files: file list -> T -> T
   23.24    val get_value: T -> value option
   23.25    val map_value: (value -> value) -> T -> T
   23.26    val mk_text: string -> T
   23.27 @@ -78,7 +78,7 @@
   23.28    args.ML).  Note that an assignable ref designates an intermediate
   23.29    state of internalization -- it is NOT meant to persist.*)
   23.30  
   23.31 -type files = Path.T * (string * Position.T) list;  (*master dir, multiple file content*)
   23.32 +type file = {src_path: Path.T, text: string, pos: Position.T};
   23.33  
   23.34  datatype value =
   23.35    Text of string |
   23.36 @@ -86,7 +86,7 @@
   23.37    Term of term |
   23.38    Fact of thm list |
   23.39    Attribute of morphism -> attribute |
   23.40 -  Files of files;
   23.41 +  Files of file list;
   23.42  
   23.43  datatype slot =
   23.44    Slot |
    24.1 --- a/src/Pure/ProofGeneral/pgip_isabelle.ML	Thu Aug 23 15:32:22 2012 +0200
    24.2 +++ b/src/Pure/ProofGeneral/pgip_isabelle.ML	Thu Aug 23 15:44:47 2012 +0200
    24.3 @@ -79,7 +79,7 @@
    24.4                 NONE => (NONE, NONE)
    24.5               | SOME fname =>
    24.6                 let val path = Path.explode fname in
    24.7 -                 if can (Thy_Load.check_file Path.current) path
    24.8 +                 if File.exists path
    24.9                   then (SOME (PgipTypes.pgipurl_of_path path), NONE)
   24.10                   else (NONE, SOME fname)
   24.11                 end);
    25.1 --- a/src/Pure/ProofGeneral/proof_general_pgip.ML	Thu Aug 23 15:32:22 2012 +0200
    25.2 +++ b/src/Pure/ProofGeneral/proof_general_pgip.ML	Thu Aug 23 15:44:47 2012 +0200
    25.3 @@ -594,14 +594,14 @@
    25.4  
    25.5          fun filerefs f =
    25.6              let val thy = thy_name f
    25.7 -                val filerefs = map #1 (#uses (Thy_Load.check_thy [] (Path.dir f) thy))
    25.8 +                val filerefs = map #1 (#uses (Thy_Load.check_thy (Path.dir f) thy))
    25.9              in
   25.10                  issue_pgip (Setrefs {url=url, thyname=NONE, objtype=SOME PgipTypes.ObjFile,
   25.11                                       name=NONE, idtables=[], fileurls=filerefs})
   25.12              end
   25.13  
   25.14          fun thyrefs thy =
   25.15 -            let val thyrefs = #imports (Thy_Load.check_thy [] Path.current thy)
   25.16 +            let val thyrefs = #imports (Thy_Load.check_thy Path.current thy)
   25.17              in
   25.18                  issue_pgip (Setrefs {url=url, thyname=thyname, objtype=SOME PgipTypes.ObjTheory,
   25.19                                       name=NONE, idtables=[{context=NONE, objtype=PgipTypes.ObjTheory,
    26.1 --- a/src/Pure/System/build.scala	Thu Aug 23 15:32:22 2012 +0200
    26.2 +++ b/src/Pure/System/build.scala	Thu Aug 23 15:44:47 2012 +0200
    26.3 @@ -328,7 +328,7 @@
    26.4      def sources(name: String): List[SHA1.Digest] = deps(name).sources.map(_._2)
    26.5    }
    26.6  
    26.7 -  def dependencies(verbose: Boolean, tree: Session_Tree): Deps =
    26.8 +  def dependencies(verbose: Boolean, list_files: Boolean, tree: Session_Tree): Deps =
    26.9      Deps((Map.empty[String, Session_Content] /: tree.topological_order)(
   26.10        { case (deps, (name, info)) =>
   26.11            val (preloaded, parent_syntax, parent_errors) =
   26.12 @@ -341,7 +341,7 @@
   26.13              }
   26.14            val thy_info = new Thy_Info(new Thy_Load(preloaded, parent_syntax))
   26.15  
   26.16 -          if (verbose) {
   26.17 +          if (verbose || list_files) {
   26.18              val groups =
   26.19                if (info.groups.isEmpty) ""
   26.20                else info.groups.mkString(" (", " ", ")")
   26.21 @@ -357,11 +357,15 @@
   26.22            val syntax = thy_deps.make_syntax
   26.23  
   26.24            val all_files =
   26.25 -            thy_deps.deps.map({ case (n, h) =>
   26.26 -              val thy = Path.explode(n.node).expand
   26.27 -              val uses = h.uses.map(p => (Path.explode(n.dir) + Path.explode(p._1)).expand)
   26.28 +            (thy_deps.deps.map({ case (n, h) =>
   26.29 +              val thy = Path.explode(n.node)
   26.30 +              val uses = h.uses.map(p => Path.explode(n.dir) + Path.explode(p._1))
   26.31                thy :: uses
   26.32 -            }).flatten ::: info.files.map(file => info.dir + file)
   26.33 +            }).flatten ::: info.files.map(file => info.dir + file)).map(_.expand)
   26.34 +
   26.35 +          if (list_files)
   26.36 +            echo(cat_lines(all_files.map(_.implode).sorted.map("  " + _)))
   26.37 +
   26.38            val sources =
   26.39              try { all_files.map(p => (p, SHA1.digest(p))) }
   26.40              catch {
   26.41 @@ -378,7 +382,7 @@
   26.42    {
   26.43      val (_, tree) =
   26.44        find_sessions(Options.init(), dirs.map((false, _))).required(false, Nil, List(session))
   26.45 -    dependencies(false, tree)(session)
   26.46 +    dependencies(false, false, tree)(session)
   26.47    }
   26.48  
   26.49    def outer_syntax(session: String): Outer_Syntax =
   26.50 @@ -531,6 +535,7 @@
   26.51      more_dirs: List[(Boolean, Path)] = Nil,
   26.52      session_groups: List[String] = Nil,
   26.53      max_jobs: Int = 1,
   26.54 +    list_files: Boolean = false,
   26.55      no_build: Boolean = false,
   26.56      build_options: List[String] = Nil,
   26.57      system_mode: Boolean = false,
   26.58 @@ -540,7 +545,7 @@
   26.59      val options = (Options.init() /: build_options)(_ + _)
   26.60      val (descendants, tree) =
   26.61        find_sessions(options, more_dirs).required(all_sessions, session_groups, sessions)
   26.62 -    val deps = dependencies(verbose, tree)
   26.63 +    val deps = dependencies(verbose, list_files, tree)
   26.64      val queue = Queue(tree)
   26.65  
   26.66      def make_stamp(name: String): String =
   26.67 @@ -692,6 +697,7 @@
   26.68            Properties.Value.Boolean(build_heap) ::
   26.69            Properties.Value.Boolean(clean_build) ::
   26.70            Properties.Value.Int(max_jobs) ::
   26.71 +          Properties.Value.Boolean(list_files) ::
   26.72            Properties.Value.Boolean(no_build) ::
   26.73            Properties.Value.Boolean(system_mode) ::
   26.74            Properties.Value.Boolean(verbose) ::
   26.75 @@ -700,7 +706,7 @@
   26.76                select_dirs.map(d => (true, Path.explode(d))) :::
   26.77                include_dirs.map(d => (false, Path.explode(d)))
   26.78              build(all_sessions, build_heap, clean_build, dirs, session_groups,
   26.79 -              max_jobs, no_build, build_options, system_mode, verbose, sessions)
   26.80 +              max_jobs, list_files, no_build, build_options, system_mode, verbose, sessions)
   26.81          case _ => error("Bad arguments:\n" + cat_lines(args))
   26.82        }
   26.83      }
    27.1 --- a/src/Pure/Thy/thy_info.ML	Thu Aug 23 15:32:22 2012 +0200
    27.2 +++ b/src/Pure/Thy/thy_info.ML	Thu Aug 23 15:44:47 2012 +0200
    27.3 @@ -236,16 +236,16 @@
    27.4      fun commit () = update_thy deps theory;
    27.5    in (theory, present, commit) end;
    27.6  
    27.7 -fun check_deps base_keywords dir name =
    27.8 +fun check_deps dir name =
    27.9    (case lookup_deps name of
   27.10      SOME NONE => (true, NONE, get_imports name, [], [])
   27.11    | NONE =>
   27.12 -      let val {master, text, imports, keywords, uses} = Thy_Load.check_thy base_keywords dir name
   27.13 +      let val {master, text, imports, keywords, uses} = Thy_Load.check_thy dir name
   27.14        in (false, SOME (make_deps master imports, text), imports, uses, keywords) end
   27.15    | SOME (SOME {master, ...}) =>
   27.16        let
   27.17          val {master = master', text = text', imports = imports', uses = uses', keywords = keywords'}
   27.18 -          = Thy_Load.check_thy base_keywords dir name;
   27.19 +          = Thy_Load.check_thy dir name;
   27.20          val deps' = SOME (make_deps master' imports', text');
   27.21          val current =
   27.22            #2 master = #2 master' andalso
   27.23 @@ -256,29 +256,29 @@
   27.24  
   27.25  in
   27.26  
   27.27 -fun require_thys initiators dir strs result =
   27.28 -      fold_map (require_thy initiators dir) strs result |>> forall I
   27.29 -and require_thy initiators dir str (base_keywords, tasks) =
   27.30 +fun require_thys initiators dir strs tasks =
   27.31 +      fold_map (require_thy initiators dir) strs tasks |>> forall I
   27.32 +and require_thy initiators dir str tasks =
   27.33    let
   27.34      val path = Path.expand (Path.explode str);
   27.35      val name = Path.implode (Path.base path);
   27.36    in
   27.37      (case try (Graph.get_node tasks) name of
   27.38 -      SOME task => (task_finished task, (base_keywords, tasks))
   27.39 +      SOME task => (task_finished task, tasks)
   27.40      | NONE =>
   27.41          let
   27.42            val dir' = Path.append dir (Path.dir path);
   27.43            val _ = member (op =) initiators name andalso error (cycle_msg initiators);
   27.44  
   27.45 -          val (current, deps, imports, uses, keywords) = check_deps base_keywords dir' name
   27.46 +          val (current, deps, imports, uses, keywords) = check_deps dir' name
   27.47              handle ERROR msg => cat_error msg
   27.48                (loader_msg "the error(s) above occurred while examining theory" [name] ^
   27.49                  required_by "\n" initiators);
   27.50  
   27.51            val parents = map base_name imports;
   27.52 -          val (parents_current, (base_keywords', tasks')) =
   27.53 +          val (parents_current, tasks') =
   27.54              require_thys (name :: initiators)
   27.55 -              (Path.append dir (master_dir (Option.map #1 deps))) imports (base_keywords, tasks);
   27.56 +              (Path.append dir (master_dir (Option.map #1 deps))) imports tasks;
   27.57  
   27.58            val all_current = current andalso parents_current;
   27.59            val task =
   27.60 @@ -292,9 +292,8 @@
   27.61                      val load = load_thy initiators update_time dep text name uses keywords;
   27.62                    in Task (parents, load) end);
   27.63  
   27.64 -          val base_keywords'' = keywords @ base_keywords';
   27.65            val tasks'' = new_entry name parents task tasks';
   27.66 -        in (all_current, (base_keywords'', tasks'')) end)
   27.67 +        in (all_current, tasks'') end)
   27.68    end;
   27.69  
   27.70  end;
   27.71 @@ -303,7 +302,7 @@
   27.72  (* use_thy *)
   27.73  
   27.74  fun use_thys_wrt dir arg =
   27.75 -  schedule_tasks (snd (snd (require_thys [] dir arg ([], Graph.empty))));
   27.76 +  schedule_tasks (snd (require_thys [] dir arg Graph.empty));
   27.77  
   27.78  val use_thys = use_thys_wrt Path.current;
   27.79  val use_thy = use_thys o single;
   27.80 @@ -326,7 +325,7 @@
   27.81  fun register_thy theory =
   27.82    let
   27.83      val name = Context.theory_name theory;
   27.84 -    val {master, ...} = Thy_Load.check_thy [] (Thy_Load.master_directory theory) name;
   27.85 +    val {master, ...} = Thy_Load.check_thy (Thy_Load.master_directory theory) name;
   27.86      val imports = Thy_Load.imports_of theory;
   27.87    in
   27.88      NAMED_CRITICAL "Thy_Info" (fn () =>
    28.1 --- a/src/Pure/Thy/thy_load.ML	Thu Aug 23 15:32:22 2012 +0200
    28.2 +++ b/src/Pure/Thy/thy_load.ML	Thu Aug 23 15:44:47 2012 +0200
    28.3 @@ -8,12 +8,13 @@
    28.4  sig
    28.5    val master_directory: theory -> Path.T
    28.6    val imports_of: theory -> string list
    28.7 -  val provide: Path.T * (Path.T * SHA1.digest) -> theory -> theory
    28.8 -  val check_file: Path.T -> Path.T -> Path.T
    28.9    val thy_path: Path.T -> Path.T
   28.10 -  val check_thy: Thy_Header.keywords -> Path.T -> string ->
   28.11 +  val parse_files: string -> (theory -> Token.file list) parser
   28.12 +  val check_thy: Path.T -> string ->
   28.13     {master: Path.T * SHA1.digest, text: string, imports: string list,
   28.14      uses: (Path.T * bool) list, keywords: Thy_Header.keywords}
   28.15 +  val provide: Path.T * SHA1.digest -> theory -> theory
   28.16 +  val provide_parse_files: string -> (theory -> Token.file list * theory) parser
   28.17    val load_file: theory -> Path.T -> (Path.T * SHA1.digest) * string
   28.18    val use_file: Path.T -> theory -> string * theory
   28.19    val loaded_files: theory -> Path.T list
   28.20 @@ -23,7 +24,6 @@
   28.21    val begin_theory: Path.T -> Thy_Header.header -> theory list -> theory
   28.22    val load_thy: int -> Path.T -> Thy_Header.header -> Position.T -> string ->
   28.23      theory list -> theory * unit future
   28.24 -  val parse_files: string -> (theory -> Token.files) parser
   28.25    val set_master_path: Path.T -> unit
   28.26    val get_master_path: unit -> Path.T
   28.27  end;
   28.28 @@ -36,7 +36,7 @@
   28.29  type files =
   28.30   {master_dir: Path.T,  (*master directory of theory source*)
   28.31    imports: string list,  (*source specification of imports*)
   28.32 -  provided: (Path.T * (Path.T * SHA1.digest)) list};  (*source path, physical path, digest*)
   28.33 +  provided: (Path.T * SHA1.digest) list};  (*source path, digest*)
   28.34  
   28.35  fun make_files (master_dir, imports, provided): files =
   28.36   {master_dir = master_dir, imports = imports, provided = provided};
   28.37 @@ -59,17 +59,29 @@
   28.38  
   28.39  fun put_deps master_dir imports = map_files (fn _ => (master_dir, imports, []));
   28.40  
   28.41 -fun provide (src_path, path_id) =
   28.42 -  map_files (fn (master_dir, imports, provided) =>
   28.43 -    if AList.defined (op =) provided src_path then
   28.44 -      error ("Duplicate use of source file: " ^ Path.print src_path)
   28.45 -    else (master_dir, imports, (src_path, path_id) :: provided));
   28.46 -
   28.47  
   28.48  (* inlined files *)
   28.49  
   28.50  fun check_file dir file = File.check_file (File.full_path dir file);
   28.51  
   28.52 +fun make_file dir file =
   28.53 +  let val full_path = check_file dir file
   28.54 +  in {src_path = file, text = File.read full_path, pos = Path.position full_path} end;
   28.55 +
   28.56 +fun read_files cmd dir path =
   28.57 +  let
   28.58 +    val paths =
   28.59 +      (case Keyword.command_files cmd of
   28.60 +        [] => [path]
   28.61 +      | exts => map (fn ext => Path.ext ext path) exts);
   28.62 +  in map (make_file dir) paths end;
   28.63 +
   28.64 +fun parse_files cmd =
   28.65 +  Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
   28.66 +    (case Token.get_files tok of
   28.67 +      SOME files => files
   28.68 +    | NONE => read_files cmd (master_directory thy) (Path.explode name)));
   28.69 +
   28.70  local
   28.71  
   28.72  fun clean ((i1, t1) :: (i2, t2) :: toks) =
   28.73 @@ -89,42 +101,8 @@
   28.74          handle ERROR msg => error (msg ^ Token.pos_of tok)
   28.75      else NONE);
   28.76  
   28.77 -fun command_files path exts =
   28.78 -  if null exts then [path]
   28.79 -  else map (fn ext => Path.ext ext path) exts;
   28.80 -
   28.81  in
   28.82  
   28.83 -fun find_files syntax text =
   28.84 -  let val thy_load_commands = Keyword.thy_load_commands syntax in
   28.85 -    if exists (fn (cmd, _) => String.isSubstring cmd text) thy_load_commands then
   28.86 -      Thy_Syntax.parse_tokens (Keyword.lexicons_of syntax) Position.none text
   28.87 -      |> Thy_Syntax.parse_spans
   28.88 -      |> maps
   28.89 -        (fn Thy_Syntax.Span (Thy_Syntax.Command (cmd, _), toks) =>
   28.90 -              (case AList.lookup (op =) thy_load_commands cmd of
   28.91 -                SOME exts =>
   28.92 -                  (case find_file toks of
   28.93 -                    SOME (_, path) => command_files path exts
   28.94 -                  | NONE => [])
   28.95 -              | NONE => [])
   28.96 -          | _ => [])
   28.97 -    else []
   28.98 -  end;
   28.99 -
  28.100 -fun read_files cmd dir path =
  28.101 -  let
  28.102 -    val files =
  28.103 -      command_files path (Keyword.command_files cmd)
  28.104 -      |> map (check_file dir #> (fn p => (File.read p, Path.position p)));
  28.105 -  in (dir, files) end;
  28.106 -
  28.107 -fun parse_files cmd =
  28.108 -  Scan.ahead Parse.not_eof -- Parse.path >> (fn (tok, name) => fn thy =>
  28.109 -    (case Token.get_files tok of
  28.110 -      SOME files => files
  28.111 -    | NONE => read_files cmd (master_directory thy) (Path.explode name)));
  28.112 -
  28.113  fun resolve_files master_dir span =
  28.114    (case span of
  28.115      Thy_Syntax.Span (Thy_Syntax.Command (cmd, pos), toks) =>
  28.116 @@ -147,7 +125,7 @@
  28.117  
  28.118  val thy_path = Path.ext "thy";
  28.119  
  28.120 -fun check_thy base_keywords dir thy_name =
  28.121 +fun check_thy dir thy_name =
  28.122    let
  28.123      val path = thy_path (Path.basic thy_name);
  28.124      val master_file = check_file dir path;
  28.125 @@ -156,19 +134,27 @@
  28.126      val {name, imports, uses, keywords} = Thy_Header.read (Path.position master_file) text;
  28.127      val _ = thy_name <> name andalso
  28.128        error ("Bad file name " ^ Path.print path ^ " for theory " ^ quote name);
  28.129 -
  28.130 -    val syntax =
  28.131 -      fold (Keyword.define_keywords o apsnd (Option.map Keyword.spec))
  28.132 -        (keywords @ base_keywords) (Keyword.get_keywords ());
  28.133 -    val more_uses = map (rpair false) (find_files syntax text);
  28.134    in
  28.135     {master = (master_file, SHA1.digest text), text = text,
  28.136 -    imports = imports, uses = uses @ more_uses, keywords = keywords}
  28.137 +    imports = imports, uses = uses, keywords = keywords}
  28.138    end;
  28.139  
  28.140  
  28.141  (* load files *)
  28.142  
  28.143 +fun provide (src_path, id) =
  28.144 +  map_files (fn (master_dir, imports, provided) =>
  28.145 +    if AList.defined (op =) provided src_path then
  28.146 +      error ("Duplicate use of source file: " ^ Path.print src_path)
  28.147 +    else (master_dir, imports, (src_path, id) :: provided));
  28.148 +
  28.149 +fun provide_parse_files cmd =
  28.150 +  parse_files cmd >> (fn files => fn thy =>
  28.151 +    let
  28.152 +      val fs = files thy;
  28.153 +      val thy' = fold (fn {src_path, text, ...} => provide (src_path, SHA1.digest text)) fs thy;
  28.154 +    in (fs, thy') end);
  28.155 +
  28.156  fun load_file thy src_path =
  28.157    let
  28.158      val full_path = check_file (master_directory thy) src_path;
  28.159 @@ -178,15 +164,17 @@
  28.160  
  28.161  fun use_file src_path thy =
  28.162    let
  28.163 -    val (path_id, text) = load_file thy src_path;
  28.164 -    val thy' = provide (src_path, path_id) thy;
  28.165 +    val ((_, id), text) = load_file thy src_path;
  28.166 +    val thy' = provide (src_path, id) thy;
  28.167    in (text, thy') end;
  28.168  
  28.169 -val loaded_files = map (#1 o #2) o #provided o Files.get;
  28.170 +fun loaded_files thy =
  28.171 +  let val {master_dir, provided, ...} = Files.get thy
  28.172 +  in map (File.full_path master_dir o #1) provided end;
  28.173  
  28.174  fun load_current thy =
  28.175    #provided (Files.get thy) |>
  28.176 -    forall (fn (src_path, (_, id)) =>
  28.177 +    forall (fn (src_path, id) =>
  28.178        (case try (load_file thy) src_path of
  28.179          NONE => false
  28.180        | SOME ((_, id'), _) => id = id'));
  28.181 @@ -208,7 +196,7 @@
  28.182        val _ = eval_file path text;
  28.183        val _ = Context.>> Local_Theory.propagate_ml_env;
  28.184  
  28.185 -      val provide = provide (src_path, (path, id));
  28.186 +      val provide = provide (src_path, id);
  28.187        val _ = Context.>> (Context.mapping provide (Local_Theory.background_theory provide));
  28.188      in () end;
  28.189  
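Taken together, parse_files/provide_parse_files and the Token.file record ({src_path: Path.T, text: string, pos: Position.T}, see token.ML above) give thy_load commands a uniform interface: the parser yields a function from the theory to the loaded files, and provide records each file's digest so that loaded_files/load_current can check it later. A minimal sketch of a consumer, with "text_file" as a hypothetical command name:

    val _ =
      Outer_Syntax.command (("text_file", Keyword.thy_load), Position.none)
        "sketch of a thy_load command using the new interface"
        (Thy_Load.provide_parse_files "text_file" >> (fn files =>
          Toplevel.theory (fn thy =>
            let
              (* text holds the file content, pos its position for error messages;
                 the file is already recorded in the theory's `provided` table *)
              val ([{src_path, text, pos}], thy') = files thy;
            in thy' end)));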
    29.1 --- a/src/Pure/library.ML	Thu Aug 23 15:32:22 2012 +0200
    29.2 +++ b/src/Pure/library.ML	Thu Aug 23 15:44:47 2012 +0200
    29.3 @@ -74,7 +74,6 @@
    29.4    val take: int -> 'a list -> 'a list
    29.5    val drop: int -> 'a list -> 'a list
    29.6    val chop: int -> 'a list -> 'a list * 'a list
    29.7 -  val chop_while: ('a -> bool) -> 'a list -> 'a list * 'a list
    29.8    val chop_groups: int -> 'a list -> 'a list list
    29.9    val nth: 'a list -> int -> 'a
   29.10    val nth_list: 'a list list -> int -> 'a list
   29.11 @@ -413,10 +412,6 @@
   29.12    | chop _ [] = ([], [])
   29.13    | chop n (x :: xs) = chop (n - 1) xs |>> cons x;
   29.14  
   29.15 -fun chop_while P [] = ([], [])
   29.16 -  | chop_while P (ys as x :: xs) =
   29.17 -      if P x then chop_while P xs |>> cons x else ([], ys);
   29.18 -
   29.19  fun chop_groups n list =
   29.20    (case chop (Int.max (n, 1)) list of
   29.21      ([], _) => []
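The removed chop_while coincides with the pre-existing take_prefix (both split off the longest prefix satisfying the predicate), which is why the call sites in atp_util.ML, smt_setup_solvers.ML, refute.ML, code.ML and case_product.ML could be switched one-for-one; smt_solver.ML uses the mirrored take_suffix instead of reversing twice. A small illustration:

    take_prefix (fn n => n < 3) [1, 2, 5, 1];        (* ([1, 2], [5, 1]) *)
    take_suffix (fn s => s = "") ["a", "b", "", ""]; (* (["a", "b"], ["", ""]) *)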
    30.1 --- a/src/Pure/pure_syn.ML	Thu Aug 23 15:32:22 2012 +0200
    30.2 +++ b/src/Pure/pure_syn.ML	Thu Aug 23 15:44:47 2012 +0200
    30.3 @@ -18,15 +18,13 @@
    30.4  val _ =
    30.5    Outer_Syntax.command
    30.6      (("ML_file", Keyword.tag_ml Keyword.thy_load), Position.none) "ML text from file"
    30.7 -    (Scan.ahead Parse.path -- Thy_Load.parse_files "ML_file"
    30.8 -      >> (fn (name, files) => Toplevel.generic_theory (fn gthy =>
    30.9 +    (Thy_Load.parse_files "ML_file" >> (fn files => Toplevel.generic_theory (fn gthy =>
   30.10          let
   30.11 -          val src_path = Path.explode name;
   30.12 -          val (dir, [(txt, pos)]) = files (Context.theory_of gthy);
   30.13 -          val provide = Thy_Load.provide (src_path, (File.full_path dir src_path, SHA1.digest txt));
   30.14 +          val [{src_path, text, pos}] = files (Context.theory_of gthy);
   30.15 +          val provide = Thy_Load.provide (src_path, SHA1.digest text);
   30.16          in
   30.17            gthy
   30.18 -          |> ML_Context.exec (fn () => ML_Context.eval_text true pos txt)
   30.19 +          |> ML_Context.exec (fn () => ML_Context.eval_text true pos text)
   30.20            |> Local_Theory.propagate_ml_env
   30.21            |> Context.mapping provide (Local_Theory.background_theory provide)
   30.22          end)));
    31.1 --- a/src/Tools/case_product.ML	Thu Aug 23 15:32:22 2012 +0200
    31.2 +++ b/src/Tools/case_product.ML	Thu Aug 23 15:44:47 2012 +0200
    31.3 @@ -48,8 +48,8 @@
    31.4      val concl = Thm.concl_of thm1
    31.5  
    31.6      fun is_consumes t = not (Logic.strip_assums_concl t aconv concl)
    31.7 -    val (p_cons1, p_cases1) = chop_while is_consumes (Thm.prems_of thm1)
    31.8 -    val (p_cons2, p_cases2) = chop_while is_consumes (Thm.prems_of thm2)
    31.9 +    val (p_cons1, p_cases1) = take_prefix is_consumes (Thm.prems_of thm1)
   31.10 +    val (p_cons2, p_cases2) = take_prefix is_consumes (Thm.prems_of thm2)
   31.11  
   31.12      val p_cases_prod = map (fn p1 => map (fn p2 =>
   31.13        let