merged

author:     nipkow
date:       Thu, 15 Dec 2011 09:13:32 +0100
changeset:  45886 728cc8553471
parent:     45884 58a10da12812 (diff)
parent:     45885 19ee710d9c14 (current diff)
child:      45887 bfb5234a70ba
files:      src/HOL/IsaMakefile
--- a/Admin/Benchmarks/HOL-datatype/Brackin.thy	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,43 +0,0 @@
-(*  Title:      Admin/Benchmarks/HOL-datatype/Brackin.thy
-
-A couple from Steve Brackin's work.
-*)
-
-theory Brackin imports Main begin
-
-datatype   T = X1 | X2 | X3 | X4 | X5 | X6 | X7 | X8 | X9 | X10 | X11 |
-                X12 | X13 | X14 | X15 | X16 | X17 | X18 | X19 | X20 | X21 |
-                X22 | X23 | X24 | X25 | X26 | X27 | X28 | X29 | X30 | X31 |
-                X32 | X33 | X34
-
-datatype  
-  ('a, 'b, 'c) TY1 =
-      NoF__
-    | Fk__ 'a "('a, 'b, 'c) TY2"
-and
-  ('a, 'b, 'c) TY2 =
-      Ta__ bool
-    | Td__ bool
-    | Tf__ "('a, 'b, 'c) TY1"
-    | Tk__ bool
-    | Tp__ bool
-    | App__ 'a "('a, 'b, 'c) TY1" "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY3"
-    | Pair__ "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY2"
-and
-  ('a, 'b, 'c) TY3 =
-      NoS__
-    | Fresh__ "('a, 'b, 'c) TY2"
-    | Trustworthy__ 'a
-    | PrivateKey__ 'a 'b 'c
-    | PublicKey__ 'a 'b 'c
-    | Conveyed__ 'a "('a, 'b, 'c) TY2"
-    | Possesses__ 'a "('a, 'b, 'c) TY2"
-    | Received__ 'a "('a, 'b, 'c) TY2"
-    | Recognizes__ 'a "('a, 'b, 'c) TY2"
-    | NeverMalFromSelf__ 'a 'b "('a, 'b, 'c) TY2"
-    | Sends__ 'a "('a, 'b, 'c) TY2" 'b
-    | SharedSecret__ 'a "('a, 'b, 'c) TY2" 'b
-    | Believes__ 'a "('a, 'b, 'c) TY3"
-    | And__ "('a, 'b, 'c) TY3" "('a, 'b, 'c) TY3"
-
-end
--- a/Admin/Benchmarks/HOL-datatype/Instructions.thy	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,162 +0,0 @@
-(*  Title:      Admin/Benchmarks/HOL-datatype/Instructions.thy
-
-Example from Konrad: 68000 instruction set.
-*)
-
-theory Instructions imports Main begin
-
-datatype Size = Byte | Word | Long
-
-datatype DataRegister
-              = RegD0
-              | RegD1
-              | RegD2
-              | RegD3
-              | RegD4
-              | RegD5
-              | RegD6
-              | RegD7
-
-datatype AddressRegister
-              = RegA0
-              | RegA1
-              | RegA2
-              | RegA3
-              | RegA4
-              | RegA5
-              | RegA6
-              | RegA7
-
-datatype DataOrAddressRegister
-              = data DataRegister
-              | address AddressRegister
-
-datatype Condition
-              = Hi
-              | Ls
-              | Cc
-              | Cs
-              | Ne
-              | Eq
-              | Vc
-              | Vs
-              | Pl
-              | Mi
-              | Ge
-              | Lt
-              | Gt
-              | Le
-
-datatype AddressingMode
-        = immediate nat
-        | direct DataOrAddressRegister
-        | indirect AddressRegister
-        | postinc AddressRegister
-        | predec AddressRegister
-        | indirectdisp nat AddressRegister
-        | indirectindex nat AddressRegister DataOrAddressRegister Size
-        | absolute nat
-        | pcdisp nat
-        | pcindex nat DataOrAddressRegister Size
-
-datatype M68kInstruction
-    = ABCD AddressingMode AddressingMode
-    | ADD Size AddressingMode AddressingMode
-    | ADDA Size AddressingMode AddressRegister
-    | ADDI Size nat AddressingMode
-    | ADDQ Size nat AddressingMode
-    | ADDX Size AddressingMode AddressingMode
-    | AND Size AddressingMode AddressingMode
-    | ANDI Size nat AddressingMode
-    | ANDItoCCR nat
-    | ANDItoSR nat
-    | ASL Size AddressingMode DataRegister
-    | ASLW AddressingMode
-    | ASR Size AddressingMode DataRegister
-    | ASRW AddressingMode
-    | Bcc Condition Size nat
-    | BTST Size AddressingMode AddressingMode
-    | BCHG Size AddressingMode AddressingMode
-    | BCLR Size AddressingMode AddressingMode
-    | BSET Size AddressingMode AddressingMode
-    | BRA Size nat
-    | BSR Size nat
-    | CHK AddressingMode DataRegister
-    | CLR Size AddressingMode
-    | CMP Size AddressingMode DataRegister
-    | CMPA Size AddressingMode AddressRegister
-    | CMPI Size nat AddressingMode
-    | CMPM Size AddressRegister AddressRegister
-    | DBT DataRegister nat
-    | DBF DataRegister nat
-    | DBcc Condition DataRegister nat
-    | DIVS AddressingMode DataRegister
-    | DIVU AddressingMode DataRegister
-    | EOR Size DataRegister AddressingMode
-    | EORI Size nat AddressingMode
-    | EORItoCCR nat
-    | EORItoSR nat
-    | EXG DataOrAddressRegister DataOrAddressRegister
-    | EXT Size DataRegister
-    | ILLEGAL
-    | JMP AddressingMode
-    | JSR AddressingMode
-    | LEA AddressingMode AddressRegister
-    | LINK AddressRegister nat
-    | LSL Size AddressingMode DataRegister
-    | LSLW AddressingMode
-    | LSR Size AddressingMode DataRegister
-    | LSRW AddressingMode
-    | MOVE Size AddressingMode AddressingMode
-    | MOVEtoCCR AddressingMode
-    | MOVEtoSR AddressingMode
-    | MOVEfromSR AddressingMode
-    | MOVEtoUSP AddressingMode
-    | MOVEfromUSP AddressingMode
-    | MOVEA Size AddressingMode AddressRegister
-    | MOVEMto Size AddressingMode "DataOrAddressRegister list"
-    | MOVEMfrom Size "DataOrAddressRegister list" AddressingMode
-    | MOVEP Size AddressingMode AddressingMode
-    | MOVEQ nat DataRegister
-    | MULS AddressingMode DataRegister
-    | MULU AddressingMode DataRegister
-    | NBCD AddressingMode
-    | NEG Size AddressingMode
-    | NEGX Size AddressingMode
-    | NOP
-    | NOT Size AddressingMode
-    | OR Size AddressingMode AddressingMode
-    | ORI Size nat AddressingMode
-    | ORItoCCR nat
-    | ORItoSR nat
-    | PEA AddressingMode
-    | RESET
-    | ROL Size AddressingMode DataRegister
-    | ROLW AddressingMode
-    | ROR Size AddressingMode DataRegister
-    | RORW AddressingMode
-    | ROXL Size AddressingMode DataRegister
-    | ROXLW AddressingMode
-    | ROXR Size AddressingMode DataRegister
-    | ROXRW AddressingMode
-    | RTE
-    | RTR
-    | RTS
-    | SBCD AddressingMode AddressingMode
-    | ST AddressingMode
-    | SF AddressingMode
-    | Scc Condition AddressingMode
-    | STOP nat
-    | SUB Size AddressingMode AddressingMode
-    | SUBA Size AddressingMode AddressingMode
-    | SUBI Size nat AddressingMode
-    | SUBQ Size nat AddressingMode
-    | SUBX Size AddressingMode AddressingMode
-    | SWAP DataRegister
-    | TAS AddressingMode
-    | TRAP nat
-    | TRAPV
-    | TST Size AddressingMode
-    | UNLK AddressRegister
-
-end
--- a/Admin/Benchmarks/HOL-datatype/ROOT.ML	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-(*  Title:      Admin/Benchmarks/HOL-datatype/ROOT.ML
-
-Some rather large datatype examples (from John Harrison).
-*)
-
-val tests = ["Brackin", "Instructions", "SML", "Verilog"];
-
-Toplevel.timing := true;
-
-warning "\nset quick_and_dirty\n"; quick_and_dirty := true;
-use_thys tests;
-
-warning "\nreset quick_and_dirty\n"; quick_and_dirty := false;
-List.app Thy_Info.remove_thy tests;
-use_thys tests;
--- a/Admin/Benchmarks/HOL-datatype/SML.thy	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,91 +0,0 @@
-(*  Title:      Admin/Benchmarks/HOL-datatype/SML.thy
-
-Example from Myra: part of the syntax of SML.
-*)
-
-theory SML imports Main begin
-
-datatype
-  string = EMPTY_STRING | CONS_STRING nat string
-
-datatype
-   strid = STRID string
-
-datatype
-   var = VAR string
-
-datatype
-   con = CON string
-
-datatype
-   scon = SCINT nat | SCSTR string
-
-datatype
-   excon = EXCON string
-
-datatype
-   label = LABEL string
-
-datatype
-  'a nonemptylist = Head_and_tail 'a "'a list"
-
-datatype
-  'a long = BASE 'a | QUALIFIED strid "'a long"
-
-datatype
-   atpat_e = WILDCARDatpat_e
-           | SCONatpat_e scon
-           | VARatpat_e var
-           | CONatpat_e "con long"
-           | EXCONatpat_e "excon long"
-           | RECORDatpat_e "patrow_e option"
-           | PARatpat_e pat_e
-and
-   patrow_e = DOTDOTDOT_e
-            | PATROW_e label pat_e "patrow_e option"
-and
-   pat_e = ATPATpat_e atpat_e
-         | CONpat_e "con long" atpat_e
-         | EXCONpat_e "excon long" atpat_e
-         | LAYEREDpat_e var pat_e
-and
-   conbind_e = CONBIND_e con "conbind_e option"
-and
-   datbind_e = DATBIND_e conbind_e "datbind_e option"
-and
-   exbind_e = EXBIND1_e excon "exbind_e option"
-            | EXBIND2_e excon "excon long" "exbind_e option"
-and
-   atexp_e = SCONatexp_e scon
-           | VARatexp_e "var long"
-           | CONatexp_e "con long"
-           | EXCONatexp_e "excon long"
-           | RECORDatexp_e "exprow_e option"
-           | LETatexp_e dec_e exp_e
-           | PARatexp_e exp_e
-and
-   exprow_e = EXPROW_e label exp_e "exprow_e option"
-and
-   exp_e = ATEXPexp_e atexp_e
-         | APPexp_e exp_e atexp_e
-         | HANDLEexp_e exp_e match_e
-         | RAISEexp_e exp_e
-         | FNexp_e match_e
-and
-   match_e = MATCH_e mrule_e "match_e option"
-and
-   mrule_e = MRULE_e pat_e exp_e
-and
-   dec_e = VALdec_e valbind_e
-         | DATATYPEdec_e datbind_e
-         | ABSTYPEdec_e datbind_e dec_e
-         | EXCEPTdec_e exbind_e
-         | LOCALdec_e dec_e dec_e
-         | OPENdec_e "strid long nonemptylist"
-         | EMPTYdec_e
-         | SEQdec_e dec_e dec_e
-and
-   valbind_e = PLAINvalbind_e pat_e exp_e "valbind_e option"
-             | RECvalbind_e valbind_e
-
-end
--- a/Admin/Benchmarks/HOL-datatype/Verilog.thy	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,138 +0,0 @@
-(*  Title:      Admin/Benchmarks/HOL-datatype/Verilog.thy
-
-Example from Daryl: a Verilog grammar.
-*)
-
-theory Verilog imports Main begin
-
-datatype
-  Source_text
-     = module string "string list" "Module_item list"
-     | Source_textMeta string
-and
-  Module_item
-     = "declaration" Declaration
-     | initial Statement
-     | always Statement
-     | assign Lvalue Expression
-     | Module_itemMeta string
-and
-  Declaration
-     = reg_declaration "Range option" "string list"
-     | net_declaration "Range option" "string list"
-     | input_declaration "Range option" "string list"
-     | output_declaration "Range option" "string list"
-     | DeclarationMeta string
-and
-  Range = range Expression Expression | RangeMeta string
-and
-  Statement
-     = clock_statement Clock Statement_or_null
-     | blocking_assignment Lvalue Expression
-     | non_blocking_assignment Lvalue Expression
-     | conditional_statement
-          Expression Statement_or_null "Statement_or_null option"
-     | case_statement Expression "Case_item list"
-     | while_loop Expression Statement
-     | repeat_loop Expression Statement
-     | for_loop
-          Lvalue Expression Expression Lvalue Expression Statement
-     | forever_loop Statement
-     | disable string
-     | seq_block "string option" "Statement list"
-     | StatementMeta string
-and
-  Statement_or_null
-     = statement Statement | null_statement | Statement_or_nullMeta string
-and
-  Clock
-     = posedge string
-     | negedge string
-     | clock string
-     | ClockMeta string
-and
-  Case_item
-     = case_item "Expression list" Statement_or_null
-     | default_case_item Statement_or_null
-     | Case_itemMeta string
-and
-  Expression
-     = plus Expression Expression
-     | minus Expression Expression
-     | lshift Expression Expression
-     | rshift Expression Expression
-     | lt Expression Expression
-     | leq Expression Expression
-     | gt Expression Expression
-     | geq Expression Expression
-     | logeq Expression Expression
-     | logneq Expression Expression
-     | caseeq Expression Expression
-     | caseneq Expression Expression
-     | bitand Expression Expression
-     | bitxor Expression Expression
-     | bitor Expression Expression
-     | logand Expression Expression
-     | logor Expression Expression
-     | conditional Expression Expression Expression
-     | positive Primary
-     | negative Primary
-     | lognot Primary
-     | bitnot Primary
-     | reducand Primary
-     | reducxor Primary
-     | reducor Primary
-     | reducnand Primary
-     | reducxnor Primary
-     | reducnor Primary
-     | primary Primary
-     | ExpressionMeta string
-and
-  Primary
-     = primary_number Number
-     | primary_IDENTIFIER string
-     | primary_bit_select string Expression
-     | primary_part_select string Expression Expression
-     | primary_gen_bit_select Expression Expression
-     | primary_gen_part_select Expression Expression Expression
-     | primary_concatenation Concatenation
-     | primary_multiple_concatenation Multiple_concatenation
-     | brackets Expression
-     | PrimaryMeta string
-and
-  Lvalue
-     = lvalue string
-     | lvalue_bit_select string Expression
-     | lvalue_part_select string Expression Expression
-     | lvalue_concatenation Concatenation
-     | LvalueMeta string
-and
-  Number
-     = decimal string
-     | based "string option" string
-     | NumberMeta string
-and
-  Concatenation
-     = concatenation "Expression list" | ConcatenationMeta string
-and
-  Multiple_concatenation
-     = multiple_concatenation Expression "Expression list"
-     | Multiple_concatenationMeta string
-and
-  meta
-     = Meta_Source_text Source_text
-     | Meta_Module_item Module_item
-     | Meta_Declaration Declaration
-     | Meta_Range Range
-     | Meta_Statement Statement
-     | Meta_Statement_or_null Statement_or_null
-     | Meta_Clock Clock
-     | Meta_Case_item Case_item
-     | Meta_Expression Expression
-     | Meta_Primary Primary
-     | Meta_Lvalue Lvalue
-     | Meta_Number Number
-     | Meta_Concatenation Concatenation
-     | Meta_Multiple_concatenation Multiple_concatenation
-
-end
--- a/Admin/Benchmarks/HOL-record/ROOT.ML	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,14 +0,0 @@
-(*  Title:      Admin/Benchmarks/HOL-record/ROOT.ML
-
-Some benchmark on large record.
-*)
-
-val tests = ["Record_Benchmark"];
-
-Toplevel.timing := true;
-
-warning "\nset quick_and_dirty\n"; quick_and_dirty := true;
-use_thys tests;
-
-warning "\nreset quick_and_dirty\n"; quick_and_dirty := false;
-use_thys tests;
--- a/Admin/Benchmarks/HOL-record/Record_Benchmark.thy	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,390 +0,0 @@
-(*  Title:      Admin/Benchmarks/HOL-record/Record_Benchmark.thy
-    Author:     Norbert Schirmer, DFKI
-*)
-
-header {* Benchmark for large record *}
-
-theory Record_Benchmark
-imports Main
-begin
-
-declare [[record_timing]]
-
-record many_A =
-A000::nat
-A001::nat
-A002::nat
-A003::nat
-A004::nat
-A005::nat
-A006::nat
-A007::nat
-A008::nat
-A009::nat
-A010::nat
-A011::nat
-A012::nat
-A013::nat
-A014::nat
-A015::nat
-A016::nat
-A017::nat
-A018::nat
-A019::nat
-A020::nat
-A021::nat
-A022::nat
-A023::nat
-A024::nat
-A025::nat
-A026::nat
-A027::nat
-A028::nat
-A029::nat
-A030::nat
-A031::nat
-A032::nat
-A033::nat
-A034::nat
-A035::nat
-A036::nat
-A037::nat
-A038::nat
-A039::nat
-A040::nat
-A041::nat
-A042::nat
-A043::nat
-A044::nat
-A045::nat
-A046::nat
-A047::nat
-A048::nat
-A049::nat
-A050::nat
-A051::nat
-A052::nat
-A053::nat
-A054::nat
-A055::nat
-A056::nat
-A057::nat
-A058::nat
-A059::nat
-A060::nat
-A061::nat
-A062::nat
-A063::nat
-A064::nat
-A065::nat
-A066::nat
-A067::nat
-A068::nat
-A069::nat
-A070::nat
-A071::nat
-A072::nat
-A073::nat
-A074::nat
-A075::nat
-A076::nat
-A077::nat
-A078::nat
-A079::nat
-A080::nat
-A081::nat
-A082::nat
-A083::nat
-A084::nat
-A085::nat
-A086::nat
-A087::nat
-A088::nat
-A089::nat
-A090::nat
-A091::nat
-A092::nat
-A093::nat
-A094::nat
-A095::nat
-A096::nat
-A097::nat
-A098::nat
-A099::nat
-A100::nat
-A101::nat
-A102::nat
-A103::nat
-A104::nat
-A105::nat
-A106::nat
-A107::nat
-A108::nat
-A109::nat
-A110::nat
-A111::nat
-A112::nat
-A113::nat
-A114::nat
-A115::nat
-A116::nat
-A117::nat
-A118::nat
-A119::nat
-A120::nat
-A121::nat
-A122::nat
-A123::nat
-A124::nat
-A125::nat
-A126::nat
-A127::nat
-A128::nat
-A129::nat
-A130::nat
-A131::nat
-A132::nat
-A133::nat
-A134::nat
-A135::nat
-A136::nat
-A137::nat
-A138::nat
-A139::nat
-A140::nat
-A141::nat
-A142::nat
-A143::nat
-A144::nat
-A145::nat
-A146::nat
-A147::nat
-A148::nat
-A149::nat
-A150::nat
-A151::nat
-A152::nat
-A153::nat
-A154::nat
-A155::nat
-A156::nat
-A157::nat
-A158::nat
-A159::nat
-A160::nat
-A161::nat
-A162::nat
-A163::nat
-A164::nat
-A165::nat
-A166::nat
-A167::nat
-A168::nat
-A169::nat
-A170::nat
-A171::nat
-A172::nat
-A173::nat
-A174::nat
-A175::nat
-A176::nat
-A177::nat
-A178::nat
-A179::nat
-A180::nat
-A181::nat
-A182::nat
-A183::nat
-A184::nat
-A185::nat
-A186::nat
-A187::nat
-A188::nat
-A189::nat
-A190::nat
-A191::nat
-A192::nat
-A193::nat
-A194::nat
-A195::nat
-A196::nat
-A197::nat
-A198::nat
-A199::nat
-A200::nat
-A201::nat
-A202::nat
-A203::nat
-A204::nat
-A205::nat
-A206::nat
-A207::nat
-A208::nat
-A209::nat
-A210::nat
-A211::nat
-A212::nat
-A213::nat
-A214::nat
-A215::nat
-A216::nat
-A217::nat
-A218::nat
-A219::nat
-A220::nat
-A221::nat
-A222::nat
-A223::nat
-A224::nat
-A225::nat
-A226::nat
-A227::nat
-A228::nat
-A229::nat
-A230::nat
-A231::nat
-A232::nat
-A233::nat
-A234::nat
-A235::nat
-A236::nat
-A237::nat
-A238::nat
-A239::nat
-A240::nat
-A241::nat
-A242::nat
-A243::nat
-A244::nat
-A245::nat
-A246::nat
-A247::nat
-A248::nat
-A249::nat
-A250::nat
-A251::nat
-A252::nat
-A253::nat
-A254::nat
-A255::nat
-A256::nat
-A257::nat
-A258::nat
-A259::nat
-A260::nat
-A261::nat
-A262::nat
-A263::nat
-A264::nat
-A265::nat
-A266::nat
-A267::nat
-A268::nat
-A269::nat
-A270::nat
-A271::nat
-A272::nat
-A273::nat
-A274::nat
-A275::nat
-A276::nat
-A277::nat
-A278::nat
-A279::nat
-A280::nat
-A281::nat
-A282::nat
-A283::nat
-A284::nat
-A285::nat
-A286::nat
-A287::nat
-A288::nat
-A289::nat
-A290::nat
-A291::nat
-A292::nat
-A293::nat
-A294::nat
-A295::nat
-A296::nat
-A297::nat
-A298::nat
-A299::nat
-
-lemma "A155 (r\<lparr>A255:=x\<rparr>) = A155 r"
-  by simp
-
-lemma "A155 (r\<lparr>A255:=x,A253:=y,A255:=z \<rparr>) = A155 r"
-  by simp
-
-lemma "(r\<lparr>A255:=x,A253:=y,A255:=z \<rparr>) = r\<lparr>A253:=y,A255:=z\<rparr>"
-  by simp
-
-lemma "(r\<lparr>A255:=x,A253:=y,A255:=z \<rparr>) = r\<lparr>A253:=y,A255:=z\<rparr>"
-  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.upd_simproc]) 1*})
-  done
-
-lemma "(\<forall>r. P (A155 r)) \<longrightarrow> (\<forall>x. P x)"
-  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.split_simproc (K ~1)]) 1*})
-  apply simp
-  done
-
-lemma "(\<forall>r. P (A155 r)) \<longrightarrow> (\<forall>x. P x)"
-  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
-  apply simp
-  done
-
-lemma "(\<exists>r. P (A155 r)) \<longrightarrow> (\<exists>x. P x)"
-  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.split_simproc (K ~1)]) 1*})
-  apply simp
-  done
-
-lemma "(\<exists>r. P (A155 r)) \<longrightarrow> (\<exists>x. P x)"
-  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
-  apply simp
-  done
-
-lemma "\<And>r. P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
-  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.split_simproc (K ~1)]) 1*})
-  apply auto
-  done
-
-lemma "\<And>r. P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
-  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
-  apply auto
-  done
-
-lemma "P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
-  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
-  apply auto
-  done
-
-lemma fixes r shows "P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
-  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
-  apply auto
-  done
-
-
-lemma True
-proof -
-  {
-    fix P r
-    assume pre: "P (A155 r)"
-    have "\<exists>x. P x"
-      using pre
-      apply -
-      apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
-      apply auto 
-      done
-  }
-  show ?thesis ..
-qed
-
-
-lemma "\<exists>r. A155 r = x"
-  apply (tactic {*simp_tac (HOL_basic_ss addsimprocs [Record.ex_sel_eq_simproc]) 1*})
-  done
-
-
-end
\ No newline at end of file
--- a/Admin/Benchmarks/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,43 +0,0 @@
-
-## targets
-
-default: all
-images:
-test: HOL-datatype HOL-record
-all: images test
-
-
-## global settings
-
-SRC = $(ISABELLE_HOME)/src
-OUT = $(ISABELLE_OUTPUT)
-LOG = $(OUT)/log
-
-
-## HOL-datatype
-
-HOL:
-	@cd $(SRC)/HOL; $(ISABELLE_TOOL) make HOL
-
-
-HOL-datatype: HOL $(LOG)/HOL-datatype.gz
-
-$(LOG)/HOL-datatype.gz: $(OUT)/HOL HOL-datatype/ROOT.ML		\
-  HOL-datatype/Brackin.thy HOL-datatype/Instructions.thy	\
-  HOL-datatype/SML.thy HOL-datatype/Verilog.thy
-	@$(ISABELLE_TOOL) usedir -s datatype $(OUT)/HOL HOL-datatype
-
-
-## HOL-record
-
-HOL-record: HOL $(LOG)/HOL-record.gz
-
-$(LOG)/HOL-record.gz: $(OUT)/HOL HOL-record/ROOT.ML	\
-   HOL-record/Record_Benchmark.thy
-	@$(ISABELLE_TOOL) usedir -s record $(OUT)/HOL HOL-record
-
-
-## clean
-
-clean:
-	@rm -f $(LOG)/HOL-datatype.gz $(LOG)/HOL-record.gz
--- a/Admin/isatest/isatest-makeall	Thu Dec 15 09:13:23 2011 +0100
+++ b/Admin/isatest/isatest-makeall	Thu Dec 15 09:13:32 2011 +0100
@@ -46,6 +46,8 @@
 
 [ -d $DISTPREFIX ] || fail "$DISTPREFIX is not a directory."
 
+TARGETS=all
+
 # make file flags and nice setup for different target platforms
 case $HOSTNAME in
     atbroy51)
@@ -69,6 +71,7 @@
   
     macbroy2)
         MFLAGS="-k"
+        TARGETS=full
         NICE=""
         ;;
 
@@ -97,6 +100,12 @@
         NICE=""
         ;;
 
+    macbroy30)
+        MFLAGS="-k"
+        TARGETS=full
+        NICE=""
+        ;;
+
     *)
         MFLAGS="-k"
         # be nice by default
@@ -119,7 +128,7 @@
   TOOL="$ISABELLE_TOOL make $MFLAGS $TARGETS"
 else
   DIR="."
-  TOOL="$ISABELLE_TOOL makeall $MFLAGS all"
+  TOOL="$ISABELLE_TOOL makeall $MFLAGS $TARGETS"
 fi
 
 IDENT=$(cat "$DISTPREFIX/ISABELLE_IDENT")
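(A hedged sketch of the resulting test invocation on the hosts that set TARGETS=full above, i.e. macbroy2 and macbroy30, with MFLAGS="-k":

    $ISABELLE_TOOL makeall -k full

All other hosts keep the default TARGETS=all and run the same "makeall -k all" command as before.)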
--- a/NEWS	Thu Dec 15 09:13:23 2011 +0100
+++ b/NEWS	Thu Dec 15 09:13:32 2011 +0100
@@ -53,6 +53,10 @@
 
 *** HOL ***
 
+* Theory HOL/Library/AList has been renamed to AList_Impl. INCOMPATIBILITY.
+
+* 'datatype' specifications allow explicit sort constraints.
+
 * Theory HOL/Library/Diagonalize has been removed. INCOMPATIBILITY, use
 theory HOL/Library/Nat_Bijection instead.
 
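(A minimal sketch of the new 'datatype' sort constraints mentioned in the NEWS entry above; the type and constructor names are made up for illustration and are not part of this changeset:

    datatype ('a::linorder) ordtree = Tip | Node 'a "'a ordtree" "'a ordtree"

The type argument carries an explicit sort constraint, matching the typespec_sorts syntax documented in the HOL_Specific.thy hunk below.)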
--- a/doc-src/IsarRef/Thy/HOL_Specific.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/doc-src/IsarRef/Thy/HOL_Specific.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -693,7 +693,7 @@
     @@{command (HOL) rep_datatype} ('(' (@{syntax name} +) ')')? (@{syntax term} +)
     ;
 
-    spec: @{syntax typespec} @{syntax mixfix}? '=' (cons + '|')
+    spec: @{syntax typespec_sorts} @{syntax mixfix}? '=' (cons + '|')
     ;
     cons: @{syntax name} (@{syntax type} * ) @{syntax mixfix}?
   "}
--- a/doc-src/IsarRef/Thy/document/HOL_Specific.tex	Thu Dec 15 09:13:23 2011 +0100
+++ b/doc-src/IsarRef/Thy/document/HOL_Specific.tex	Thu Dec 15 09:13:32 2011 +0100
@@ -1036,7 +1036,7 @@
 \rail@endplus
 \rail@end
 \rail@begin{2}{\isa{spec}}
-\rail@nont{\hyperlink{syntax.typespec}{\mbox{\isa{typespec}}}}[]
+\rail@nont{\hyperlink{syntax.typespec-sorts}{\mbox{\isa{typespec{\isaliteral{5F}{\isacharunderscore}}sorts}}}}[]
 \rail@bar
 \rail@nextbar{1}
 \rail@nont{\hyperlink{syntax.mixfix}{\mbox{\isa{mixfix}}}}[]
--- a/doc-src/Sledgehammer/sledgehammer.tex	Thu Dec 15 09:13:23 2011 +0100
+++ b/doc-src/Sledgehammer/sledgehammer.tex	Thu Dec 15 09:13:32 2011 +0100
@@ -178,8 +178,8 @@
 
 in it.
 
-\item[\labelitemi] If you prefer to build E or SPASS yourself, or obtained a
-Vampire executable from somewhere (e.g., \url{http://www.vprover.org/}),
+\item[\labelitemi] If you prefer to build E or SPASS yourself, or found a
+Vampire executable somewhere (e.g., \url{http://www.vprover.org/}),
 set the environment variable \texttt{E\_HOME}, \texttt{SPASS\_HOME}, or
 \texttt{VAMPIRE\_HOME} to the directory that contains the \texttt{eproof},
 \texttt{SPASS}, or \texttt{vampire} executable. Sledgehammer has been tested
@@ -188,7 +188,7 @@
 reset to 0; hence the (new) Vampire versions 0.6, 1.0, and 1.8 are more recent
 than, say, Vampire 9.0 or 11.5.}%
 . Since the ATPs' output formats are neither documented nor stable, other
-versions of the ATPs might or might not work well with Sledgehammer. Ideally,
+versions of the ATPs might not work well with Sledgehammer. Ideally,
 also set \texttt{E\_VERSION}, \texttt{SPASS\_VERSION}, or
 \texttt{VAMPIRE\_VERSION} to the ATP's version number (e.g., ``1.4'').
 \end{enum}
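(A hedged example of the ATP settings described above, e.g. placed in $ISABELLE_HOME_USER/etc/settings; the path is a placeholder, not a real location:

    E_HOME="/opt/e-prover/PROVER"   # directory that contains the "eproof" executable
    E_VERSION="1.4"
)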
@@ -220,20 +220,36 @@
 
 There are two main ways of installing SMT solvers locally.
 
+\sloppy
+
 \begin{enum}
 \item[\labelitemi] If you installed an official Isabelle package with everything
 inside, it should already include properly setup executables for CVC3 and Z3,
 ready to use.%
 \footnote{Yices's license prevents us from doing the same for this otherwise
 wonderful tool.}
-For Z3, you additionally need to set the environment variable
-\texttt{Z3\_NON\_COMMERCIAL} to ``yes'' to confirm that you are a noncommercial
-user.
+For Z3, you must also set the variable
+\texttt{Z3\_NON\_COMMERCIAL} to ``yes'' to confirm that you are a
+noncommercial user, either in the environment in which Isabelle is
+launched or in your
+\texttt{\char`\~/\$ISABELLE\_HOME\_USER/etc/settings} file.
 
-\item[\labelitemi] Otherwise, follow the instructions documented in the \emph{SMT}
-theory (\texttt{\$ISABELLE\_HOME/src/HOL/SMT.thy}).
+\item[\labelitemi] If you prefer to build CVC3 yourself, or found a
+Yices or Z3 executable somewhere (e.g.,
+\url{http://yices.csl.sri.com/download.shtml} or
+\url{http://research.microsoft.com/en-us/um/redmond/projects/z3/download.html}),
+set the environment variable \texttt{CVC3\_\allowbreak SOLVER},
+\texttt{YICES\_SOLVER}, or \texttt{Z3\_SOLVER} to the complete path of
+the executable, including the file name. Sledgehammer has been tested
+with CVC3 2.2, Yices 1.0.28, and Z3 3.0 to 3.2.
+Since the SMT solvers' output formats are somewhat unstable, other
+versions of the solvers might not work well with Sledgehammer. Ideally,
+also set \texttt{CVC3\_VERSION}, \texttt{YICES\_VERSION}, or
+\texttt{Z3\_VERSION} to the solver's version number (e.g., ``3.2'').
 \end{enum}
 
+\fussy
+
 \section{First Steps}
 \label{first-steps}
 
@@ -779,18 +795,20 @@
 \item[\labelitemi] \textbf{\textit{yices}:} Yices is an SMT solver developed at
 SRI \cite{yices}. To use Yices, set the environment variable
 \texttt{YICES\_SOLVER} to the complete path of the executable, including the
-file name. Sledgehammer has been tested with version 1.0.
+file name. Sledgehammer has been tested with version 1.0.28.
 
 \item[\labelitemi] \textbf{\textit{z3}:} Z3 is an SMT solver developed at
 Microsoft Research \cite{z3}. To use Z3, set the environment variable
 \texttt{Z3\_SOLVER} to the complete path of the executable, including the file
 name, and set \texttt{Z3\_NON\_COMMERCIAL} to ``yes'' to confirm that you are a
-noncommercial user. Sledgehammer has been tested with versions 2.7 to 2.18.
+noncommercial user. Sledgehammer has been tested with versions 3.0 to 3.2.
 
 \item[\labelitemi] \textbf{\textit{z3\_tptp}:} This version of Z3 pretends to be
 an ATP, exploiting Z3's support for the TPTP untyped and typed first-order
-formats (FOF and TFF0). It is included for experimental purposes. It requires
-version 3.0 or above.
+formats (FOF and TFF0). It is included for experimental purposes. It
+requires version 3.0 or above. To use it, set the environment variable
+\texttt{Z3\_HOME} to the directory that contains the \texttt{z3}
+executable.
 \end{enum}
 
 In addition, the following remote provers are supported:
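(A hedged sketch of the local SMT-solver setup described in the sledgehammer.tex hunks above, again for $ISABELLE_HOME_USER/etc/settings; the path is a placeholder:

    Z3_SOLVER="/opt/z3/bin/z3"   # complete path of the executable, including the file name
    Z3_VERSION="3.2"
    Z3_NON_COMMERCIAL="yes"      # confirm noncommercial use
)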
--- a/src/CCL/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/CCL/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images: CCL
 test: CCL-ex
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/CTT/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/CTT/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images: CTT
 test: CTT-ex
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/Cube/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/Cube/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images:
 test: Pure-Cube
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/FOL/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/FOL/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images: FOL
 test: FOL-ex
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/FOLP/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/FOLP/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images: FOLP
 test: FOLP-ex
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/HOL/ATP.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/ATP.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -12,6 +12,7 @@
      "Tools/ATP/atp_util.ML"
      "Tools/ATP/atp_problem.ML"
      "Tools/ATP/atp_proof.ML"
+     "Tools/ATP/atp_redirect.ML"
      ("Tools/ATP/atp_translate.ML")
      ("Tools/ATP/atp_reconstruct.ML")
      ("Tools/ATP/atp_systems.ML")
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Datatype_Benchmark/Brackin.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,43 @@
+(*  Title:      HOL/Datatype_Benchmark/Brackin.thy
+
+A couple from Steve Brackin's work.
+*)
+
+theory Brackin imports Main begin
+
+datatype   T = X1 | X2 | X3 | X4 | X5 | X6 | X7 | X8 | X9 | X10 | X11 |
+                X12 | X13 | X14 | X15 | X16 | X17 | X18 | X19 | X20 | X21 |
+                X22 | X23 | X24 | X25 | X26 | X27 | X28 | X29 | X30 | X31 |
+                X32 | X33 | X34
+
+datatype  
+  ('a, 'b, 'c) TY1 =
+      NoF__
+    | Fk__ 'a "('a, 'b, 'c) TY2"
+and
+  ('a, 'b, 'c) TY2 =
+      Ta__ bool
+    | Td__ bool
+    | Tf__ "('a, 'b, 'c) TY1"
+    | Tk__ bool
+    | Tp__ bool
+    | App__ 'a "('a, 'b, 'c) TY1" "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY3"
+    | Pair__ "('a, 'b, 'c) TY2" "('a, 'b, 'c) TY2"
+and
+  ('a, 'b, 'c) TY3 =
+      NoS__
+    | Fresh__ "('a, 'b, 'c) TY2"
+    | Trustworthy__ 'a
+    | PrivateKey__ 'a 'b 'c
+    | PublicKey__ 'a 'b 'c
+    | Conveyed__ 'a "('a, 'b, 'c) TY2"
+    | Possesses__ 'a "('a, 'b, 'c) TY2"
+    | Received__ 'a "('a, 'b, 'c) TY2"
+    | Recognizes__ 'a "('a, 'b, 'c) TY2"
+    | NeverMalFromSelf__ 'a 'b "('a, 'b, 'c) TY2"
+    | Sends__ 'a "('a, 'b, 'c) TY2" 'b
+    | SharedSecret__ 'a "('a, 'b, 'c) TY2" 'b
+    | Believes__ 'a "('a, 'b, 'c) TY3"
+    | And__ "('a, 'b, 'c) TY3" "('a, 'b, 'c) TY3"
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Datatype_Benchmark/Instructions.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,162 @@
+(*  Title:      HOL/Datatype_Benchmark/Instructions.thy
+
+Example from Konrad: 68000 instruction set.
+*)
+
+theory Instructions imports Main begin
+
+datatype Size = Byte | Word | Long
+
+datatype DataRegister
+              = RegD0
+              | RegD1
+              | RegD2
+              | RegD3
+              | RegD4
+              | RegD5
+              | RegD6
+              | RegD7
+
+datatype AddressRegister
+              = RegA0
+              | RegA1
+              | RegA2
+              | RegA3
+              | RegA4
+              | RegA5
+              | RegA6
+              | RegA7
+
+datatype DataOrAddressRegister
+              = data DataRegister
+              | address AddressRegister
+
+datatype Condition
+              = Hi
+              | Ls
+              | Cc
+              | Cs
+              | Ne
+              | Eq
+              | Vc
+              | Vs
+              | Pl
+              | Mi
+              | Ge
+              | Lt
+              | Gt
+              | Le
+
+datatype AddressingMode
+        = immediate nat
+        | direct DataOrAddressRegister
+        | indirect AddressRegister
+        | postinc AddressRegister
+        | predec AddressRegister
+        | indirectdisp nat AddressRegister
+        | indirectindex nat AddressRegister DataOrAddressRegister Size
+        | absolute nat
+        | pcdisp nat
+        | pcindex nat DataOrAddressRegister Size
+
+datatype M68kInstruction
+    = ABCD AddressingMode AddressingMode
+    | ADD Size AddressingMode AddressingMode
+    | ADDA Size AddressingMode AddressRegister
+    | ADDI Size nat AddressingMode
+    | ADDQ Size nat AddressingMode
+    | ADDX Size AddressingMode AddressingMode
+    | AND Size AddressingMode AddressingMode
+    | ANDI Size nat AddressingMode
+    | ANDItoCCR nat
+    | ANDItoSR nat
+    | ASL Size AddressingMode DataRegister
+    | ASLW AddressingMode
+    | ASR Size AddressingMode DataRegister
+    | ASRW AddressingMode
+    | Bcc Condition Size nat
+    | BTST Size AddressingMode AddressingMode
+    | BCHG Size AddressingMode AddressingMode
+    | BCLR Size AddressingMode AddressingMode
+    | BSET Size AddressingMode AddressingMode
+    | BRA Size nat
+    | BSR Size nat
+    | CHK AddressingMode DataRegister
+    | CLR Size AddressingMode
+    | CMP Size AddressingMode DataRegister
+    | CMPA Size AddressingMode AddressRegister
+    | CMPI Size nat AddressingMode
+    | CMPM Size AddressRegister AddressRegister
+    | DBT DataRegister nat
+    | DBF DataRegister nat
+    | DBcc Condition DataRegister nat
+    | DIVS AddressingMode DataRegister
+    | DIVU AddressingMode DataRegister
+    | EOR Size DataRegister AddressingMode
+    | EORI Size nat AddressingMode
+    | EORItoCCR nat
+    | EORItoSR nat
+    | EXG DataOrAddressRegister DataOrAddressRegister
+    | EXT Size DataRegister
+    | ILLEGAL
+    | JMP AddressingMode
+    | JSR AddressingMode
+    | LEA AddressingMode AddressRegister
+    | LINK AddressRegister nat
+    | LSL Size AddressingMode DataRegister
+    | LSLW AddressingMode
+    | LSR Size AddressingMode DataRegister
+    | LSRW AddressingMode
+    | MOVE Size AddressingMode AddressingMode
+    | MOVEtoCCR AddressingMode
+    | MOVEtoSR AddressingMode
+    | MOVEfromSR AddressingMode
+    | MOVEtoUSP AddressingMode
+    | MOVEfromUSP AddressingMode
+    | MOVEA Size AddressingMode AddressRegister
+    | MOVEMto Size AddressingMode "DataOrAddressRegister list"
+    | MOVEMfrom Size "DataOrAddressRegister list" AddressingMode
+    | MOVEP Size AddressingMode AddressingMode
+    | MOVEQ nat DataRegister
+    | MULS AddressingMode DataRegister
+    | MULU AddressingMode DataRegister
+    | NBCD AddressingMode
+    | NEG Size AddressingMode
+    | NEGX Size AddressingMode
+    | NOP
+    | NOT Size AddressingMode
+    | OR Size AddressingMode AddressingMode
+    | ORI Size nat AddressingMode
+    | ORItoCCR nat
+    | ORItoSR nat
+    | PEA AddressingMode
+    | RESET
+    | ROL Size AddressingMode DataRegister
+    | ROLW AddressingMode
+    | ROR Size AddressingMode DataRegister
+    | RORW AddressingMode
+    | ROXL Size AddressingMode DataRegister
+    | ROXLW AddressingMode
+    | ROXR Size AddressingMode DataRegister
+    | ROXRW AddressingMode
+    | RTE
+    | RTR
+    | RTS
+    | SBCD AddressingMode AddressingMode
+    | ST AddressingMode
+    | SF AddressingMode
+    | Scc Condition AddressingMode
+    | STOP nat
+    | SUB Size AddressingMode AddressingMode
+    | SUBA Size AddressingMode AddressingMode
+    | SUBI Size nat AddressingMode
+    | SUBQ Size nat AddressingMode
+    | SUBX Size AddressingMode AddressingMode
+    | SWAP DataRegister
+    | TAS AddressingMode
+    | TRAP nat
+    | TRAPV
+    | TST Size AddressingMode
+    | UNLK AddressRegister
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Datatype_Benchmark/ROOT.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,8 @@
+(*  Title:      HOL/Datatype_Benchmark/ROOT.ML
+
+Some rather large datatype examples (from John Harrison).
+*)
+
+Toplevel.timing := true;
+
+use_thys ["Brackin", "Instructions", "SML", "Verilog"];
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Datatype_Benchmark/SML.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,91 @@
+(*  Title:      HOL/Datatype_Benchmark/SML.thy
+
+Example from Myra: part of the syntax of SML.
+*)
+
+theory SML imports Main begin
+
+datatype
+  string = EMPTY_STRING | CONS_STRING nat string
+
+datatype
+   strid = STRID string
+
+datatype
+   var = VAR string
+
+datatype
+   con = CON string
+
+datatype
+   scon = SCINT nat | SCSTR string
+
+datatype
+   excon = EXCON string
+
+datatype
+   label = LABEL string
+
+datatype
+  'a nonemptylist = Head_and_tail 'a "'a list"
+
+datatype
+  'a long = BASE 'a | QUALIFIED strid "'a long"
+
+datatype
+   atpat_e = WILDCARDatpat_e
+           | SCONatpat_e scon
+           | VARatpat_e var
+           | CONatpat_e "con long"
+           | EXCONatpat_e "excon long"
+           | RECORDatpat_e "patrow_e option"
+           | PARatpat_e pat_e
+and
+   patrow_e = DOTDOTDOT_e
+            | PATROW_e label pat_e "patrow_e option"
+and
+   pat_e = ATPATpat_e atpat_e
+         | CONpat_e "con long" atpat_e
+         | EXCONpat_e "excon long" atpat_e
+         | LAYEREDpat_e var pat_e
+and
+   conbind_e = CONBIND_e con "conbind_e option"
+and
+   datbind_e = DATBIND_e conbind_e "datbind_e option"
+and
+   exbind_e = EXBIND1_e excon "exbind_e option"
+            | EXBIND2_e excon "excon long" "exbind_e option"
+and
+   atexp_e = SCONatexp_e scon
+           | VARatexp_e "var long"
+           | CONatexp_e "con long"
+           | EXCONatexp_e "excon long"
+           | RECORDatexp_e "exprow_e option"
+           | LETatexp_e dec_e exp_e
+           | PARatexp_e exp_e
+and
+   exprow_e = EXPROW_e label exp_e "exprow_e option"
+and
+   exp_e = ATEXPexp_e atexp_e
+         | APPexp_e exp_e atexp_e
+         | HANDLEexp_e exp_e match_e
+         | RAISEexp_e exp_e
+         | FNexp_e match_e
+and
+   match_e = MATCH_e mrule_e "match_e option"
+and
+   mrule_e = MRULE_e pat_e exp_e
+and
+   dec_e = VALdec_e valbind_e
+         | DATATYPEdec_e datbind_e
+         | ABSTYPEdec_e datbind_e dec_e
+         | EXCEPTdec_e exbind_e
+         | LOCALdec_e dec_e dec_e
+         | OPENdec_e "strid long nonemptylist"
+         | EMPTYdec_e
+         | SEQdec_e dec_e dec_e
+and
+   valbind_e = PLAINvalbind_e pat_e exp_e "valbind_e option"
+             | RECvalbind_e valbind_e
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Datatype_Benchmark/Verilog.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,138 @@
+(*  Title:      HOL/Datatype_Benchmark/Verilog.thy
+
+Example from Daryl: a Verilog grammar.
+*)
+
+theory Verilog imports Main begin
+
+datatype
+  Source_text
+     = module string "string list" "Module_item list"
+     | Source_textMeta string
+and
+  Module_item
+     = "declaration" Declaration
+     | initial Statement
+     | always Statement
+     | assign Lvalue Expression
+     | Module_itemMeta string
+and
+  Declaration
+     = reg_declaration "Range option" "string list"
+     | net_declaration "Range option" "string list"
+     | input_declaration "Range option" "string list"
+     | output_declaration "Range option" "string list"
+     | DeclarationMeta string
+and
+  Range = range Expression Expression | RangeMeta string
+and
+  Statement
+     = clock_statement Clock Statement_or_null
+     | blocking_assignment Lvalue Expression
+     | non_blocking_assignment Lvalue Expression
+     | conditional_statement
+          Expression Statement_or_null "Statement_or_null option"
+     | case_statement Expression "Case_item list"
+     | while_loop Expression Statement
+     | repeat_loop Expression Statement
+     | for_loop
+          Lvalue Expression Expression Lvalue Expression Statement
+     | forever_loop Statement
+     | disable string
+     | seq_block "string option" "Statement list"
+     | StatementMeta string
+and
+  Statement_or_null
+     = statement Statement | null_statement | Statement_or_nullMeta string
+and
+  Clock
+     = posedge string
+     | negedge string
+     | clock string
+     | ClockMeta string
+and
+  Case_item
+     = case_item "Expression list" Statement_or_null
+     | default_case_item Statement_or_null
+     | Case_itemMeta string
+and
+  Expression
+     = plus Expression Expression
+     | minus Expression Expression
+     | lshift Expression Expression
+     | rshift Expression Expression
+     | lt Expression Expression
+     | leq Expression Expression
+     | gt Expression Expression
+     | geq Expression Expression
+     | logeq Expression Expression
+     | logneq Expression Expression
+     | caseeq Expression Expression
+     | caseneq Expression Expression
+     | bitand Expression Expression
+     | bitxor Expression Expression
+     | bitor Expression Expression
+     | logand Expression Expression
+     | logor Expression Expression
+     | conditional Expression Expression Expression
+     | positive Primary
+     | negative Primary
+     | lognot Primary
+     | bitnot Primary
+     | reducand Primary
+     | reducxor Primary
+     | reducor Primary
+     | reducnand Primary
+     | reducxnor Primary
+     | reducnor Primary
+     | primary Primary
+     | ExpressionMeta string
+and
+  Primary
+     = primary_number Number
+     | primary_IDENTIFIER string
+     | primary_bit_select string Expression
+     | primary_part_select string Expression Expression
+     | primary_gen_bit_select Expression Expression
+     | primary_gen_part_select Expression Expression Expression
+     | primary_concatenation Concatenation
+     | primary_multiple_concatenation Multiple_concatenation
+     | brackets Expression
+     | PrimaryMeta string
+and
+  Lvalue
+     = lvalue string
+     | lvalue_bit_select string Expression
+     | lvalue_part_select string Expression Expression
+     | lvalue_concatenation Concatenation
+     | LvalueMeta string
+and
+  Number
+     = decimal string
+     | based "string option" string
+     | NumberMeta string
+and
+  Concatenation
+     = concatenation "Expression list" | ConcatenationMeta string
+and
+  Multiple_concatenation
+     = multiple_concatenation Expression "Expression list"
+     | Multiple_concatenationMeta string
+and
+  meta
+     = Meta_Source_text Source_text
+     | Meta_Module_item Module_item
+     | Meta_Declaration Declaration
+     | Meta_Range Range
+     | Meta_Statement Statement
+     | Meta_Statement_or_null Statement_or_null
+     | Meta_Clock Clock
+     | Meta_Case_item Case_item
+     | Meta_Expression Expression
+     | Meta_Primary Primary
+     | Meta_Lvalue Lvalue
+     | Meta_Number Number
+     | Meta_Concatenation Concatenation
+     | Meta_Multiple_concatenation Multiple_concatenation
+
+end
--- a/src/HOL/HOLCF/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/HOLCF/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -17,6 +17,7 @@
   IOA-Storage \
   IOA-ex
 all: images test
+full: all
 
 
 ## global settings
--- a/src/HOL/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -81,6 +81,10 @@
   HOL-ZF
     # ^ this is the sort position
 
+benchmark: \
+  HOL-Datatype_Benchmark \
+  HOL-Record_Benchmark
+
 images-no-smlnj: \
   HOL-Probability
 
@@ -91,8 +95,10 @@
   HOL-Nominal-Examples
 
 all: test-no-smlnj test images-no-smlnj images
+full: all benchmark
 smlnj: test images
 
+
 ## global settings
 
 SRC = $(ISABELLE_HOME)/src
@@ -200,6 +206,7 @@
   Tools/ATP/atp_problem.ML \
   Tools/ATP/atp_proof.ML \
   Tools/ATP/atp_reconstruct.ML \
+  Tools/ATP/atp_redirect.ML \
   Tools/ATP/atp_systems.ML \
   Tools/ATP/atp_translate.ML \
   Tools/ATP/atp_util.ML \
@@ -429,7 +436,7 @@
 $(OUT)/HOL-Library: $(OUT)/HOL Library/ROOT.ML				\
   $(SRC)/HOL/Tools/float_arith.ML $(SRC)/Tools/float.ML			\
   Library/Abstract_Rat.thy $(SRC)/Tools/Adhoc_Overloading.thy		\
-  Library/AList.thy Library/AList_Mapping.thy 				\
+  Library/AList_Impl.thy Library/AList_Mapping.thy 			\
   Library/BigO.thy Library/Binomial.thy 				\
   Library/Bit.thy Library/Boolean_Algebra.thy Library/Cardinality.thy	\
   Library/Char_nat.thy Library/Code_Char.thy Library/Code_Char_chr.thy	\
@@ -1772,6 +1779,28 @@
 	@cd HOLCF/IOA; $(ISABELLE_TOOL) usedir $(OUT)/IOA ex
 
 
+
+## HOL-Datatype_Benchmark
+
+HOL-Datatype_Benchmark: HOL $(LOG)/HOL-Datatype_Benchmark.gz
+
+$(LOG)/HOL-Datatype_Benchmark.gz: $(OUT)/HOL				\
+  Datatype_Benchmark/ROOT.ML Datatype_Benchmark/Brackin.thy		\
+  Datatype_Benchmark/Instructions.thy Datatype_Benchmark/SML.thy	\
+  Datatype_Benchmark/Verilog.thy
+	@$(ISABELLE_TOOL) usedir $(OUT)/HOL Datatype_Benchmark
+
+
+## HOL-Record_Benchmark
+
+HOL-Record_Benchmark: HOL $(LOG)/HOL-Record_Benchmark.gz
+
+$(LOG)/HOL-Record_Benchmark.gz: $(OUT)/HOL Record_Benchmark/ROOT.ML	\
+   Record_Benchmark/Record_Benchmark.thy
+	@$(ISABELLE_TOOL) usedir $(OUT)/HOL Record_Benchmark
+
+
+
 ## clean
 
 clean:
@@ -1799,23 +1828,25 @@
 		$(LOG)/HOL-Proofs.gz $(LOG)/HOL-Proofs-ex.gz		\
 		$(LOG)/HOL-Proofs-Extraction.gz				\
 		$(LOG)/HOL-Proofs-Lambda.gz $(LOG)/HOL-SET_Protocol.gz	\
-		$(LOG)/HOL-Word-SMT_Examples.gz				\
-		$(LOG)/HOL-SPARK.gz $(LOG)/HOL-SPARK-Examples.gz	\
-		$(LOG)/HOL-SPARK-Manual.gz				\
-		$(LOG)/HOL-Statespace.gz				\
+		$(LOG)/HOL-Word-SMT_Examples.gz $(LOG)/HOL-SPARK.gz	\
+		$(LOG)/HOL-SPARK-Examples.gz				\
+		$(LOG)/HOL-SPARK-Manual.gz $(LOG)/HOL-Statespace.gz	\
 		$(LOG)/HOL-UNITY.gz $(LOG)/HOL-Unix.gz			\
 		$(LOG)/HOL-Word-Examples.gz $(LOG)/HOL-Word.gz		\
 		$(LOG)/HOL-ZF.gz $(LOG)/HOL-ex.gz $(LOG)/HOL.gz		\
 		$(LOG)/HOL4.gz $(LOG)/TLA-Buffer.gz $(LOG)/TLA-Inc.gz	\
 		$(LOG)/TLA-Memory.gz $(LOG)/TLA.gz $(OUT)/HOL		\
 		$(OUT)/HOL-Algebra $(OUT)/HOL-Base $(OUT)/HOL-Boogie	\
-		$(OUT)/HOL-IMP $(OUT)/HOL-Main $(OUT)/HOL-Multivariate_Analysis	\
-		$(OUT)/HOL-NSA $(OUT)/HOL-Nominal $(OUT)/HOL-Plain	\
+		$(OUT)/HOL-IMP $(OUT)/HOL-Main				\
+		$(OUT)/HOL-Multivariate_Analysis $(OUT)/HOL-NSA		\
+		$(OUT)/HOL-Nominal $(OUT)/HOL-Plain			\
 		$(OUT)/HOL-Probability $(OUT)/HOL-Proofs		\
-		$(OUT)/HOL-SPARK					\
-		$(OUT)/HOL-Word $(OUT)/HOL4 $(OUT)/TLA			\
-		$(OUT)/HOLCF $(LOG)/HOLCF.gz $(LOG)/HOLCF-IMP.gz	\
-		$(LOG)/HOLCF-ex.gz $(LOG)/HOLCF-FOCUS.gz $(OUT)/IOA	\
-		$(LOG)/IOA.gz $(LOG)/IOA-ABP.gz $(LOG)/IOA-NTP.gz	\
+		$(OUT)/HOL-SPARK $(OUT)/HOL-Word $(OUT)/HOL4		\
+		$(OUT)/TLA $(OUT)/HOLCF $(LOG)/HOLCF.gz			\
+		$(LOG)/HOLCF-IMP.gz $(LOG)/HOLCF-ex.gz			\
+		$(LOG)/HOLCF-FOCUS.gz $(OUT)/IOA $(LOG)/IOA.gz		\
+		$(LOG)/IOA-ABP.gz $(LOG)/IOA-NTP.gz			\
 		$(LOG)/IOA-Storage.gz $(LOG)/HOLCF-Library.gz		\
-		$(LOG)/IOA-ex.gz $(LOG)/HOLCF-Tutorial.gz
+		$(LOG)/IOA-ex.gz $(LOG)/HOLCF-Tutorial.gz		\
+		$(LOG)/HOL-Datatype_Benchmark.gz			\
+		$(LOG)/HOL-Record_Benchmark.gz
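(A hedged usage sketch for the new make targets added to src/HOL/IsaMakefile above, assuming the usual IsaMakefile workflow:

    cd src/HOL
    isabelle make benchmark   # HOL-Datatype_Benchmark and HOL-Record_Benchmark only
    isabelle make full        # everything in "all" plus the benchmark sessions
)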
--- a/src/HOL/Library/AList.thy	Thu Dec 15 09:13:23 2011 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,656 +0,0 @@
-(*  Title:      HOL/Library/AList.thy
-    Author:     Norbert Schirmer, Tobias Nipkow, Martin Wildmoser, TU Muenchen
-*)
-
-header {* Implementation of Association Lists *}
-
-theory AList
-imports Main More_List
-begin
-
-text {*
-  The operations preserve distinctness of keys and 
-  function @{term "clearjunk"} distributes over them. Since 
-  @{term clearjunk} enforces distinctness of keys it can be used
-  to establish the invariant, e.g. for inductive proofs.
-*}
-
-subsection {* @{text update} and @{text updates} *}
-
-primrec update :: "'key \<Rightarrow> 'val \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
-    "update k v [] = [(k, v)]"
-  | "update k v (p#ps) = (if fst p = k then (k, v) # ps else p # update k v ps)"
-
-lemma update_conv': "map_of (update k v al)  = (map_of al)(k\<mapsto>v)"
-  by (induct al) (auto simp add: fun_eq_iff)
-
-corollary update_conv: "map_of (update k v al) k' = ((map_of al)(k\<mapsto>v)) k'"
-  by (simp add: update_conv')
-
-lemma dom_update: "fst ` set (update k v al) = {k} \<union> fst ` set al"
-  by (induct al) auto
-
-lemma update_keys:
-  "map fst (update k v al) =
-    (if k \<in> set (map fst al) then map fst al else map fst al @ [k])"
-  by (induct al) simp_all
-
-lemma distinct_update:
-  assumes "distinct (map fst al)" 
-  shows "distinct (map fst (update k v al))"
-  using assms by (simp add: update_keys)
-
-lemma update_filter: 
-  "a\<noteq>k \<Longrightarrow> update k v [q\<leftarrow>ps . fst q \<noteq> a] = [q\<leftarrow>update k v ps . fst q \<noteq> a]"
-  by (induct ps) auto
-
-lemma update_triv: "map_of al k = Some v \<Longrightarrow> update k v al = al"
-  by (induct al) auto
-
-lemma update_nonempty [simp]: "update k v al \<noteq> []"
-  by (induct al) auto
-
-lemma update_eqD: "update k v al = update k v' al' \<Longrightarrow> v = v'"
-proof (induct al arbitrary: al') 
-  case Nil thus ?case 
-    by (cases al') (auto split: split_if_asm)
-next
-  case Cons thus ?case
-    by (cases al') (auto split: split_if_asm)
-qed
-
-lemma update_last [simp]: "update k v (update k v' al) = update k v al"
-  by (induct al) auto
-
-text {* Note that the lists are not necessarily the same:
-        @{term "update k v (update k' v' []) = [(k', v'), (k, v)]"} and 
-        @{term "update k' v' (update k v []) = [(k, v), (k', v')]"}.*}
-lemma update_swap: "k\<noteq>k' 
-  \<Longrightarrow> map_of (update k v (update k' v' al)) = map_of (update k' v' (update k v al))"
-  by (simp add: update_conv' fun_eq_iff)
-
-lemma update_Some_unfold: 
-  "map_of (update k v al) x = Some y \<longleftrightarrow>
-    x = k \<and> v = y \<or> x \<noteq> k \<and> map_of al x = Some y"
-  by (simp add: update_conv' map_upd_Some_unfold)
-
-lemma image_update [simp]:
-  "x \<notin> A \<Longrightarrow> map_of (update x y al) ` A = map_of al ` A"
-  by (simp add: update_conv' image_map_upd)
-
-definition updates :: "'key list \<Rightarrow> 'val list \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
-  "updates ks vs = More_List.fold (prod_case update) (zip ks vs)"
-
-lemma updates_simps [simp]:
-  "updates [] vs ps = ps"
-  "updates ks [] ps = ps"
-  "updates (k#ks) (v#vs) ps = updates ks vs (update k v ps)"
-  by (simp_all add: updates_def)
-
-lemma updates_key_simp [simp]:
-  "updates (k # ks) vs ps =
-    (case vs of [] \<Rightarrow> ps | v # vs \<Rightarrow> updates ks vs (update k v ps))"
-  by (cases vs) simp_all
-
-lemma updates_conv': "map_of (updates ks vs al) = (map_of al)(ks[\<mapsto>]vs)"
-proof -
-  have "map_of \<circ> More_List.fold (prod_case update) (zip ks vs) =
-    More_List.fold (\<lambda>(k, v) f. f(k \<mapsto> v)) (zip ks vs) \<circ> map_of"
-    by (rule fold_commute) (auto simp add: fun_eq_iff update_conv')
-  then show ?thesis by (auto simp add: updates_def fun_eq_iff map_upds_fold_map_upd foldl_fold split_def)
-qed
-
-lemma updates_conv: "map_of (updates ks vs al) k = ((map_of al)(ks[\<mapsto>]vs)) k"
-  by (simp add: updates_conv')
-
-lemma distinct_updates:
-  assumes "distinct (map fst al)"
-  shows "distinct (map fst (updates ks vs al))"
-proof -
-  have "distinct (More_List.fold
-       (\<lambda>(k, v) al. if k \<in> set al then al else al @ [k])
-       (zip ks vs) (map fst al))"
-    by (rule fold_invariant [of "zip ks vs" "\<lambda>_. True"]) (auto intro: assms)
-  moreover have "map fst \<circ> More_List.fold (prod_case update) (zip ks vs) =
-    More_List.fold (\<lambda>(k, v) al. if k \<in> set al then al else al @ [k]) (zip ks vs) \<circ> map fst"
-    by (rule fold_commute) (simp add: update_keys split_def prod_case_beta comp_def)
-  ultimately show ?thesis by (simp add: updates_def fun_eq_iff)
-qed
-
-lemma updates_append1[simp]: "size ks < size vs \<Longrightarrow>
-  updates (ks@[k]) vs al = update k (vs!size ks) (updates ks vs al)"
-  by (induct ks arbitrary: vs al) (auto split: list.splits)
-
-lemma updates_list_update_drop[simp]:
- "\<lbrakk>size ks \<le> i; i < size vs\<rbrakk>
-   \<Longrightarrow> updates ks (vs[i:=v]) al = updates ks vs al"
-  by (induct ks arbitrary: al vs i) (auto split:list.splits nat.splits)
-
-lemma update_updates_conv_if: "
- map_of (updates xs ys (update x y al)) =
- map_of (if x \<in>  set(take (length ys) xs) then updates xs ys al
-                                  else (update x y (updates xs ys al)))"
-  by (simp add: updates_conv' update_conv' map_upd_upds_conv_if)
-
-lemma updates_twist [simp]:
- "k \<notin> set ks \<Longrightarrow> 
-  map_of (updates ks vs (update k v al)) = map_of (update k v (updates ks vs al))"
-  by (simp add: updates_conv' update_conv' map_upds_twist)
-
-lemma updates_apply_notin[simp]:
- "k \<notin> set ks ==> map_of (updates ks vs al) k = map_of al k"
-  by (simp add: updates_conv)
-
-lemma updates_append_drop[simp]:
-  "size xs = size ys \<Longrightarrow> updates (xs@zs) ys al = updates xs ys al"
-  by (induct xs arbitrary: ys al) (auto split: list.splits)
-
-lemma updates_append2_drop[simp]:
-  "size xs = size ys \<Longrightarrow> updates xs (ys@zs) al = updates xs ys al"
-  by (induct xs arbitrary: ys al) (auto split: list.splits)
-
-
-subsection {* @{text delete} *}
-
-definition delete :: "'key \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
-  delete_eq: "delete k = filter (\<lambda>(k', _). k \<noteq> k')"
-
-lemma delete_simps [simp]:
-  "delete k [] = []"
-  "delete k (p#ps) = (if fst p = k then delete k ps else p # delete k ps)"
-  by (auto simp add: delete_eq)
-
-lemma delete_conv': "map_of (delete k al) = (map_of al)(k := None)"
-  by (induct al) (auto simp add: fun_eq_iff)
-
-corollary delete_conv: "map_of (delete k al) k' = ((map_of al)(k := None)) k'"
-  by (simp add: delete_conv')
-
-lemma delete_keys:
-  "map fst (delete k al) = removeAll k (map fst al)"
-  by (simp add: delete_eq removeAll_filter_not_eq filter_map split_def comp_def)
-
-lemma distinct_delete:
-  assumes "distinct (map fst al)" 
-  shows "distinct (map fst (delete k al))"
-  using assms by (simp add: delete_keys distinct_removeAll)
-
-lemma delete_id [simp]: "k \<notin> fst ` set al \<Longrightarrow> delete k al = al"
-  by (auto simp add: image_iff delete_eq filter_id_conv)
-
-lemma delete_idem: "delete k (delete k al) = delete k al"
-  by (simp add: delete_eq)
-
-lemma map_of_delete [simp]:
-    "k' \<noteq> k \<Longrightarrow> map_of (delete k al) k' = map_of al k'"
-  by (simp add: delete_conv')
-
-lemma delete_notin_dom: "k \<notin> fst ` set (delete k al)"
-  by (auto simp add: delete_eq)
-
-lemma dom_delete_subset: "fst ` set (delete k al) \<subseteq> fst ` set al"
-  by (auto simp add: delete_eq)
-
-lemma delete_update_same:
-  "delete k (update k v al) = delete k al"
-  by (induct al) simp_all
-
-lemma delete_update:
-  "k \<noteq> l \<Longrightarrow> delete l (update k v al) = update k v (delete l al)"
-  by (induct al) simp_all
-
-lemma delete_twist: "delete x (delete y al) = delete y (delete x al)"
-  by (simp add: delete_eq conj_commute)
-
-lemma length_delete_le: "length (delete k al) \<le> length al"
-  by (simp add: delete_eq)
-
-
-subsection {* @{text restrict} *}
-
-definition restrict :: "'key set \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
-  restrict_eq: "restrict A = filter (\<lambda>(k, v). k \<in> A)"
-
-lemma restr_simps [simp]:
-  "restrict A [] = []"
-  "restrict A (p#ps) = (if fst p \<in> A then p # restrict A ps else restrict A ps)"
-  by (auto simp add: restrict_eq)
-
-lemma restr_conv': "map_of (restrict A al) = ((map_of al)|` A)"
-proof
-  fix k
-  show "map_of (restrict A al) k = ((map_of al)|` A) k"
-    by (induct al) (simp, cases "k \<in> A", auto)
-qed
-
-corollary restr_conv: "map_of (restrict A al) k = ((map_of al)|` A) k"
-  by (simp add: restr_conv')
-
-lemma distinct_restr:
-  "distinct (map fst al) \<Longrightarrow> distinct (map fst (restrict A al))"
-  by (induct al) (auto simp add: restrict_eq)
-
-lemma restr_empty [simp]: 
-  "restrict {} al = []" 
-  "restrict A [] = []"
-  by (induct al) (auto simp add: restrict_eq)
-
-lemma restr_in [simp]: "x \<in> A \<Longrightarrow> map_of (restrict A al) x = map_of al x"
-  by (simp add: restr_conv')
-
-lemma restr_out [simp]: "x \<notin> A \<Longrightarrow> map_of (restrict A al) x = None"
-  by (simp add: restr_conv')
-
-lemma dom_restr [simp]: "fst ` set (restrict A al) = fst ` set al \<inter> A"
-  by (induct al) (auto simp add: restrict_eq)
-
-lemma restr_upd_same [simp]: "restrict (-{x}) (update x y al) = restrict (-{x}) al"
-  by (induct al) (auto simp add: restrict_eq)
-
-lemma restr_restr [simp]: "restrict A (restrict B al) = restrict (A\<inter>B) al"
-  by (induct al) (auto simp add: restrict_eq)
-
-lemma restr_update[simp]:
- "map_of (restrict D (update x y al)) = 
-  map_of ((if x \<in> D then (update x y (restrict (D-{x}) al)) else restrict D al))"
-  by (simp add: restr_conv' update_conv')
-
-lemma restr_delete [simp]:
-  "(delete x (restrict D al)) = 
-    (if x \<in> D then restrict (D - {x}) al else restrict D al)"
-apply (simp add: delete_eq restrict_eq)
-apply (auto simp add: split_def)
-proof -
-  have "\<And>y. y \<noteq> x \<longleftrightarrow> x \<noteq> y" by auto
-  then show "[p \<leftarrow> al. fst p \<in> D \<and> x \<noteq> fst p] = [p \<leftarrow> al. fst p \<in> D \<and> fst p \<noteq> x]"
-    by simp
-  assume "x \<notin> D"
-  then have "\<And>y. y \<in> D \<longleftrightarrow> y \<in> D \<and> x \<noteq> y" by auto
-  then show "[p \<leftarrow> al . fst p \<in> D \<and> x \<noteq> fst p] = [p \<leftarrow> al . fst p \<in> D]"
-    by simp
-qed
-
-lemma update_restr:
- "map_of (update x y (restrict D al)) = map_of (update x y (restrict (D-{x}) al))"
-  by (simp add: update_conv' restr_conv') (rule fun_upd_restrict)
-
-lemma upate_restr_conv [simp]:
- "x \<in> D \<Longrightarrow> 
- map_of (update x y (restrict D al)) = map_of (update x y (restrict (D-{x}) al))"
-  by (simp add: update_conv' restr_conv')
-
-lemma restr_updates [simp]: "
- \<lbrakk> length xs = length ys; set xs \<subseteq> D \<rbrakk>
- \<Longrightarrow> map_of (restrict D (updates xs ys al)) = 
-     map_of (updates xs ys (restrict (D - set xs) al))"
-  by (simp add: updates_conv' restr_conv')
-
-lemma restr_delete_twist: "(restrict A (delete a ps)) = delete a (restrict A ps)"
-  by (induct ps) auto
-
-
-subsection {* @{text clearjunk} *}
-
-function clearjunk  :: "('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
-    "clearjunk [] = []"  
-  | "clearjunk (p#ps) = p # clearjunk (delete (fst p) ps)"
-  by pat_completeness auto
-termination by (relation "measure length")
-  (simp_all add: less_Suc_eq_le length_delete_le)
-
-lemma map_of_clearjunk:
-  "map_of (clearjunk al) = map_of al"
-  by (induct al rule: clearjunk.induct)
-    (simp_all add: fun_eq_iff)
-
-lemma clearjunk_keys_set:
-  "set (map fst (clearjunk al)) = set (map fst al)"
-  by (induct al rule: clearjunk.induct)
-    (simp_all add: delete_keys)
-
-lemma dom_clearjunk:
-  "fst ` set (clearjunk al) = fst ` set al"
-  using clearjunk_keys_set by simp
-
-lemma distinct_clearjunk [simp]:
-  "distinct (map fst (clearjunk al))"
-  by (induct al rule: clearjunk.induct)
-    (simp_all del: set_map add: clearjunk_keys_set delete_keys)
-
-lemma ran_clearjunk:
-  "ran (map_of (clearjunk al)) = ran (map_of al)"
-  by (simp add: map_of_clearjunk)
-
-lemma ran_map_of:
-  "ran (map_of al) = snd ` set (clearjunk al)"
-proof -
-  have "ran (map_of al) = ran (map_of (clearjunk al))"
-    by (simp add: ran_clearjunk)
-  also have "\<dots> = snd ` set (clearjunk al)"
-    by (simp add: ran_distinct)
-  finally show ?thesis .
-qed
-
-lemma clearjunk_update:
-  "clearjunk (update k v al) = update k v (clearjunk al)"
-  by (induct al rule: clearjunk.induct)
-    (simp_all add: delete_update)
-
-lemma clearjunk_updates:
-  "clearjunk (updates ks vs al) = updates ks vs (clearjunk al)"
-proof -
-  have "clearjunk \<circ> More_List.fold (prod_case update) (zip ks vs) =
-    More_List.fold (prod_case update) (zip ks vs) \<circ> clearjunk"
-    by (rule fold_commute) (simp add: clearjunk_update prod_case_beta o_def)
-  then show ?thesis by (simp add: updates_def fun_eq_iff)
-qed
-
-lemma clearjunk_delete:
-  "clearjunk (delete x al) = delete x (clearjunk al)"
-  by (induct al rule: clearjunk.induct) (auto simp add: delete_idem delete_twist)
-
-lemma clearjunk_restrict:
- "clearjunk (restrict A al) = restrict A (clearjunk al)"
-  by (induct al rule: clearjunk.induct) (auto simp add: restr_delete_twist)
-
-lemma distinct_clearjunk_id [simp]:
-  "distinct (map fst al) \<Longrightarrow> clearjunk al = al"
-  by (induct al rule: clearjunk.induct) auto
-
-lemma clearjunk_idem:
-  "clearjunk (clearjunk al) = clearjunk al"
-  by simp
-
-lemma length_clearjunk:
-  "length (clearjunk al) \<le> length al"
-proof (induct al rule: clearjunk.induct [case_names Nil Cons])
-  case Nil then show ?case by simp
-next
-  case (Cons kv al)
-  moreover have "length (delete (fst kv) al) \<le> length al" by (fact length_delete_le)
-  ultimately have "length (clearjunk (delete (fst kv) al)) \<le> length al" by (rule order_trans)
-  then show ?case by simp
-qed
-
-lemma delete_map:
-  assumes "\<And>kv. fst (f kv) = fst kv"
-  shows "delete k (map f ps) = map f (delete k ps)"
-  by (simp add: delete_eq filter_map comp_def split_def assms)
-
-lemma clearjunk_map:
-  assumes "\<And>kv. fst (f kv) = fst kv"
-  shows "clearjunk (map f ps) = map f (clearjunk ps)"
-  by (induct ps rule: clearjunk.induct [case_names Nil Cons])
-    (simp_all add: clearjunk_delete delete_map assms)
-
-
-subsection {* @{text map_ran} *}
-
-definition map_ran :: "('key \<Rightarrow> 'val \<Rightarrow> 'val) \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
-  "map_ran f = map (\<lambda>(k, v). (k, f k v))"
-
-lemma map_ran_simps [simp]:
-  "map_ran f [] = []"
-  "map_ran f ((k, v) # ps) = (k, f k v) # map_ran f ps"
-  by (simp_all add: map_ran_def)
-
-lemma dom_map_ran:
-  "fst ` set (map_ran f al) = fst ` set al"
-  by (simp add: map_ran_def image_image split_def)
-  
-lemma map_ran_conv:
-  "map_of (map_ran f al) k = Option.map (f k) (map_of al k)"
-  by (induct al) auto
-
-lemma distinct_map_ran:
-  "distinct (map fst al) \<Longrightarrow> distinct (map fst (map_ran f al))"
-  by (simp add: map_ran_def split_def comp_def)
-
-lemma map_ran_filter:
-  "map_ran f [p\<leftarrow>ps. fst p \<noteq> a] = [p\<leftarrow>map_ran f ps. fst p \<noteq> a]"
-  by (simp add: map_ran_def filter_map split_def comp_def)
-
-lemma clearjunk_map_ran:
-  "clearjunk (map_ran f al) = map_ran f (clearjunk al)"
-  by (simp add: map_ran_def split_def clearjunk_map)
-
-
-subsection {* @{text merge} *}
-
-definition merge :: "('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
-  "merge qs ps = foldr (\<lambda>(k, v). update k v) ps qs"
-
-lemma merge_simps [simp]:
-  "merge qs [] = qs"
-  "merge qs (p#ps) = update (fst p) (snd p) (merge qs ps)"
-  by (simp_all add: merge_def split_def)
-
-lemma merge_updates:
-  "merge qs ps = updates (rev (map fst ps)) (rev (map snd ps)) qs"
-  by (simp add: merge_def updates_def foldr_fold_rev zip_rev zip_map_fst_snd)
-
-lemma dom_merge: "fst ` set (merge xs ys) = fst ` set xs \<union> fst ` set ys"
-  by (induct ys arbitrary: xs) (auto simp add: dom_update)
-
-lemma distinct_merge:
-  assumes "distinct (map fst xs)"
-  shows "distinct (map fst (merge xs ys))"
-using assms by (simp add: merge_updates distinct_updates)
-
-lemma clearjunk_merge:
-  "clearjunk (merge xs ys) = merge (clearjunk xs) ys"
-  by (simp add: merge_updates clearjunk_updates)
-
-lemma merge_conv':
-  "map_of (merge xs ys) = map_of xs ++ map_of ys"
-proof -
-  have "map_of \<circ> More_List.fold (prod_case update) (rev ys) =
-    More_List.fold (\<lambda>(k, v) m. m(k \<mapsto> v)) (rev ys) \<circ> map_of"
-    by (rule fold_commute) (simp add: update_conv' prod_case_beta split_def fun_eq_iff)
-  then show ?thesis
-    by (simp add: merge_def map_add_map_of_foldr foldr_fold_rev fun_eq_iff)
-qed
-
-corollary merge_conv:
-  "map_of (merge xs ys) k = (map_of xs ++ map_of ys) k"
-  by (simp add: merge_conv')
-
-lemma merge_empty: "map_of (merge [] ys) = map_of ys"
-  by (simp add: merge_conv')
-
-lemma merge_assoc[simp]: "map_of (merge m1 (merge m2 m3)) = 
-                           map_of (merge (merge m1 m2) m3)"
-  by (simp add: merge_conv')
-
-lemma merge_Some_iff: 
- "(map_of (merge m n) k = Some x) = 
-  (map_of n k = Some x \<or> map_of n k = None \<and> map_of m k = Some x)"
-  by (simp add: merge_conv' map_add_Some_iff)
-
-lemmas merge_SomeD [dest!] = merge_Some_iff [THEN iffD1]
-
-lemma merge_find_right[simp]: "map_of n k = Some v \<Longrightarrow> map_of (merge m n) k = Some v"
-  by (simp add: merge_conv')
-
-lemma merge_None [iff]: 
-  "(map_of (merge m n) k = None) = (map_of n k = None \<and> map_of m k = None)"
-  by (simp add: merge_conv')
-
-lemma merge_upd[simp]: 
-  "map_of (merge m (update k v n)) = map_of (update k v (merge m n))"
-  by (simp add: update_conv' merge_conv')
-
-lemma merge_updatess[simp]: 
-  "map_of (merge m (updates xs ys n)) = map_of (updates xs ys (merge m n))"
-  by (simp add: updates_conv' merge_conv')
-
-lemma merge_append: "map_of (xs@ys) = map_of (merge ys xs)"
-  by (simp add: merge_conv')
-
-
-subsection {* @{text compose} *}
-
-function compose :: "('key \<times> 'a) list \<Rightarrow> ('a \<times> 'b) list \<Rightarrow> ('key \<times> 'b) list" where
-    "compose [] ys = []"
-  | "compose (x#xs) ys = (case map_of ys (snd x)
-       of None \<Rightarrow> compose (delete (fst x) xs) ys
-        | Some v \<Rightarrow> (fst x, v) # compose xs ys)"
-  by pat_completeness auto
-termination by (relation "measure (length \<circ> fst)")
-  (simp_all add: less_Suc_eq_le length_delete_le)
-
-lemma compose_first_None [simp]: 
-  assumes "map_of xs k = None" 
-  shows "map_of (compose xs ys) k = None"
-using assms by (induct xs ys rule: compose.induct)
-  (auto split: option.splits split_if_asm)
-
-lemma compose_conv: 
-  shows "map_of (compose xs ys) k = (map_of ys \<circ>\<^sub>m map_of xs) k"
-proof (induct xs ys rule: compose.induct)
-  case 1 then show ?case by simp
-next
-  case (2 x xs ys) show ?case
-  proof (cases "map_of ys (snd x)")
-    case None with 2
-    have hyp: "map_of (compose (delete (fst x) xs) ys) k =
-               (map_of ys \<circ>\<^sub>m map_of (delete (fst x) xs)) k"
-      by simp
-    show ?thesis
-    proof (cases "fst x = k")
-      case True
-      from True delete_notin_dom [of k xs]
-      have "map_of (delete (fst x) xs) k = None"
-        by (simp add: map_of_eq_None_iff)
-      with hyp show ?thesis
-        using True None
-        by simp
-    next
-      case False
-      from False have "map_of (delete (fst x) xs) k = map_of xs k"
-        by simp
-      with hyp show ?thesis
-        using False None
-        by (simp add: map_comp_def)
-    qed
-  next
-    case (Some v)
-    with 2
-    have "map_of (compose xs ys) k = (map_of ys \<circ>\<^sub>m map_of xs) k"
-      by simp
-    with Some show ?thesis
-      by (auto simp add: map_comp_def)
-  qed
-qed
-   
-lemma compose_conv': 
-  shows "map_of (compose xs ys) = (map_of ys \<circ>\<^sub>m map_of xs)"
-  by (rule ext) (rule compose_conv)
-
-lemma compose_first_Some [simp]:
-  assumes "map_of xs k = Some v" 
-  shows "map_of (compose xs ys) k = map_of ys v"
-using assms by (simp add: compose_conv)
-
-lemma dom_compose: "fst ` set (compose xs ys) \<subseteq> fst ` set xs"
-proof (induct xs ys rule: compose.induct)
-  case 1 thus ?case by simp
-next
-  case (2 x xs ys)
-  show ?case
-  proof (cases "map_of ys (snd x)")
-    case None
-    with "2.hyps"
-    have "fst ` set (compose (delete (fst x) xs) ys) \<subseteq> fst ` set (delete (fst x) xs)"
-      by simp
-    also
-    have "\<dots> \<subseteq> fst ` set xs"
-      by (rule dom_delete_subset)
-    finally show ?thesis
-      using None
-      by auto
-  next
-    case (Some v)
-    with "2.hyps"
-    have "fst ` set (compose xs ys) \<subseteq> fst ` set xs"
-      by simp
-    with Some show ?thesis
-      by auto
-  qed
-qed
-
-lemma distinct_compose:
- assumes "distinct (map fst xs)"
- shows "distinct (map fst (compose xs ys))"
-using assms
-proof (induct xs ys rule: compose.induct)
-  case 1 thus ?case by simp
-next
-  case (2 x xs ys)
-  show ?case
-  proof (cases "map_of ys (snd x)")
-    case None
-    with 2 show ?thesis by simp
-  next
-    case (Some v)
-    with 2 dom_compose [of xs ys] show ?thesis 
-      by (auto)
-  qed
-qed
-
-lemma compose_delete_twist: "(compose (delete k xs) ys) = delete k (compose xs ys)"
-proof (induct xs ys rule: compose.induct)
-  case 1 thus ?case by simp
-next
-  case (2 x xs ys)
-  show ?case
-  proof (cases "map_of ys (snd x)")
-    case None
-    with 2 have 
-      hyp: "compose (delete k (delete (fst x) xs)) ys =
-            delete k (compose (delete (fst x) xs) ys)"
-      by simp
-    show ?thesis
-    proof (cases "fst x = k")
-      case True
-      with None hyp
-      show ?thesis
-        by (simp add: delete_idem)
-    next
-      case False
-      from None False hyp
-      show ?thesis
-        by (simp add: delete_twist)
-    qed
-  next
-    case (Some v)
-    with 2 have hyp: "compose (delete k xs) ys = delete k (compose xs ys)" by simp
-    with Some show ?thesis
-      by simp
-  qed
-qed
-
-lemma compose_clearjunk: "compose xs (clearjunk ys) = compose xs ys"
-  by (induct xs ys rule: compose.induct) 
-     (auto simp add: map_of_clearjunk split: option.splits)
-   
-lemma clearjunk_compose: "clearjunk (compose xs ys) = compose (clearjunk xs) ys"
-  by (induct xs rule: clearjunk.induct)
-     (auto split: option.splits simp add: clearjunk_delete delete_idem
-               compose_delete_twist)
-   
-lemma compose_empty [simp]:
- "compose xs [] = []"
-  by (induct xs) (auto simp add: compose_delete_twist)
-
-lemma compose_Some_iff:
-  "(map_of (compose xs ys) k = Some v) = 
-     (\<exists>k'. map_of xs k = Some k' \<and> map_of ys k' = Some v)" 
-  by (simp add: compose_conv map_comp_Some_iff)
-
-lemma map_comp_None_iff:
-  "(map_of (compose xs ys) k = None) = 
-    (map_of xs k = None \<or> (\<exists>k'. map_of xs k = Some k' \<and> map_of ys k' = None)) " 
-  by (simp add: compose_conv map_comp_None_iff)
-
-end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Library/AList_Impl.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,698 @@
+(*  Title:      HOL/Library/AList_Impl.thy
+    Author:     Norbert Schirmer, Tobias Nipkow, Martin Wildmoser, TU Muenchen
+*)
+
+header {* Implementation of Association Lists *}
+
+theory AList_Impl
+imports Main More_List
+begin
+
+text {*
+  The operations preserve distinctness of keys, and the function
+  @{term "clearjunk"} distributes over them. Since @{term clearjunk}
+  enforces distinctness of keys, it can be used to establish the
+  invariant, e.g. for inductive proofs.
+*}
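+
+text {*
+  For instance, the association list @{text "[(1, v), (1, w), (2, u)]"}
+  (with arbitrary values @{text v}, @{text w}, @{text u}) carries a
+  duplicate key; @{text clearjunk} keeps only the first binding per key
+  and yields @{text "[(1, v), (2, u)]"}, whose list of keys is distinct.
+*}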
+
+subsection {* @{text update} and @{text updates} *}
+
+primrec update :: "'key \<Rightarrow> 'val \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
+    "update k v [] = [(k, v)]"
+  | "update k v (p#ps) = (if fst p = k then (k, v) # ps else p # update k v ps)"
+
+lemma update_conv': "map_of (update k v al)  = (map_of al)(k\<mapsto>v)"
+  by (induct al) (auto simp add: fun_eq_iff)
+
+corollary update_conv: "map_of (update k v al) k' = ((map_of al)(k\<mapsto>v)) k'"
+  by (simp add: update_conv')
+
+lemma dom_update: "fst ` set (update k v al) = {k} \<union> fst ` set al"
+  by (induct al) auto
+
+lemma update_keys:
+  "map fst (update k v al) =
+    (if k \<in> set (map fst al) then map fst al else map fst al @ [k])"
+  by (induct al) simp_all
+
+lemma distinct_update:
+  assumes "distinct (map fst al)" 
+  shows "distinct (map fst (update k v al))"
+  using assms by (simp add: update_keys)
+
+lemma update_filter: 
+  "a\<noteq>k \<Longrightarrow> update k v [q\<leftarrow>ps . fst q \<noteq> a] = [q\<leftarrow>update k v ps . fst q \<noteq> a]"
+  by (induct ps) auto
+
+lemma update_triv: "map_of al k = Some v \<Longrightarrow> update k v al = al"
+  by (induct al) auto
+
+lemma update_nonempty [simp]: "update k v al \<noteq> []"
+  by (induct al) auto
+
+lemma update_eqD: "update k v al = update k v' al' \<Longrightarrow> v = v'"
+proof (induct al arbitrary: al') 
+  case Nil thus ?case 
+    by (cases al') (auto split: split_if_asm)
+next
+  case Cons thus ?case
+    by (cases al') (auto split: split_if_asm)
+qed
+
+lemma update_last [simp]: "update k v (update k v' al) = update k v al"
+  by (induct al) auto
+
+text {* Note that the lists are not necessarily the same:
+        @{term "update k v (update k' v' []) = [(k', v'), (k, v)]"} and 
+        @{term "update k' v' (update k v []) = [(k, v), (k', v')]"}.*}
+lemma update_swap: "k\<noteq>k' 
+  \<Longrightarrow> map_of (update k v (update k' v' al)) = map_of (update k' v' (update k v al))"
+  by (simp add: update_conv' fun_eq_iff)
+
+lemma update_Some_unfold: 
+  "map_of (update k v al) x = Some y \<longleftrightarrow>
+    x = k \<and> v = y \<or> x \<noteq> k \<and> map_of al x = Some y"
+  by (simp add: update_conv' map_upd_Some_unfold)
+
+lemma image_update [simp]:
+  "x \<notin> A \<Longrightarrow> map_of (update x y al) ` A = map_of al ` A"
+  by (simp add: update_conv' image_map_upd)
+
+definition updates :: "'key list \<Rightarrow> 'val list \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
+  "updates ks vs = More_List.fold (prod_case update) (zip ks vs)"
+
+lemma updates_simps [simp]:
+  "updates [] vs ps = ps"
+  "updates ks [] ps = ps"
+  "updates (k#ks) (v#vs) ps = updates ks vs (update k v ps)"
+  by (simp_all add: updates_def)
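+
+text {*
+  For example, @{text "updates [1, 2] [v, w] [] = [(1, v), (2, w)]"};
+  surplus keys without a corresponding value are ignored, e.g.
+  @{text "updates [1, 2, 3] [v, w] [] = [(1, v), (2, w)]"}.
+*}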
+
+lemma updates_key_simp [simp]:
+  "updates (k # ks) vs ps =
+    (case vs of [] \<Rightarrow> ps | v # vs \<Rightarrow> updates ks vs (update k v ps))"
+  by (cases vs) simp_all
+
+lemma updates_conv': "map_of (updates ks vs al) = (map_of al)(ks[\<mapsto>]vs)"
+proof -
+  have "map_of \<circ> More_List.fold (prod_case update) (zip ks vs) =
+    More_List.fold (\<lambda>(k, v) f. f(k \<mapsto> v)) (zip ks vs) \<circ> map_of"
+    by (rule fold_commute) (auto simp add: fun_eq_iff update_conv')
+  then show ?thesis by (auto simp add: updates_def fun_eq_iff map_upds_fold_map_upd foldl_fold split_def)
+qed
+
+lemma updates_conv: "map_of (updates ks vs al) k = ((map_of al)(ks[\<mapsto>]vs)) k"
+  by (simp add: updates_conv')
+
+lemma distinct_updates:
+  assumes "distinct (map fst al)"
+  shows "distinct (map fst (updates ks vs al))"
+proof -
+  have "distinct (More_List.fold
+       (\<lambda>(k, v) al. if k \<in> set al then al else al @ [k])
+       (zip ks vs) (map fst al))"
+    by (rule fold_invariant [of "zip ks vs" "\<lambda>_. True"]) (auto intro: assms)
+  moreover have "map fst \<circ> More_List.fold (prod_case update) (zip ks vs) =
+    More_List.fold (\<lambda>(k, v) al. if k \<in> set al then al else al @ [k]) (zip ks vs) \<circ> map fst"
+    by (rule fold_commute) (simp add: update_keys split_def prod_case_beta comp_def)
+  ultimately show ?thesis by (simp add: updates_def fun_eq_iff)
+qed
+
+lemma updates_append1[simp]: "size ks < size vs \<Longrightarrow>
+  updates (ks@[k]) vs al = update k (vs!size ks) (updates ks vs al)"
+  by (induct ks arbitrary: vs al) (auto split: list.splits)
+
+lemma updates_list_update_drop[simp]:
+ "\<lbrakk>size ks \<le> i; i < size vs\<rbrakk>
+   \<Longrightarrow> updates ks (vs[i:=v]) al = updates ks vs al"
+  by (induct ks arbitrary: al vs i) (auto split:list.splits nat.splits)
+
+lemma update_updates_conv_if: "
+ map_of (updates xs ys (update x y al)) =
+ map_of (if x \<in>  set(take (length ys) xs) then updates xs ys al
+                                  else (update x y (updates xs ys al)))"
+  by (simp add: updates_conv' update_conv' map_upd_upds_conv_if)
+
+lemma updates_twist [simp]:
+ "k \<notin> set ks \<Longrightarrow> 
+  map_of (updates ks vs (update k v al)) = map_of (update k v (updates ks vs al))"
+  by (simp add: updates_conv' update_conv' map_upds_twist)
+
+lemma updates_apply_notin[simp]:
+ "k \<notin> set ks ==> map_of (updates ks vs al) k = map_of al k"
+  by (simp add: updates_conv)
+
+lemma updates_append_drop[simp]:
+  "size xs = size ys \<Longrightarrow> updates (xs@zs) ys al = updates xs ys al"
+  by (induct xs arbitrary: ys al) (auto split: list.splits)
+
+lemma updates_append2_drop[simp]:
+  "size xs = size ys \<Longrightarrow> updates xs (ys@zs) al = updates xs ys al"
+  by (induct xs arbitrary: ys al) (auto split: list.splits)
+
+
+subsection {* @{text delete} *}
+
+definition delete :: "'key \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
+  delete_eq: "delete k = filter (\<lambda>(k', _). k \<noteq> k')"
+
+lemma delete_simps [simp]:
+  "delete k [] = []"
+  "delete k (p#ps) = (if fst p = k then delete k ps else p # delete k ps)"
+  by (auto simp add: delete_eq)
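+
+text {*
+  Note that @{text delete} removes every binding of the key, e.g.
+  @{text "delete 1 [(1, v), (2, w), (1, u)] = [(2, w)]"}.
+*}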
+
+lemma delete_conv': "map_of (delete k al) = (map_of al)(k := None)"
+  by (induct al) (auto simp add: fun_eq_iff)
+
+corollary delete_conv: "map_of (delete k al) k' = ((map_of al)(k := None)) k'"
+  by (simp add: delete_conv')
+
+lemma delete_keys:
+  "map fst (delete k al) = removeAll k (map fst al)"
+  by (simp add: delete_eq removeAll_filter_not_eq filter_map split_def comp_def)
+
+lemma distinct_delete:
+  assumes "distinct (map fst al)" 
+  shows "distinct (map fst (delete k al))"
+  using assms by (simp add: delete_keys distinct_removeAll)
+
+lemma delete_id [simp]: "k \<notin> fst ` set al \<Longrightarrow> delete k al = al"
+  by (auto simp add: image_iff delete_eq filter_id_conv)
+
+lemma delete_idem: "delete k (delete k al) = delete k al"
+  by (simp add: delete_eq)
+
+lemma map_of_delete [simp]:
+    "k' \<noteq> k \<Longrightarrow> map_of (delete k al) k' = map_of al k'"
+  by (simp add: delete_conv')
+
+lemma delete_notin_dom: "k \<notin> fst ` set (delete k al)"
+  by (auto simp add: delete_eq)
+
+lemma dom_delete_subset: "fst ` set (delete k al) \<subseteq> fst ` set al"
+  by (auto simp add: delete_eq)
+
+lemma delete_update_same:
+  "delete k (update k v al) = delete k al"
+  by (induct al) simp_all
+
+lemma delete_update:
+  "k \<noteq> l \<Longrightarrow> delete l (update k v al) = update k v (delete l al)"
+  by (induct al) simp_all
+
+lemma delete_twist: "delete x (delete y al) = delete y (delete x al)"
+  by (simp add: delete_eq conj_commute)
+
+lemma length_delete_le: "length (delete k al) \<le> length al"
+  by (simp add: delete_eq)
+
+
+subsection {* @{text restrict} *}
+
+definition restrict :: "'key set \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
+  restrict_eq: "restrict A = filter (\<lambda>(k, v). k \<in> A)"
+
+lemma restr_simps [simp]:
+  "restrict A [] = []"
+  "restrict A (p#ps) = (if fst p \<in> A then p # restrict A ps else restrict A ps)"
+  by (auto simp add: restrict_eq)
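+
+text {*
+  For example, @{text "restrict {1} [(1, v), (2, w), (1, u)] = [(1, v), (1, u)]"};
+  duplicate bindings of a retained key are kept as they are.
+*}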
+
+lemma restr_conv': "map_of (restrict A al) = ((map_of al)|` A)"
+proof
+  fix k
+  show "map_of (restrict A al) k = ((map_of al)|` A) k"
+    by (induct al) (simp, cases "k \<in> A", auto)
+qed
+
+corollary restr_conv: "map_of (restrict A al) k = ((map_of al)|` A) k"
+  by (simp add: restr_conv')
+
+lemma distinct_restr:
+  "distinct (map fst al) \<Longrightarrow> distinct (map fst (restrict A al))"
+  by (induct al) (auto simp add: restrict_eq)
+
+lemma restr_empty [simp]: 
+  "restrict {} al = []" 
+  "restrict A [] = []"
+  by (induct al) (auto simp add: restrict_eq)
+
+lemma restr_in [simp]: "x \<in> A \<Longrightarrow> map_of (restrict A al) x = map_of al x"
+  by (simp add: restr_conv')
+
+lemma restr_out [simp]: "x \<notin> A \<Longrightarrow> map_of (restrict A al) x = None"
+  by (simp add: restr_conv')
+
+lemma dom_restr [simp]: "fst ` set (restrict A al) = fst ` set al \<inter> A"
+  by (induct al) (auto simp add: restrict_eq)
+
+lemma restr_upd_same [simp]: "restrict (-{x}) (update x y al) = restrict (-{x}) al"
+  by (induct al) (auto simp add: restrict_eq)
+
+lemma restr_restr [simp]: "restrict A (restrict B al) = restrict (A\<inter>B) al"
+  by (induct al) (auto simp add: restrict_eq)
+
+lemma restr_update[simp]:
+ "map_of (restrict D (update x y al)) = 
+  map_of ((if x \<in> D then (update x y (restrict (D-{x}) al)) else restrict D al))"
+  by (simp add: restr_conv' update_conv')
+
+lemma restr_delete [simp]:
+  "(delete x (restrict D al)) = 
+    (if x \<in> D then restrict (D - {x}) al else restrict D al)"
+apply (simp add: delete_eq restrict_eq)
+apply (auto simp add: split_def)
+proof -
+  have "\<And>y. y \<noteq> x \<longleftrightarrow> x \<noteq> y" by auto
+  then show "[p \<leftarrow> al. fst p \<in> D \<and> x \<noteq> fst p] = [p \<leftarrow> al. fst p \<in> D \<and> fst p \<noteq> x]"
+    by simp
+  assume "x \<notin> D"
+  then have "\<And>y. y \<in> D \<longleftrightarrow> y \<in> D \<and> x \<noteq> y" by auto
+  then show "[p \<leftarrow> al . fst p \<in> D \<and> x \<noteq> fst p] = [p \<leftarrow> al . fst p \<in> D]"
+    by simp
+qed
+
+lemma update_restr:
+ "map_of (update x y (restrict D al)) = map_of (update x y (restrict (D-{x}) al))"
+  by (simp add: update_conv' restr_conv') (rule fun_upd_restrict)
+
+lemma update_restr_conv [simp]:
+ "x \<in> D \<Longrightarrow> 
+ map_of (update x y (restrict D al)) = map_of (update x y (restrict (D-{x}) al))"
+  by (simp add: update_conv' restr_conv')
+
+lemma restr_updates [simp]: "
+ \<lbrakk> length xs = length ys; set xs \<subseteq> D \<rbrakk>
+ \<Longrightarrow> map_of (restrict D (updates xs ys al)) = 
+     map_of (updates xs ys (restrict (D - set xs) al))"
+  by (simp add: updates_conv' restr_conv')
+
+lemma restr_delete_twist: "(restrict A (delete a ps)) = delete a (restrict A ps)"
+  by (induct ps) auto
+
+
+subsection {* @{text clearjunk} *}
+
+function clearjunk  :: "('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
+    "clearjunk [] = []"  
+  | "clearjunk (p#ps) = p # clearjunk (delete (fst p) ps)"
+  by pat_completeness auto
+termination by (relation "measure length")
+  (simp_all add: less_Suc_eq_le length_delete_le)
+
+lemma map_of_clearjunk:
+  "map_of (clearjunk al) = map_of al"
+  by (induct al rule: clearjunk.induct)
+    (simp_all add: fun_eq_iff)
+
+lemma clearjunk_keys_set:
+  "set (map fst (clearjunk al)) = set (map fst al)"
+  by (induct al rule: clearjunk.induct)
+    (simp_all add: delete_keys)
+
+lemma dom_clearjunk:
+  "fst ` set (clearjunk al) = fst ` set al"
+  using clearjunk_keys_set by simp
+
+lemma distinct_clearjunk [simp]:
+  "distinct (map fst (clearjunk al))"
+  by (induct al rule: clearjunk.induct)
+    (simp_all del: set_map add: clearjunk_keys_set delete_keys)
+
+lemma ran_clearjunk:
+  "ran (map_of (clearjunk al)) = ran (map_of al)"
+  by (simp add: map_of_clearjunk)
+
+lemma ran_map_of:
+  "ran (map_of al) = snd ` set (clearjunk al)"
+proof -
+  have "ran (map_of al) = ran (map_of (clearjunk al))"
+    by (simp add: ran_clearjunk)
+  also have "\<dots> = snd ` set (clearjunk al)"
+    by (simp add: ran_distinct)
+  finally show ?thesis .
+qed
+
+lemma clearjunk_update:
+  "clearjunk (update k v al) = update k v (clearjunk al)"
+  by (induct al rule: clearjunk.induct)
+    (simp_all add: delete_update)
+
+lemma clearjunk_updates:
+  "clearjunk (updates ks vs al) = updates ks vs (clearjunk al)"
+proof -
+  have "clearjunk \<circ> More_List.fold (prod_case update) (zip ks vs) =
+    More_List.fold (prod_case update) (zip ks vs) \<circ> clearjunk"
+    by (rule fold_commute) (simp add: clearjunk_update prod_case_beta o_def)
+  then show ?thesis by (simp add: updates_def fun_eq_iff)
+qed
+
+lemma clearjunk_delete:
+  "clearjunk (delete x al) = delete x (clearjunk al)"
+  by (induct al rule: clearjunk.induct) (auto simp add: delete_idem delete_twist)
+
+lemma clearjunk_restrict:
+ "clearjunk (restrict A al) = restrict A (clearjunk al)"
+  by (induct al rule: clearjunk.induct) (auto simp add: restr_delete_twist)
+
+lemma distinct_clearjunk_id [simp]:
+  "distinct (map fst al) \<Longrightarrow> clearjunk al = al"
+  by (induct al rule: clearjunk.induct) auto
+
+lemma clearjunk_idem:
+  "clearjunk (clearjunk al) = clearjunk al"
+  by simp
+
+lemma length_clearjunk:
+  "length (clearjunk al) \<le> length al"
+proof (induct al rule: clearjunk.induct [case_names Nil Cons])
+  case Nil then show ?case by simp
+next
+  case (Cons kv al)
+  moreover have "length (delete (fst kv) al) \<le> length al" by (fact length_delete_le)
+  ultimately have "length (clearjunk (delete (fst kv) al)) \<le> length al" by (rule order_trans)
+  then show ?case by simp
+qed
+
+lemma delete_map:
+  assumes "\<And>kv. fst (f kv) = fst kv"
+  shows "delete k (map f ps) = map f (delete k ps)"
+  by (simp add: delete_eq filter_map comp_def split_def assms)
+
+lemma clearjunk_map:
+  assumes "\<And>kv. fst (f kv) = fst kv"
+  shows "clearjunk (map f ps) = map f (clearjunk ps)"
+  by (induct ps rule: clearjunk.induct [case_names Nil Cons])
+    (simp_all add: clearjunk_delete delete_map assms)
+
+
+subsection {* @{text map_ran} *}
+
+definition map_ran :: "('key \<Rightarrow> 'val \<Rightarrow> 'val) \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
+  "map_ran f = map (\<lambda>(k, v). (k, f k v))"
+
+lemma map_ran_simps [simp]:
+  "map_ran f [] = []"
+  "map_ran f ((k, v) # ps) = (k, f k v) # map_ran f ps"
+  by (simp_all add: map_ran_def)
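+
+text {*
+  For example, @{text "map_ran f [(1, v), (2, w)] = [(1, f 1 v), (2, f 2 w)]"};
+  the keys are left unchanged.
+*}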
+
+lemma dom_map_ran:
+  "fst ` set (map_ran f al) = fst ` set al"
+  by (simp add: map_ran_def image_image split_def)
+  
+lemma map_ran_conv:
+  "map_of (map_ran f al) k = Option.map (f k) (map_of al k)"
+  by (induct al) auto
+
+lemma distinct_map_ran:
+  "distinct (map fst al) \<Longrightarrow> distinct (map fst (map_ran f al))"
+  by (simp add: map_ran_def split_def comp_def)
+
+lemma map_ran_filter:
+  "map_ran f [p\<leftarrow>ps. fst p \<noteq> a] = [p\<leftarrow>map_ran f ps. fst p \<noteq> a]"
+  by (simp add: map_ran_def filter_map split_def comp_def)
+
+lemma clearjunk_map_ran:
+  "clearjunk (map_ran f al) = map_ran f (clearjunk al)"
+  by (simp add: map_ran_def split_def clearjunk_map)
+
+
+subsection {* @{text merge} *}
+
+definition merge :: "('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list" where
+  "merge qs ps = foldr (\<lambda>(k, v). update k v) ps qs"
+
+lemma merge_simps [simp]:
+  "merge qs [] = qs"
+  "merge qs (p#ps) = update (fst p) (snd p) (merge qs ps)"
+  by (simp_all add: merge_def split_def)
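+
+text {*
+  Bindings in the second argument take precedence, e.g.
+  @{text "merge [(1, v), (2, w)] [(1, u)] = [(1, u), (2, w)]"}.
+*}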
+
+lemma merge_updates:
+  "merge qs ps = updates (rev (map fst ps)) (rev (map snd ps)) qs"
+  by (simp add: merge_def updates_def foldr_fold_rev zip_rev zip_map_fst_snd)
+
+lemma dom_merge: "fst ` set (merge xs ys) = fst ` set xs \<union> fst ` set ys"
+  by (induct ys arbitrary: xs) (auto simp add: dom_update)
+
+lemma distinct_merge:
+  assumes "distinct (map fst xs)"
+  shows "distinct (map fst (merge xs ys))"
+using assms by (simp add: merge_updates distinct_updates)
+
+lemma clearjunk_merge:
+  "clearjunk (merge xs ys) = merge (clearjunk xs) ys"
+  by (simp add: merge_updates clearjunk_updates)
+
+lemma merge_conv':
+  "map_of (merge xs ys) = map_of xs ++ map_of ys"
+proof -
+  have "map_of \<circ> More_List.fold (prod_case update) (rev ys) =
+    More_List.fold (\<lambda>(k, v) m. m(k \<mapsto> v)) (rev ys) \<circ> map_of"
+    by (rule fold_commute) (simp add: update_conv' prod_case_beta split_def fun_eq_iff)
+  then show ?thesis
+    by (simp add: merge_def map_add_map_of_foldr foldr_fold_rev fun_eq_iff)
+qed
+
+corollary merge_conv:
+  "map_of (merge xs ys) k = (map_of xs ++ map_of ys) k"
+  by (simp add: merge_conv')
+
+lemma merge_empty: "map_of (merge [] ys) = map_of ys"
+  by (simp add: merge_conv')
+
+lemma merge_assoc[simp]: "map_of (merge m1 (merge m2 m3)) = 
+                           map_of (merge (merge m1 m2) m3)"
+  by (simp add: merge_conv')
+
+lemma merge_Some_iff: 
+ "(map_of (merge m n) k = Some x) = 
+  (map_of n k = Some x \<or> map_of n k = None \<and> map_of m k = Some x)"
+  by (simp add: merge_conv' map_add_Some_iff)
+
+lemmas merge_SomeD [dest!] = merge_Some_iff [THEN iffD1]
+
+lemma merge_find_right[simp]: "map_of n k = Some v \<Longrightarrow> map_of (merge m n) k = Some v"
+  by (simp add: merge_conv')
+
+lemma merge_None [iff]: 
+  "(map_of (merge m n) k = None) = (map_of n k = None \<and> map_of m k = None)"
+  by (simp add: merge_conv')
+
+lemma merge_upd[simp]: 
+  "map_of (merge m (update k v n)) = map_of (update k v (merge m n))"
+  by (simp add: update_conv' merge_conv')
+
+lemma merge_updatess[simp]: 
+  "map_of (merge m (updates xs ys n)) = map_of (updates xs ys (merge m n))"
+  by (simp add: updates_conv' merge_conv')
+
+lemma merge_append: "map_of (xs@ys) = map_of (merge ys xs)"
+  by (simp add: merge_conv')
+
+
+subsection {* @{text compose} *}
+
+function compose :: "('key \<times> 'a) list \<Rightarrow> ('a \<times> 'b) list \<Rightarrow> ('key \<times> 'b) list" where
+    "compose [] ys = []"
+  | "compose (x#xs) ys = (case map_of ys (snd x)
+       of None \<Rightarrow> compose (delete (fst x) xs) ys
+        | Some v \<Rightarrow> (fst x, v) # compose xs ys)"
+  by pat_completeness auto
+termination by (relation "measure (length \<circ> fst)")
+  (simp_all add: less_Suc_eq_le length_delete_le)
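+
+text {*
+  For example, @{text "compose [(1, 2), (3, 4)] [(2, v)] = [(1, v)]"};
+  the key @{text 3} disappears because @{text 4} has no binding
+  in the second list.
+*}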
+
+lemma compose_first_None [simp]: 
+  assumes "map_of xs k = None" 
+  shows "map_of (compose xs ys) k = None"
+using assms by (induct xs ys rule: compose.induct)
+  (auto split: option.splits split_if_asm)
+
+lemma compose_conv: 
+  shows "map_of (compose xs ys) k = (map_of ys \<circ>\<^sub>m map_of xs) k"
+proof (induct xs ys rule: compose.induct)
+  case 1 then show ?case by simp
+next
+  case (2 x xs ys) show ?case
+  proof (cases "map_of ys (snd x)")
+    case None with 2
+    have hyp: "map_of (compose (delete (fst x) xs) ys) k =
+               (map_of ys \<circ>\<^sub>m map_of (delete (fst x) xs)) k"
+      by simp
+    show ?thesis
+    proof (cases "fst x = k")
+      case True
+      from True delete_notin_dom [of k xs]
+      have "map_of (delete (fst x) xs) k = None"
+        by (simp add: map_of_eq_None_iff)
+      with hyp show ?thesis
+        using True None
+        by simp
+    next
+      case False
+      from False have "map_of (delete (fst x) xs) k = map_of xs k"
+        by simp
+      with hyp show ?thesis
+        using False None
+        by (simp add: map_comp_def)
+    qed
+  next
+    case (Some v)
+    with 2
+    have "map_of (compose xs ys) k = (map_of ys \<circ>\<^sub>m map_of xs) k"
+      by simp
+    with Some show ?thesis
+      by (auto simp add: map_comp_def)
+  qed
+qed
+   
+lemma compose_conv': 
+  shows "map_of (compose xs ys) = (map_of ys \<circ>\<^sub>m map_of xs)"
+  by (rule ext) (rule compose_conv)
+
+lemma compose_first_Some [simp]:
+  assumes "map_of xs k = Some v" 
+  shows "map_of (compose xs ys) k = map_of ys v"
+using assms by (simp add: compose_conv)
+
+lemma dom_compose: "fst ` set (compose xs ys) \<subseteq> fst ` set xs"
+proof (induct xs ys rule: compose.induct)
+  case 1 thus ?case by simp
+next
+  case (2 x xs ys)
+  show ?case
+  proof (cases "map_of ys (snd x)")
+    case None
+    with "2.hyps"
+    have "fst ` set (compose (delete (fst x) xs) ys) \<subseteq> fst ` set (delete (fst x) xs)"
+      by simp
+    also
+    have "\<dots> \<subseteq> fst ` set xs"
+      by (rule dom_delete_subset)
+    finally show ?thesis
+      using None
+      by auto
+  next
+    case (Some v)
+    with "2.hyps"
+    have "fst ` set (compose xs ys) \<subseteq> fst ` set xs"
+      by simp
+    with Some show ?thesis
+      by auto
+  qed
+qed
+
+lemma distinct_compose:
+ assumes "distinct (map fst xs)"
+ shows "distinct (map fst (compose xs ys))"
+using assms
+proof (induct xs ys rule: compose.induct)
+  case 1 thus ?case by simp
+next
+  case (2 x xs ys)
+  show ?case
+  proof (cases "map_of ys (snd x)")
+    case None
+    with 2 show ?thesis by simp
+  next
+    case (Some v)
+    with 2 dom_compose [of xs ys] show ?thesis 
+      by (auto)
+  qed
+qed
+
+lemma compose_delete_twist: "(compose (delete k xs) ys) = delete k (compose xs ys)"
+proof (induct xs ys rule: compose.induct)
+  case 1 thus ?case by simp
+next
+  case (2 x xs ys)
+  show ?case
+  proof (cases "map_of ys (snd x)")
+    case None
+    with 2 have 
+      hyp: "compose (delete k (delete (fst x) xs)) ys =
+            delete k (compose (delete (fst x) xs) ys)"
+      by simp
+    show ?thesis
+    proof (cases "fst x = k")
+      case True
+      with None hyp
+      show ?thesis
+        by (simp add: delete_idem)
+    next
+      case False
+      from None False hyp
+      show ?thesis
+        by (simp add: delete_twist)
+    qed
+  next
+    case (Some v)
+    with 2 have hyp: "compose (delete k xs) ys = delete k (compose xs ys)" by simp
+    with Some show ?thesis
+      by simp
+  qed
+qed
+
+lemma compose_clearjunk: "compose xs (clearjunk ys) = compose xs ys"
+  by (induct xs ys rule: compose.induct) 
+     (auto simp add: map_of_clearjunk split: option.splits)
+   
+lemma clearjunk_compose: "clearjunk (compose xs ys) = compose (clearjunk xs) ys"
+  by (induct xs rule: clearjunk.induct)
+     (auto split: option.splits simp add: clearjunk_delete delete_idem
+               compose_delete_twist)
+   
+lemma compose_empty [simp]:
+ "compose xs [] = []"
+  by (induct xs) (auto simp add: compose_delete_twist)
+
+lemma compose_Some_iff:
+  "(map_of (compose xs ys) k = Some v) = 
+     (\<exists>k'. map_of xs k = Some k' \<and> map_of ys k' = Some v)" 
+  by (simp add: compose_conv map_comp_Some_iff)
+
+lemma map_comp_None_iff:
+  "(map_of (compose xs ys) k = None) = 
+    (map_of xs k = None \<or> (\<exists>k'. map_of xs k = Some k' \<and> map_of ys k' = None)) " 
+  by (simp add: compose_conv map_comp_None_iff)
+
+subsection {* @{text map_entry} *}
+
+fun map_entry :: "'key \<Rightarrow> ('val \<Rightarrow> 'val) \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list"
+where
+  "map_entry k f [] = []"
+| "map_entry k f (p # ps) = (if fst p = k then (k, f (snd p)) # ps else p # map_entry k f ps)"
+
+lemma map_of_map_entry:
+  "map_of (map_entry k f xs) = (map_of xs)(k := case map_of xs k of None => None | Some v' => Some (f v'))"
+by (induct xs) auto
+
+lemma dom_map_entry:
+  "fst ` set (map_entry k f xs) = fst ` set xs"
+by (induct xs) auto
+
+lemma distinct_map_entry:
+  assumes "distinct (map fst xs)"
+  shows "distinct (map fst (map_entry k f xs))"
+using assms by (induct xs) (auto simp add: dom_map_entry)
+
+subsection {* @{text map_default} *}
+
+fun map_default :: "'key \<Rightarrow> 'val \<Rightarrow> ('val \<Rightarrow> 'val) \<Rightarrow> ('key \<times> 'val) list \<Rightarrow> ('key \<times> 'val) list"
+where
+  "map_default k v f [] = [(k, v)]"
+| "map_default k v f (p # ps) = (if fst p = k then (k, f (snd p)) # ps else p # map_default k v f ps)"
+
+lemma map_of_map_default:
+  "map_of (map_default k v f xs) = (map_of xs)(k := case map_of xs k of None => Some v | Some v' => Some (f v'))"
+by (induct xs) auto
+
+lemma dom_map_default:
+  "fst ` set (map_default k v f xs) = insert k (fst ` set xs)" 
+by (induct xs) auto
+
+lemma distinct_map_default:
+  assumes "distinct (map fst xs)"
+  shows "distinct (map fst (map_default k v f xs))"
+using assms by (induct xs) (auto simp add: dom_map_default)
+
+hide_const (open) map_entry
+
+end
--- a/src/HOL/Library/AList_Mapping.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Library/AList_Mapping.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -5,7 +5,7 @@
 header {* Implementation of mappings with Association Lists *}
 
 theory AList_Mapping
-imports AList Mapping
+imports AList_Impl Mapping
 begin
 
 definition Mapping :: "('a \<times> 'b) list \<Rightarrow> ('a, 'b) mapping" where
--- a/src/HOL/Library/Multiset.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Library/Multiset.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -1103,7 +1103,7 @@
 begin
 
 definition
-  "HOL.equal A B \<longleftrightarrow> (A::'a multiset) \<le> B \<and> B \<le> A"
+  [code]: "HOL.equal A B \<longleftrightarrow> (A::'a multiset) \<le> B \<and> B \<le> A"
 
 instance proof
 qed (simp add: equal_multiset_def eq_iff)
--- a/src/HOL/List.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/List.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -1354,6 +1354,10 @@
 apply (case_tac n, auto)
 done
 
+lemma nth_tl:
+  assumes "n < length (tl x)" shows "tl x ! n = x ! Suc n"
+using assms by (induct x) auto
+
 lemma hd_conv_nth: "xs \<noteq> [] \<Longrightarrow> hd xs = xs!0"
 by(cases xs) simp_all
 
@@ -1545,6 +1549,12 @@
 lemma last_appendR[simp]: "ys \<noteq> [] \<Longrightarrow> last(xs @ ys) = last ys"
 by(simp add:last_append)
 
+lemma last_tl: "xs = [] \<or> tl xs \<noteq> [] \<Longrightarrow> last (tl xs) = last xs"
+by (induct xs) simp_all
+
+lemma butlast_tl: "butlast (tl xs) = tl (butlast xs)"
+by (induct xs) simp_all
+
 lemma hd_rev: "xs \<noteq> [] \<Longrightarrow> hd(rev xs) = last xs"
 by(rule rev_exhaust[of xs]) simp_all
 
@@ -1578,6 +1588,15 @@
 apply (auto split:nat.split)
 done
 
+lemma nth_butlast:
+  assumes "n < length (butlast xs)" shows "butlast xs ! n = xs ! n"
+proof (cases xs)
+  case (Cons y ys)
+  moreover from assms have "butlast xs ! n = (butlast xs @ [last xs]) ! n"
+    by (simp add: nth_append)
+  ultimately show ?thesis using append_butlast_last_id by simp
+qed simp
+
 lemma last_conv_nth: "xs\<noteq>[] \<Longrightarrow> last xs = xs!(length xs - 1)"
 by(induct xs)(auto simp:neq_Nil_conv)
 
@@ -1899,6 +1918,17 @@
 "(!!x. x:set xs ==> P(x)) ==> dropWhile P (xs @ ys) = dropWhile P ys"
 by (induct xs) auto
 
+lemma dropWhile_append3:
+  "\<not> P y \<Longrightarrow>dropWhile P (xs @ y # ys) = dropWhile P xs @ y # ys"
+by (induct xs) auto
+
+lemma dropWhile_last:
+  "x \<in> set xs \<Longrightarrow> \<not> P x \<Longrightarrow> last (dropWhile P xs) = last xs"
+by (auto simp add: dropWhile_append3 in_set_conv_decomp)
+
+lemma set_dropWhileD: "x \<in> set (dropWhile P xs) \<Longrightarrow> x \<in> set xs"
+by (induct xs) (auto split: split_if_asm)
+
 lemma set_takeWhileD: "x : set (takeWhile P xs) ==> x : set xs \<and> P x"
 by (induct xs) (auto split: split_if_asm)
 
@@ -2890,6 +2920,30 @@
 apply (metis Cons_eq_appendI eq_Nil_appendI split_list)
 done
 
+lemma not_distinct_conv_prefix:
+  defines "dec as xs y ys \<equiv> y \<in> set xs \<and> distinct xs \<and> as = xs @ y # ys"
+  shows "\<not>distinct as \<longleftrightarrow> (\<exists>xs y ys. dec as xs y ys)" (is "?L = ?R")
+proof
+  assume "?L" then show "?R"
+  proof (induct "length as" arbitrary: as rule: less_induct)
+    case less
+    obtain xs ys zs y where decomp: "as = (xs @ y # ys) @ y # zs"
+      using not_distinct_decomp[OF less.prems] by auto
+    show ?case
+    proof (cases "distinct (xs @ y # ys)")
+      case True
+      with decomp have "dec as (xs @ y # ys) y zs" by (simp add: dec_def)
+      then show ?thesis by blast
+    next
+      case False
+      with less decomp obtain xs' y' ys' where "dec (xs @ y # ys) xs' y' ys'"
+        by atomize_elim auto
+      with decomp have "dec as xs' y' (ys' @ y # zs)" by (simp add: dec_def)
+      then show ?thesis by blast
+    qed
+  qed
+qed (auto simp: dec_def)
+
 lemma length_remdups_concat:
   "length (remdups (concat xss)) = card (\<Union>xs\<in>set xss. set xs)"
   by (simp add: distinct_card [symmetric])
--- a/src/HOL/Nominal/nominal_atoms.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Nominal/nominal_atoms.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -99,7 +99,7 @@
     
     val (_,thy1) = 
     fold_map (fn ak => fn thy => 
-          let val dt = ([], Binding.name ak, NoSyn, [(Binding.name ak, [@{typ nat}], NoSyn)])
+          let val dt = ((Binding.name ak, [], NoSyn), [(Binding.name ak, [@{typ nat}], NoSyn)])
               val (dt_names, thy1) = Datatype.add_datatype Datatype.default_config [dt] thy;
             
               val injects = maps (#inject o Datatype.the_info thy1) dt_names;
--- a/src/HOL/Nominal/nominal_datatype.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Nominal/nominal_datatype.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -6,9 +6,8 @@
 
 signature NOMINAL_DATATYPE =
 sig
-  val add_nominal_datatype : Datatype.config ->
-    (string list * binding * mixfix * (binding * string list * mixfix) list) list ->
-    theory -> theory
+  val nominal_datatype : Datatype.config -> Datatype.spec list -> theory -> theory
+  val nominal_datatype_cmd : Datatype.config -> Datatype.spec_cmd list -> theory -> theory
   type descr
   type nominal_datatype_info
   val get_nominal_datatypes : theory -> nominal_datatype_info Symtab.table
@@ -37,18 +36,17 @@
 val Collect_False_empty = @{thm empty_def [THEN sym, THEN eq_reflection]};
 val empty_iff = @{thm empty_iff};
 
-open Datatype_Aux;
 open NominalAtoms;
 
 (** FIXME: Datatype should export this function **)
 
 local
 
-fun dt_recs (DtTFree _) = []
-  | dt_recs (DtType (_, dts)) = maps dt_recs dts
-  | dt_recs (DtRec i) = [i];
+fun dt_recs (Datatype_Aux.DtTFree _) = []
+  | dt_recs (Datatype_Aux.DtType (_, dts)) = maps dt_recs dts
+  | dt_recs (Datatype_Aux.DtRec i) = [i];
 
-fun dt_cases (descr: descr) (_, args, constrs) =
+fun dt_cases (descr: Datatype_Aux.descr) (_, args, constrs) =
   let
     fun the_bname i = Long_Name.base_name (#1 (the (AList.lookup (op =) descr i)));
     val bnames = map the_bname (distinct op = (maps dt_recs args));
@@ -72,7 +70,9 @@
 
 (* theory data *)
 
-type descr = (int * (string * dtyp list * (string * (dtyp list * dtyp) list) list)) list;
+type descr =
+  (int * (string * Datatype_Aux.dtyp list *
+      (string * (Datatype_Aux.dtyp list * Datatype_Aux.dtyp) list) list)) list;
 
 type nominal_datatype_info =
   {index : int,
@@ -186,30 +186,16 @@
 fun fresh_star_const T U =
   Const ("Nominal.fresh_star", HOLogic.mk_setT T --> U --> HOLogic.boolT);
 
-fun gen_add_nominal_datatype prep_typ config dts thy =
+fun gen_nominal_datatype prep_specs config dts thy =
   let
-    val new_type_names = map (Binding.name_of o #2) dts;
-
+    val new_type_names = map (fn ((tname, _, _), _) => Binding.name_of tname) dts;
 
-    (* this theory is used just for parsing *)
-
-    val tmp_thy = thy |>
-      Theory.copy |>
-      Sign.add_types_global (map (fn (tvs, tname, mx, _) => (tname, length tvs, mx)) dts);
+    val (dts', _) = prep_specs dts thy;
 
     val atoms = atoms_of thy;
 
-    fun prep_constr (cname, cargs, mx) (constrs, sorts) =
-      let val (cargs', sorts') = fold_map (prep_typ tmp_thy) cargs sorts
-      in (constrs @ [(cname, cargs', mx)], sorts') end
-
-    fun prep_dt_spec (tvs, tname, mx, constrs) (dts, sorts) =
-      let val (constrs', sorts') = fold prep_constr constrs ([], sorts)
-      in (dts @ [(tvs, tname, mx, constrs')], sorts') end
-
-    val (dts', sorts) = fold prep_dt_spec dts ([], []);
-    val tyvars = map (map (fn s =>
-      (s, the (AList.lookup (op =) sorts s))) o #1) dts';
+    val tyvars = map (fn ((_, tvs, _), _) => tvs) dts';
+    val sorts = flat tyvars;
 
     fun inter_sort thy S S' = Sign.inter_sort thy (S, S');
     fun augment_sort_typ thy S =
@@ -219,12 +205,12 @@
       end;
     fun augment_sort thy S = map_types (augment_sort_typ thy S);
 
-    val types_syntax = map (fn (tvs, tname, mx, constrs) => (tname, mx)) dts';
-    val constr_syntax = map (fn (tvs, tname, mx, constrs) =>
+    val types_syntax = map (fn ((tname, tvs, mx), constrs) => (tname, mx)) dts';
+    val constr_syntax = map (fn (_, constrs) =>
       map (fn (cname, cargs, mx) => (cname, mx)) constrs) dts';
 
-    val ps = map (fn (_, n, _, _) =>
-      (Sign.full_name tmp_thy n, Sign.full_name tmp_thy (Binding.suffix_name "_Rep" n))) dts;
+    val ps = map (fn ((n, _, _), _) =>
+      (Sign.full_name thy n, Sign.full_name thy (Binding.suffix_name "_Rep" n))) dts;
     val rps = map Library.swap ps;
 
     fun replace_types (Type ("Nominal.ABS", [T, U])) =
@@ -233,8 +219,8 @@
           Type (the_default s (AList.lookup op = ps s), map replace_types Ts)
       | replace_types T = T;
 
-    val dts'' = map (fn (tvs, tname, mx, constrs) =>
-      (tvs, Binding.suffix_name "_Rep" tname, NoSyn,
+    val dts'' = map (fn ((tname, tvs, mx), constrs) =>
+      ((Binding.suffix_name "_Rep" tname, tvs, NoSyn),
         map (fn (cname, cargs, mx) => (Binding.suffix_name "_Rep" cname,
           map replace_types cargs, NoSyn)) constrs)) dts';
 
@@ -243,7 +229,7 @@
     val (full_new_type_names',thy1) = Datatype.add_datatype config dts'' thy;
 
     val {descr, induct, ...} = Datatype.the_info thy1 (hd full_new_type_names');
-    fun nth_dtyp i = typ_of_dtyp descr (DtRec i);
+    fun nth_dtyp i = Datatype_Aux.typ_of_dtyp descr (Datatype_Aux.DtRec i);
 
     val big_name = space_implode "_" new_type_names;
 
@@ -256,7 +242,7 @@
       let val T = nth_dtyp i
       in permT --> T --> T end) descr;
     val perm_names' = Datatype_Prop.indexify_names (map (fn (i, _) =>
-      "perm_" ^ name_of_typ (nth_dtyp i)) descr);
+      "perm_" ^ Datatype_Aux.name_of_typ (nth_dtyp i)) descr);
     val perm_names = replicate (length new_type_names) "Nominal.perm" @
       map (Sign.full_bname thy1) (List.drop (perm_names', length new_type_names));
     val perm_names_types = perm_names ~~ perm_types;
@@ -266,16 +252,16 @@
       let val T = nth_dtyp i
       in map (fn (cname, dts) =>
         let
-          val Ts = map (typ_of_dtyp descr) dts;
+          val Ts = map (Datatype_Aux.typ_of_dtyp descr) dts;
           val names = Name.variant_list ["pi"] (Datatype_Prop.make_tnames Ts);
           val args = map Free (names ~~ Ts);
           val c = Const (cname, Ts ---> T);
           fun perm_arg (dt, x) =
             let val T = type_of x
-            in if is_rec_type dt then
+            in if Datatype_Aux.is_rec_type dt then
                 let val Us = binder_types T
                 in list_abs (map (pair "x") Us,
-                  Free (nth perm_names_types' (body_index dt)) $ pi $
+                  Free (nth perm_names_types' (Datatype_Aux.body_index dt)) $ pi $
                     list_comb (x, map (fn (i, U) =>
                       Const ("Nominal.perm", permT --> U --> U) $
                         (Const ("List.rev", permT --> permT) $ pi) $
@@ -309,7 +295,7 @@
 
     val unfolded_perm_eq_thms =
       if length descr = length new_type_names then []
-      else map Drule.export_without_context (List.drop (split_conj_thm
+      else map Drule.export_without_context (List.drop (Datatype_Aux.split_conj_thm
         (Goal.prove_global thy2 [] []
           (HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
             (map (fn (c as (s, T), x) =>
@@ -329,7 +315,7 @@
 
     val perm_empty_thms = maps (fn a =>
       let val permT = mk_permT (Type (a, []))
-      in map Drule.export_without_context (List.take (split_conj_thm
+      in map Drule.export_without_context (List.take (Datatype_Aux.split_conj_thm
         (Goal.prove_global thy2 [] []
           (augment_sort thy2 [pt_class_of thy2 a]
             (HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
@@ -361,7 +347,7 @@
         val pt_inst = pt_inst_of thy2 a;
         val pt2' = pt_inst RS pt2;
         val pt2_ax = Global_Theory.get_thm thy2 (Long_Name.map_base_name (fn s => "pt_" ^ s ^ "2") a);
-      in List.take (map Drule.export_without_context (split_conj_thm
+      in List.take (map Drule.export_without_context (Datatype_Aux.split_conj_thm
         (Goal.prove_global thy2 [] []
            (augment_sort thy2 [pt_class_of thy2 a]
              (HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
@@ -396,7 +382,7 @@
         val pt3' = pt_inst RS pt3;
         val pt3_rev' = at_inst RS (pt_inst RS pt3_rev);
         val pt3_ax = Global_Theory.get_thm thy2 (Long_Name.map_base_name (fn s => "pt_" ^ s ^ "3") a);
-      in List.take (map Drule.export_without_context (split_conj_thm
+      in List.take (map Drule.export_without_context (Datatype_Aux.split_conj_thm
         (Goal.prove_global thy2 [] []
           (augment_sort thy2 [pt_class_of thy2 a] (Logic.mk_implies
              (HOLogic.mk_Trueprop (Const ("Nominal.prm_eq",
@@ -447,7 +433,7 @@
                 at_inst RS (pt_inst RS pt_perm_compose_rev) RS sym]
             end))
         val sort = Sign.minimize_sort thy (Sign.certify_sort thy (cp_class :: pt_class));
-        val thms = split_conj_thm (Goal.prove_global thy [] []
+        val thms = Datatype_Aux.split_conj_thm (Goal.prove_global thy [] []
           (augment_sort thy sort
             (HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
               (map (fn ((s, T), x) =>
@@ -499,24 +485,26 @@
 
     val _ = warning "representing sets";
 
-    val rep_set_names = Datatype_Prop.indexify_names
-      (map (fn (i, _) => name_of_typ (nth_dtyp i) ^ "_set") descr);
+    val rep_set_names =
+      Datatype_Prop.indexify_names
+        (map (fn (i, _) => Datatype_Aux.name_of_typ (nth_dtyp i) ^ "_set") descr);
     val big_rep_name =
       space_implode "_" (Datatype_Prop.indexify_names (map_filter
         (fn (i, ("Nominal.noption", _, _)) => NONE
-          | (i, _) => SOME (name_of_typ (nth_dtyp i))) descr)) ^ "_set";
+          | (i, _) => SOME (Datatype_Aux.name_of_typ (nth_dtyp i))) descr)) ^ "_set";
     val _ = warning ("big_rep_name: " ^ big_rep_name);
 
-    fun strip_option (dtf as DtType ("fun", [dt, DtRec i])) =
+    fun strip_option (dtf as Datatype_Aux.DtType ("fun", [dt, Datatype_Aux.DtRec i])) =
           (case AList.lookup op = descr i of
              SOME ("Nominal.noption", _, [(_, [dt']), _]) =>
                apfst (cons dt) (strip_option dt')
            | _ => ([], dtf))
-      | strip_option (DtType ("fun", [dt, DtType ("Nominal.noption", [dt'])])) =
+      | strip_option (Datatype_Aux.DtType ("fun",
+            [dt, Datatype_Aux.DtType ("Nominal.noption", [dt'])])) =
           apfst (cons dt) (strip_option dt')
       | strip_option dt = ([], dt);
 
-    val dt_atomTs = distinct op = (map (typ_of_dtyp descr)
+    val dt_atomTs = distinct op = (map (Datatype_Aux.typ_of_dtyp descr)
       (maps (fn (_, (_, _, cs)) => maps (maps (fst o strip_option) o snd) cs) descr));
     val dt_atoms = map (fst o dest_Type) dt_atomTs;
 
@@ -525,20 +513,20 @@
         fun mk_prem dt (j, j', prems, ts) =
           let
             val (dts, dt') = strip_option dt;
-            val (dts', dt'') = strip_dtyp dt';
-            val Ts = map (typ_of_dtyp descr) dts;
-            val Us = map (typ_of_dtyp descr) dts';
-            val T = typ_of_dtyp descr dt'';
-            val free = mk_Free "x" (Us ---> T) j;
-            val free' = app_bnds free (length Us);
+            val (dts', dt'') = Datatype_Aux.strip_dtyp dt';
+            val Ts = map (Datatype_Aux.typ_of_dtyp descr) dts;
+            val Us = map (Datatype_Aux.typ_of_dtyp descr) dts';
+            val T = Datatype_Aux.typ_of_dtyp descr dt'';
+            val free = Datatype_Aux.mk_Free "x" (Us ---> T) j;
+            val free' = Datatype_Aux.app_bnds free (length Us);
             fun mk_abs_fun T (i, t) =
               let val U = fastype_of t
               in (i + 1, Const ("Nominal.abs_fun", [T, U, T] --->
-                Type ("Nominal.noption", [U])) $ mk_Free "y" T i $ t)
+                Type ("Nominal.noption", [U])) $ Datatype_Aux.mk_Free "y" T i $ t)
               end
           in (j + 1, j' + length Ts,
             case dt'' of
-                DtRec k => list_all (map (pair "x") Us,
+                Datatype_Aux.DtRec k => list_all (map (pair "x") Us,
                   HOLogic.mk_Trueprop (Free (nth rep_set_names k,
                     T --> HOLogic.boolT) $ free')) :: prems
               | _ => prems,
@@ -584,7 +572,7 @@
       (perm_indnames ~~ descr);
 
     fun mk_perm_closed name = map (fn th => Drule.export_without_context (th RS mp))
-      (List.take (split_conj_thm (Goal.prove_global thy4 [] []
+      (List.take (Datatype_Aux.split_conj_thm (Goal.prove_global thy4 [] []
         (augment_sort thy4
           (pt_class_of thy4 name :: map (cp_class_of thy4 name) (remove (op =) name dt_atoms))
           (HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj (map
@@ -717,8 +705,8 @@
       (fn ((i, ("Nominal.noption", _, _)), p) => p
         | ((i, _), (ty_idxs, j)) => (ty_idxs @ [(i, j)], j + 1)) ([], 0) descr;
 
-    fun reindex (DtType (s, dts)) = DtType (s, map reindex dts)
-      | reindex (DtRec i) = DtRec (the (AList.lookup op = ty_idxs i))
+    fun reindex (Datatype_Aux.DtType (s, dts)) = Datatype_Aux.DtType (s, map reindex dts)
+      | reindex (Datatype_Aux.DtRec i) = Datatype_Aux.DtRec (the (AList.lookup op = ty_idxs i))
       | reindex dt = dt;
 
     fun strip_suffix i s = implode (List.take (raw_explode s, size s - i));  (* FIXME Symbol.explode (?) *)
@@ -754,14 +742,14 @@
       map (fn ((cname, cargs), idxs) => (cname, partition_cargs idxs cargs))
         (constrs ~~ idxss)))) (descr'' ~~ ndescr);
 
-    fun nth_dtyp' i = typ_of_dtyp descr'' (DtRec i);
+    fun nth_dtyp' i = Datatype_Aux.typ_of_dtyp descr'' (Datatype_Aux.DtRec i);
 
     val rep_names = map (fn s =>
       Sign.intern_const thy7 ("Rep_" ^ s)) new_type_names;
     val abs_names = map (fn s =>
       Sign.intern_const thy7 ("Abs_" ^ s)) new_type_names;
 
-    val recTs = get_rec_types descr'';
+    val recTs = Datatype_Aux.get_rec_types descr'';
     val newTs' = take (length new_type_names) recTs';
     val newTs = take (length new_type_names) recTs;
 
@@ -772,17 +760,18 @@
       let
         fun constr_arg (dts, dt) (j, l_args, r_args) =
           let
-            val xs = map (fn (dt, i) => mk_Free "x" (typ_of_dtyp descr'' dt) i)
-              (dts ~~ (j upto j + length dts - 1))
-            val x = mk_Free "x" (typ_of_dtyp descr'' dt) (j + length dts)
+            val xs =
+              map (fn (dt, i) => Datatype_Aux.mk_Free "x" (Datatype_Aux.typ_of_dtyp descr'' dt) i)
+                (dts ~~ (j upto j + length dts - 1))
+            val x = Datatype_Aux.mk_Free "x" (Datatype_Aux.typ_of_dtyp descr'' dt) (j + length dts)
           in
             (j + length dts + 1,
              xs @ x :: l_args,
              fold_rev mk_abs_fun xs
                (case dt of
-                  DtRec k => if k < length new_type_names then
-                      Const (nth rep_names k, typ_of_dtyp descr'' dt -->
-                        typ_of_dtyp descr dt) $ x
+                  Datatype_Aux.DtRec k => if k < length new_type_names then
+                      Const (nth rep_names k, Datatype_Aux.typ_of_dtyp descr'' dt -->
+                        Datatype_Aux.typ_of_dtyp descr dt) $ x
                     else error "nested recursion not (yet) supported"
                 | _ => x) :: r_args)
           end
@@ -900,10 +889,12 @@
 
           fun constr_arg (dts, dt) (j, l_args, r_args) =
             let
-              val Ts = map (typ_of_dtyp descr'') dts;
-              val xs = map (fn (T, i) => mk_Free "x" T i)
-                (Ts ~~ (j upto j + length dts - 1))
-              val x = mk_Free "x" (typ_of_dtyp descr'' dt) (j + length dts)
+              val Ts = map (Datatype_Aux.typ_of_dtyp descr'') dts;
+              val xs =
+                map (fn (T, i) => Datatype_Aux.mk_Free "x" T i)
+                  (Ts ~~ (j upto j + length dts - 1));
+              val x =
+                Datatype_Aux.mk_Free "x" (Datatype_Aux.typ_of_dtyp descr'' dt) (j + length dts);
             in
               (j + length dts + 1,
                xs @ x :: l_args,
@@ -950,11 +941,14 @@
 
           fun make_inj (dts, dt) (j, args1, args2, eqs) =
             let
-              val Ts_idx = map (typ_of_dtyp descr'') dts ~~ (j upto j + length dts - 1);
-              val xs = map (fn (T, i) => mk_Free "x" T i) Ts_idx;
-              val ys = map (fn (T, i) => mk_Free "y" T i) Ts_idx;
-              val x = mk_Free "x" (typ_of_dtyp descr'' dt) (j + length dts);
-              val y = mk_Free "y" (typ_of_dtyp descr'' dt) (j + length dts);
+              val Ts_idx =
+                map (Datatype_Aux.typ_of_dtyp descr'') dts ~~ (j upto j + length dts - 1);
+              val xs = map (fn (T, i) => Datatype_Aux.mk_Free "x" T i) Ts_idx;
+              val ys = map (fn (T, i) => Datatype_Aux.mk_Free "y" T i) Ts_idx;
+              val x =
+                Datatype_Aux.mk_Free "x" (Datatype_Aux.typ_of_dtyp descr'' dt) (j + length dts);
+              val y =
+                Datatype_Aux.mk_Free "y" (Datatype_Aux.typ_of_dtyp descr'' dt) (j + length dts);
             in
               (j + length dts + 1,
                xs @ (x :: args1), ys @ (y :: args2),
@@ -993,9 +987,11 @@
 
           fun process_constr (dts, dt) (j, args1, args2) =
             let
-              val Ts_idx = map (typ_of_dtyp descr'') dts ~~ (j upto j + length dts - 1);
-              val xs = map (fn (T, i) => mk_Free "x" T i) Ts_idx;
-              val x = mk_Free "x" (typ_of_dtyp descr'' dt) (j + length dts);
+              val Ts_idx =
+                map (Datatype_Aux.typ_of_dtyp descr'') dts ~~ (j upto j + length dts - 1);
+              val xs = map (fn (T, i) => Datatype_Aux.mk_Free "x" T i) Ts_idx;
+              val x =
+                Datatype_Aux.mk_Free "x" (Datatype_Aux.typ_of_dtyp descr'' dt) (j + length dts);
             in
               (j + length dts + 1,
                xs @ (x :: args1), fold_rev mk_abs_fun xs x :: args2)
@@ -1034,14 +1030,16 @@
 
     fun mk_indrule_lemma (((i, _), T), U) (prems, concls) =
       let
-        val Rep_t = Const (nth rep_names i, T --> U) $ mk_Free "x" T i;
+        val Rep_t = Const (nth rep_names i, T --> U) $ Datatype_Aux.mk_Free "x" T i;
 
         val Abs_t =  Const (nth abs_names i, U --> T);
 
-      in (prems @ [HOLogic.imp $
+      in
+        (prems @ [HOLogic.imp $
             (Const (nth rep_set_names'' i, U --> HOLogic.boolT) $ Rep_t) $
-              (mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ (Abs_t $ Rep_t))],
-          concls @ [mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ mk_Free "x" T i])
+              (Datatype_Aux.mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ (Abs_t $ Rep_t))],
+         concls @
+           [Datatype_Aux.mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ Datatype_Aux.mk_Free "x" T i])
       end;
 
     val (indrule_lemma_prems, indrule_lemma_concls) =
@@ -1049,8 +1047,8 @@
 
     val indrule_lemma = Goal.prove_global thy8 [] []
       (Logic.mk_implies
-        (HOLogic.mk_Trueprop (mk_conj indrule_lemma_prems),
-         HOLogic.mk_Trueprop (mk_conj indrule_lemma_concls))) (fn _ => EVERY
+        (HOLogic.mk_Trueprop (Datatype_Aux.mk_conj indrule_lemma_prems),
+         HOLogic.mk_Trueprop (Datatype_Aux.mk_conj indrule_lemma_concls))) (fn _ => EVERY
            [REPEAT (etac conjE 1),
             REPEAT (EVERY
               [TRY (rtac conjI 1), full_simp_tac (HOL_basic_ss addsimps Rep_inverse_thms) 1,
@@ -1090,7 +1088,7 @@
     val finite_supp_thms = map (fn atom =>
       let val atomT = Type (atom, [])
       in map Drule.export_without_context (List.take
-        (split_conj_thm (Goal.prove_global thy8 [] []
+        (Datatype_Aux.split_conj_thm (Goal.prove_global thy8 [] []
            (augment_sort thy8 (fs_class_of thy8 atom :: pt_cp_sort)
              (HOLogic.mk_Trueprop
                (foldr1 HOLogic.mk_conj (map (fn (s, T) =>
@@ -1160,11 +1158,11 @@
 
     fun make_ind_prem fsT f k T ((cname, cargs), idxs) =
       let
-        val recs = filter is_rec_type cargs;
-        val Ts = map (typ_of_dtyp descr'') cargs;
-        val recTs' = map (typ_of_dtyp descr'') recs;
+        val recs = filter Datatype_Aux.is_rec_type cargs;
+        val Ts = map (Datatype_Aux.typ_of_dtyp descr'') cargs;
+        val recTs' = map (Datatype_Aux.typ_of_dtyp descr'') recs;
         val tnames = Name.variant_list pnames (Datatype_Prop.make_tnames Ts);
-        val rec_tnames = map fst (filter (is_rec_type o snd) (tnames ~~ cargs));
+        val rec_tnames = map fst (filter (Datatype_Aux.is_rec_type o snd) (tnames ~~ cargs));
         val frees = tnames ~~ Ts;
         val frees' = partition_cargs idxs frees;
         val z = (singleton (Name.variant_list tnames) "z", fsT);
@@ -1172,9 +1170,12 @@
         fun mk_prem ((dt, s), T) =
           let
             val (Us, U) = strip_type T;
-            val l = length Us
-          in list_all (z :: map (pair "x") Us, HOLogic.mk_Trueprop
-            (make_pred fsT (body_index dt) U $ Bound l $ app_bnds (Free (s, T)) l))
+            val l = length Us;
+          in
+            list_all (z :: map (pair "x") Us,
+              HOLogic.mk_Trueprop
+                (make_pred fsT (Datatype_Aux.body_index dt) U $ Bound l $
+                  Datatype_Aux.app_bnds (Free (s, T)) l))
           end;
 
         val prems = map mk_prem (recs ~~ rec_tnames ~~ recTs');
@@ -1432,7 +1433,7 @@
           (1 upto (length descr''));
     val rec_set_names =  map (Sign.full_bname thy10) rec_set_names';
 
-    val rec_fns = map (uncurry (mk_Free "f"))
+    val rec_fns = map (uncurry (Datatype_Aux.mk_Free "f"))
       (rec_fn_Ts ~~ (1 upto (length rec_fn_Ts)));
     val rec_sets' = map (fn c => list_comb (Free c, rec_fns))
       (rec_set_names' ~~ rec_set_Ts);
@@ -1457,13 +1458,13 @@
     fun make_rec_intr T p rec_set ((cname, cargs), idxs)
         (rec_intr_ts, rec_prems, rec_prems', rec_eq_prems, l) =
       let
-        val Ts = map (typ_of_dtyp descr'') cargs;
+        val Ts = map (Datatype_Aux.typ_of_dtyp descr'') cargs;
         val frees = map (fn i => "x" ^ string_of_int i) (1 upto length Ts) ~~ Ts;
         val frees' = partition_cargs idxs frees;
         val binders = maps fst frees';
         val atomTs = distinct op = (maps (map snd o fst) frees');
         val recs = map_filter
-          (fn ((_, DtRec i), p) => SOME (i, p) | _ => NONE)
+          (fn ((_, Datatype_Aux.DtRec i), p) => SOME (i, p) | _ => NONE)
           (partition_cargs idxs cargs ~~ frees');
         val frees'' = map (fn i => "y" ^ string_of_int i) (1 upto length recs) ~~
           map (fn (i, _) => nth rec_result_Ts i) recs;
@@ -1525,7 +1526,7 @@
       let
         val permT = mk_permT aT;
         val pi = Free ("pi", permT);
-        val rec_fns_pi = map (mk_perm [] pi o uncurry (mk_Free "f"))
+        val rec_fns_pi = map (mk_perm [] pi o uncurry (Datatype_Aux.mk_Free "f"))
           (rec_fn_Ts ~~ (1 upto (length rec_fn_Ts)));
         val rec_sets_pi = map (fn c => list_comb (Const c, rec_fns_pi))
           (rec_set_names ~~ rec_set_Ts);
@@ -1536,7 +1537,7 @@
           in
             (R $ x $ y, R' $ mk_perm [] pi x $ mk_perm [] pi y)
           end) (recTs ~~ rec_result_Ts ~~ rec_sets ~~ rec_sets_pi ~~ (1 upto length recTs));
-        val ths = map (fn th => Drule.export_without_context (th RS mp)) (split_conj_thm
+        val ths = map (fn th => Drule.export_without_context (th RS mp)) (Datatype_Aux.split_conj_thm
           (Goal.prove_global thy11 [] []
             (augment_sort thy1 pt_cp_sort
               (HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj (map HOLogic.mk_imp ps))))
@@ -1568,7 +1569,7 @@
           (finite $ (Const ("Nominal.supp", T --> aset) $ f)))
             (rec_fns ~~ rec_fn_Ts)
       in
-        map (fn th => Drule.export_without_context (th RS mp)) (split_conj_thm
+        map (fn th => Drule.export_without_context (th RS mp)) (Datatype_Aux.split_conj_thm
           (Goal.prove_global thy11 []
             (map (augment_sort thy11 fs_cp_sort) fins)
             (augment_sort thy11 fs_cp_sort
@@ -1705,7 +1706,7 @@
         (Const ("Finite_Set.finite", HOLogic.mk_setT aT --> HOLogic.boolT) $
            (Const ("Nominal.supp", fsT' --> HOLogic.mk_setT aT) $ rec_ctxt))) dt_atomTs;
 
-    val rec_unique_thms = split_conj_thm (Goal.prove
+    val rec_unique_thms = Datatype_Aux.split_conj_thm (Goal.prove
       (Proof_Context.init_global thy11) (map fst rec_unique_frees)
       (map (augment_sort thy11 fs_cp_sort)
         (flat finite_premss @ finite_ctxt_prems @ rec_prems @ rec_prems'))
@@ -1718,7 +1719,7 @@
              apfst (pair T) (chop k xs)) dt_atomTs prems;
            val (finite_ctxt_ths, ths2) = chop (length dt_atomTs) ths1;
            val (P_ind_ths, fcbs) = chop k ths2;
-           val P_ths = map (fn th => th RS mp) (split_conj_thm
+           val P_ths = map (fn th => th RS mp) (Datatype_Aux.split_conj_thm
              (Goal.prove context
                (map fst (rec_unique_frees'' @ rec_unique_frees')) []
                (augment_sort thy11 fs_cp_sort
@@ -2044,7 +2045,7 @@
              resolve_tac rec_intrs 1,
              REPEAT (solve (prems @ rec_total_thms) prems 1)])
       end) (rec_eq_prems ~~
-        Datatype_Prop.make_primrecs new_type_names descr' thy12);
+        Datatype_Prop.make_primrecs reccomb_names descr' thy12);
 
     val dt_infos = map_index (make_dt_info pdescr induct reccomb_names rec_thms)
       (descr1 ~~ distinct_thms ~~ inject_thms);
@@ -2065,11 +2066,12 @@
     thy13
   end;
 
-val add_nominal_datatype = gen_add_nominal_datatype Datatype.read_typ;
+val nominal_datatype = gen_nominal_datatype Datatype.check_specs;
+val nominal_datatype_cmd = gen_nominal_datatype Datatype.read_specs;
 
 val _ =
-  Outer_Syntax.command "nominal_datatype" "define inductive datatypes" Keyword.thy_decl
-    (Parse.and_list1 Datatype.parse_decl
-      >> (Toplevel.theory o add_nominal_datatype Datatype.default_config));
+  Outer_Syntax.command "nominal_datatype" "define nominal datatypes" Keyword.thy_decl
+    (Parse.and_list1 Datatype.spec_cmd >>
+      (Toplevel.theory o nominal_datatype_cmd Datatype.default_config));
 
 end
--- a/src/HOL/RealDef.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/RealDef.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -1767,7 +1767,7 @@
     (@{const_name uminus_real_inst.uminus_real}, @{const_name Nitpick.uminus_frac}),
     (@{const_name number_real_inst.number_of_real}, @{const_name Nitpick.number_of_frac}),
     (@{const_name inverse_real_inst.inverse_real}, @{const_name Nitpick.inverse_frac}),
-    (@{const_name ord_real_inst.less_real}, @{const_name Nitpick.less_eq_frac}),
+    (@{const_name ord_real_inst.less_real}, @{const_name Nitpick.less_frac}),
     (@{const_name ord_real_inst.less_eq_real}, @{const_name Nitpick.less_eq_frac})]
 *}
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Record_Benchmark/ROOT.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,8 @@
+(*  Title:      HOL/Record_Benchmark/ROOT.ML
+
+Some benchmarks on a large record.
+*)
+
+Toplevel.timing := true;
+
+use_thys ["Record_Benchmark"];
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Record_Benchmark/Record_Benchmark.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,390 @@
+(*  Title:      HOL/Record_Benchmark/Record_Benchmark.thy
+    Author:     Norbert Schirmer, DFKI
+*)
+
+header {* Benchmark for a large record *}
+
+theory Record_Benchmark
+imports Main
+begin
+
+declare [[record_timing]]
+
+record many_A =
+A000::nat
+A001::nat
+A002::nat
+A003::nat
+A004::nat
+A005::nat
+A006::nat
+A007::nat
+A008::nat
+A009::nat
+A010::nat
+A011::nat
+A012::nat
+A013::nat
+A014::nat
+A015::nat
+A016::nat
+A017::nat
+A018::nat
+A019::nat
+A020::nat
+A021::nat
+A022::nat
+A023::nat
+A024::nat
+A025::nat
+A026::nat
+A027::nat
+A028::nat
+A029::nat
+A030::nat
+A031::nat
+A032::nat
+A033::nat
+A034::nat
+A035::nat
+A036::nat
+A037::nat
+A038::nat
+A039::nat
+A040::nat
+A041::nat
+A042::nat
+A043::nat
+A044::nat
+A045::nat
+A046::nat
+A047::nat
+A048::nat
+A049::nat
+A050::nat
+A051::nat
+A052::nat
+A053::nat
+A054::nat
+A055::nat
+A056::nat
+A057::nat
+A058::nat
+A059::nat
+A060::nat
+A061::nat
+A062::nat
+A063::nat
+A064::nat
+A065::nat
+A066::nat
+A067::nat
+A068::nat
+A069::nat
+A070::nat
+A071::nat
+A072::nat
+A073::nat
+A074::nat
+A075::nat
+A076::nat
+A077::nat
+A078::nat
+A079::nat
+A080::nat
+A081::nat
+A082::nat
+A083::nat
+A084::nat
+A085::nat
+A086::nat
+A087::nat
+A088::nat
+A089::nat
+A090::nat
+A091::nat
+A092::nat
+A093::nat
+A094::nat
+A095::nat
+A096::nat
+A097::nat
+A098::nat
+A099::nat
+A100::nat
+A101::nat
+A102::nat
+A103::nat
+A104::nat
+A105::nat
+A106::nat
+A107::nat
+A108::nat
+A109::nat
+A110::nat
+A111::nat
+A112::nat
+A113::nat
+A114::nat
+A115::nat
+A116::nat
+A117::nat
+A118::nat
+A119::nat
+A120::nat
+A121::nat
+A122::nat
+A123::nat
+A124::nat
+A125::nat
+A126::nat
+A127::nat
+A128::nat
+A129::nat
+A130::nat
+A131::nat
+A132::nat
+A133::nat
+A134::nat
+A135::nat
+A136::nat
+A137::nat
+A138::nat
+A139::nat
+A140::nat
+A141::nat
+A142::nat
+A143::nat
+A144::nat
+A145::nat
+A146::nat
+A147::nat
+A148::nat
+A149::nat
+A150::nat
+A151::nat
+A152::nat
+A153::nat
+A154::nat
+A155::nat
+A156::nat
+A157::nat
+A158::nat
+A159::nat
+A160::nat
+A161::nat
+A162::nat
+A163::nat
+A164::nat
+A165::nat
+A166::nat
+A167::nat
+A168::nat
+A169::nat
+A170::nat
+A171::nat
+A172::nat
+A173::nat
+A174::nat
+A175::nat
+A176::nat
+A177::nat
+A178::nat
+A179::nat
+A180::nat
+A181::nat
+A182::nat
+A183::nat
+A184::nat
+A185::nat
+A186::nat
+A187::nat
+A188::nat
+A189::nat
+A190::nat
+A191::nat
+A192::nat
+A193::nat
+A194::nat
+A195::nat
+A196::nat
+A197::nat
+A198::nat
+A199::nat
+A200::nat
+A201::nat
+A202::nat
+A203::nat
+A204::nat
+A205::nat
+A206::nat
+A207::nat
+A208::nat
+A209::nat
+A210::nat
+A211::nat
+A212::nat
+A213::nat
+A214::nat
+A215::nat
+A216::nat
+A217::nat
+A218::nat
+A219::nat
+A220::nat
+A221::nat
+A222::nat
+A223::nat
+A224::nat
+A225::nat
+A226::nat
+A227::nat
+A228::nat
+A229::nat
+A230::nat
+A231::nat
+A232::nat
+A233::nat
+A234::nat
+A235::nat
+A236::nat
+A237::nat
+A238::nat
+A239::nat
+A240::nat
+A241::nat
+A242::nat
+A243::nat
+A244::nat
+A245::nat
+A246::nat
+A247::nat
+A248::nat
+A249::nat
+A250::nat
+A251::nat
+A252::nat
+A253::nat
+A254::nat
+A255::nat
+A256::nat
+A257::nat
+A258::nat
+A259::nat
+A260::nat
+A261::nat
+A262::nat
+A263::nat
+A264::nat
+A265::nat
+A266::nat
+A267::nat
+A268::nat
+A269::nat
+A270::nat
+A271::nat
+A272::nat
+A273::nat
+A274::nat
+A275::nat
+A276::nat
+A277::nat
+A278::nat
+A279::nat
+A280::nat
+A281::nat
+A282::nat
+A283::nat
+A284::nat
+A285::nat
+A286::nat
+A287::nat
+A288::nat
+A289::nat
+A290::nat
+A291::nat
+A292::nat
+A293::nat
+A294::nat
+A295::nat
+A296::nat
+A297::nat
+A298::nat
+A299::nat
+
+lemma "A155 (r\<lparr>A255:=x\<rparr>) = A155 r"
+  by simp
+
+lemma "A155 (r\<lparr>A255:=x,A253:=y,A255:=z \<rparr>) = A155 r"
+  by simp
+
+lemma "(r\<lparr>A255:=x,A253:=y,A255:=z \<rparr>) = r\<lparr>A253:=y,A255:=z\<rparr>"
+  by simp
+
+lemma "(r\<lparr>A255:=x,A253:=y,A255:=z \<rparr>) = r\<lparr>A253:=y,A255:=z\<rparr>"
+  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.upd_simproc]) 1*})
+  done
+
+lemma "(\<forall>r. P (A155 r)) \<longrightarrow> (\<forall>x. P x)"
+  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.split_simproc (K ~1)]) 1*})
+  apply simp
+  done
+
+lemma "(\<forall>r. P (A155 r)) \<longrightarrow> (\<forall>x. P x)"
+  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
+  apply simp
+  done
+
+lemma "(\<exists>r. P (A155 r)) \<longrightarrow> (\<exists>x. P x)"
+  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.split_simproc (K ~1)]) 1*})
+  apply simp
+  done
+
+lemma "(\<exists>r. P (A155 r)) \<longrightarrow> (\<exists>x. P x)"
+  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
+  apply simp
+  done
+
+lemma "\<And>r. P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
+  apply (tactic {* simp_tac (HOL_basic_ss addsimprocs [Record.split_simproc (K ~1)]) 1*})
+  apply auto
+  done
+
+lemma "\<And>r. P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
+  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
+  apply auto
+  done
+
+lemma "P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
+  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
+  apply auto
+  done
+
+lemma fixes r shows "P (A155 r) \<Longrightarrow> (\<exists>x. P x)"
+  apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
+  apply auto
+  done
+
+
+lemma True
+proof -
+  {
+    fix P r
+    assume pre: "P (A155 r)"
+    have "\<exists>x. P x"
+      using pre
+      apply -
+      apply (tactic {* Record.split_simp_tac [] (K ~1) 1*})
+      apply auto 
+      done
+  }
+  show ?thesis ..
+qed
+
+
+lemma "\<exists>r. A155 r = x"
+  apply (tactic {*simp_tac (HOL_basic_ss addsimprocs [Record.ex_sel_eq_simproc]) 1*})
+  done
+
+
+end
\ No newline at end of file
--- a/src/HOL/SPARK/Tools/spark_vcs.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/SPARK/Tools/spark_vcs.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -306,8 +306,7 @@
               in
                 (thy |>
                  Datatype.add_datatype {strict = true, quiet = true}
-                   [([], tyb, NoSyn,
-                     map (fn s => (Binding.name s, [], NoSyn)) els)] |> snd |>
+                   [((tyb, [], NoSyn), map (fn s => (Binding.name s, [], NoSyn)) els)] |> snd |>
                  add_enum_type s tyname,
                  tyname)
               end
--- a/src/HOL/Tools/ATP/atp_proof.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/ATP/atp_proof.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -267,7 +267,7 @@
 fun list_app (f, args) =
   fold (fn arg => fn f => ATerm (tptp_app, [f, arg])) args f
 
-(* We ignore TFF and THF types for now. *)
+(* We currently ignore TFF and THF types. *)
 fun parse_type_stuff x =
   Scan.repeat (($$ tptp_has_type || $$ tptp_fun_type) |-- parse_arg) x
 and parse_arg x =
@@ -275,28 +275,17 @@
    || scan_general_id --| parse_type_stuff
         -- Scan.optional ($$ "(" |-- parse_terms --| $$ ")") []
       >> ATerm) x
-and parse_app x =
+and parse_term x =
   (parse_arg -- Scan.repeat ($$ tptp_app |-- parse_arg) >> list_app) x
-and parse_term x =
-  (parse_app -- Scan.option (Scan.option ($$ tptp_not_infix) --| $$ tptp_equal
-                             -- parse_app)
-   >> (fn (u1, NONE) => u1
-        | (u1, SOME (NONE, u2)) => ATerm ("equal", [u1, u2])
-        | (u1, SOME (SOME _, u2)) =>
-          ATerm (tptp_not, [ATerm ("equal", [u1, u2])]))) x
 and parse_terms x =
   (parse_term ::: Scan.repeat ($$ "," |-- parse_term)) x
 
-(* TODO: Avoid duplication with "parse_term" above. *)
 fun parse_atom x =
   (parse_term -- Scan.option (Scan.option ($$ tptp_not_infix) --| $$ tptp_equal
                               -- parse_term)
    >> (fn (u1, NONE) => AAtom u1
-        | (u1, SOME (NONE, u2)) => AAtom (ATerm ("equal", [u1, u2]))
-        | (u1, SOME (SOME _, u2)) =>
-          mk_anot (AAtom (ATerm ("equal", [u1, u2]))))) x
-
-fun ho_term_head (ATerm (s, _)) = s
+        | (u1, SOME (neg, u2)) =>
+          AAtom (ATerm ("equal", [u1, u2])) |> is_some neg ? mk_anot)) x
 
 (* TPTP formulas are fully parenthesized, so we don't need to worry about
    operator precedence. *)
@@ -328,7 +317,7 @@
    --| $$ "[" -- parse_terms --| $$ "]" --| $$ ":" -- parse_literal
    >> (fn ((q, ts), phi) =>
           (* We ignore TFF and THF types for now. *)
-          AQuant (q, map (rpair NONE o ho_term_head) ts, phi))) x
+          AQuant (q, map (fn ATerm (s, _) => (s, NONE)) ts, phi))) x
 
 val parse_tstp_extra_arguments =
   Scan.optional ($$ "," |-- parse_source --| Scan.option ($$ "," |-- skip_term))
--- a/src/HOL/Tools/ATP/atp_reconstruct.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/ATP/atp_reconstruct.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -10,7 +10,6 @@
 sig
   type ('a, 'b) ho_term = ('a, 'b) ATP_Problem.ho_term
   type ('a, 'b, 'c) formula = ('a, 'b, 'c) ATP_Problem.formula
-  type step_name = ATP_Proof.step_name
   type 'a proof = 'a ATP_Proof.proof
   type locality = ATP_Translate.locality
 
@@ -76,6 +75,12 @@
 open ATP_Proof
 open ATP_Translate
 
+structure String_Redirect =
+  ATP_Redirect(type key = step_name
+               val ord = fast_string_ord o pairself fst
+               val string_of = fst)
+open String_Redirect
+
 datatype reconstructor =
   Metis of string * string |
   SMT
@@ -285,9 +290,9 @@
 val indent_size = 2
 val no_label = ("", ~1)
 
-val raw_prefix = "X"
-val assum_prefix = "A"
-val have_prefix = "F"
+val raw_prefix = "x"
+val assum_prefix = "a"
+val have_prefix = "f"
 
 fun raw_label_for_name (num, ss) =
   case resolve_conjecture ss of
@@ -664,9 +669,6 @@
 
 (** Isar proof construction and manipulation **)
 
-fun merge_fact_sets (ls1, ss1) (ls2, ss2) =
-  (union (op =) ls1 ls2, union (op =) ss1 ss2)
-
 type label = string * int
 type facts = label list * string list
 
@@ -676,13 +678,10 @@
   Fix of (string * typ) list |
   Let of term * term |
   Assume of label * term |
-  Have of isar_qualifier list * label * term * byline
+  Prove of isar_qualifier list * label * term * byline
 and byline =
-  ByMetis of facts |
-  CaseSplit of isar_step list list * facts
-
-fun smart_case_split [] facts = ByMetis facts
-  | smart_case_split proofs facts = CaseSplit (proofs, facts)
+  By_Metis of facts |
+  Case_Split of isar_step list list * facts
 
 fun add_fact_from_dependency fact_names (name as (_, ss)) =
   if is_fact fact_names ss then
@@ -690,14 +689,6 @@
   else
     apfst (insert (op =) (raw_label_for_name name))
 
-fun step_for_line _ _ (Definition (_, t1, t2)) = Let (t1, t2)
-  | step_for_line _ _ (Inference (name, t, _, [])) =
-    Assume (raw_label_for_name name, t)
-  | step_for_line fact_names j (Inference (name, t, _, deps)) =
-    Have (if j = 1 then [Show] else [], raw_label_for_name name,
-          fold_rev forall_of (map Var (Term.add_vars t [])) t,
-          ByMetis (fold (add_fact_from_dependency fact_names) deps ([], [])))
-
 fun repair_name "$true" = "c_True"
   | repair_name "$false" = "c_False"
   | repair_name "$$e" = tptp_equal (* seen in Vampire proofs *)
@@ -709,167 +700,7 @@
     else
       s
 
-fun isar_proof_from_atp_proof pool ctxt isar_shrink_factor fact_names sym_tab
-                              params frees atp_proof =
-  let
-    val lines =
-      atp_proof
-      |> clean_up_atp_proof_dependencies
-      |> nasty_atp_proof pool
-      |> map_term_names_in_atp_proof repair_name
-      |> decode_lines ctxt sym_tab
-      |> rpair [] |-> fold_rev (add_line fact_names)
-      |> rpair [] |-> fold_rev add_nontrivial_line
-      |> rpair (0, [])
-      |-> fold_rev (add_desired_line isar_shrink_factor fact_names frees)
-      |> snd
-  in
-    (if null params then [] else [Fix params]) @
-    map2 (step_for_line fact_names) (length lines downto 1) lines
-  end
-
-(* When redirecting proofs, we keep information about the labels seen so far in
-   the "backpatches" data structure. The first component indicates which facts
-   should be associated with forthcoming proof steps. The second component is a
-   pair ("assum_ls", "drop_ls"), where "assum_ls" are the labels that should
-   become assumptions and "drop_ls" are the labels that should be dropped in a
-   case split. *)
-type backpatches = (label * facts) list * (label list * label list)
-
-fun used_labels_of_step (Have (_, _, _, by)) =
-    (case by of
-       ByMetis (ls, _) => ls
-     | CaseSplit (proofs, (ls, _)) =>
-       fold (union (op =) o used_labels_of) proofs ls)
-  | used_labels_of_step _ = []
-and used_labels_of proof = fold (union (op =) o used_labels_of_step) proof []
-
-fun new_labels_of_step (Fix _) = []
-  | new_labels_of_step (Let _) = []
-  | new_labels_of_step (Assume (l, _)) = [l]
-  | new_labels_of_step (Have (_, l, _, _)) = [l]
-val new_labels_of = maps new_labels_of_step
-
-val join_proofs =
-  let
-    fun aux _ [] = NONE
-      | aux proof_tail (proofs as (proof1 :: _)) =
-        if exists null proofs then
-          NONE
-        else if forall (curry (op =) (hd proof1) o hd) (tl proofs) then
-          aux (hd proof1 :: proof_tail) (map tl proofs)
-        else case hd proof1 of
-          Have ([], l, t, _) => (* FIXME: should we really ignore the "by"? *)
-          if forall (fn Have ([], l', t', _) :: _ => (l, t) = (l', t')
-                      | _ => false) (tl proofs) andalso
-             not (exists (member (op =) (maps new_labels_of proofs))
-                         (used_labels_of proof_tail)) then
-            SOME (l, t, map rev proofs, proof_tail)
-          else
-            NONE
-        | _ => NONE
-  in aux [] o map rev end
-
-fun case_split_qualifiers proofs =
-  case length proofs of
-    0 => []
-  | 1 => [Then]
-  | _ => [Ultimately]
-
-fun redirect_proof hyp_ts concl_t proof =
-  let
-    (* The first pass outputs those steps that are independent of the negated
-       conjecture. The second pass flips the proof by contradiction to obtain a
-       direct proof, introducing case splits when an inference depends on
-       several facts that depend on the negated conjecture. *)
-     val concl_l = (conjecture_prefix, length hyp_ts)
-     fun first_pass ([], contra) = ([], contra)
-       | first_pass ((step as Fix _) :: proof, contra) =
-         first_pass (proof, contra) |>> cons step
-       | first_pass ((step as Let _) :: proof, contra) =
-         first_pass (proof, contra) |>> cons step
-       | first_pass ((step as Assume (l as (_, j), _)) :: proof, contra) =
-         if l = concl_l then first_pass (proof, contra ||> cons step)
-         else first_pass (proof, contra) |>> cons (Assume (l, nth hyp_ts j))
-       | first_pass (Have (qs, l, t, ByMetis (ls, ss)) :: proof, contra) =
-         let val step = Have (qs, l, t, ByMetis (ls, ss)) in
-           if exists (member (op =) (fst contra)) ls then
-             first_pass (proof, contra |>> cons l ||> cons step)
-           else
-             first_pass (proof, contra) |>> cons step
-         end
-       | first_pass _ = raise Fail "malformed proof"
-    val (proof_top, (contra_ls, contra_proof)) =
-      first_pass (proof, ([concl_l], []))
-    val backpatch_label = the_default ([], []) oo AList.lookup (op =) o fst
-    fun backpatch_labels patches ls =
-      fold merge_fact_sets (map (backpatch_label patches) ls) ([], [])
-    fun second_pass end_qs ([], assums, patches) =
-        ([Have (end_qs, no_label, concl_t,
-                ByMetis (backpatch_labels patches (map snd assums)))], patches)
-      | second_pass end_qs (Assume (l, t) :: proof, assums, patches) =
-        second_pass end_qs (proof, (t, l) :: assums, patches)
-      | second_pass end_qs (Have (qs, l, t, ByMetis (ls, ss)) :: proof, assums,
-                            patches) =
-        (if member (op =) (snd (snd patches)) l andalso
-            not (member (op =) (fst (snd patches)) l) andalso
-            not (AList.defined (op =) (fst patches) l) then
-           second_pass end_qs (proof, assums, patches ||> apsnd (append ls))
-         else case List.partition (member (op =) contra_ls) ls of
-           ([contra_l], co_ls) =>
-           if member (op =) qs Show then
-             second_pass end_qs (proof, assums,
-                                 patches |>> cons (contra_l, (co_ls, ss)))
-           else
-             second_pass end_qs
-                         (proof, assums,
-                          patches |>> cons (contra_l, (l :: co_ls, ss)))
-             |>> cons (if member (op =) (fst (snd patches)) l then
-                         Assume (l, s_not t)
-                       else
-                         Have (qs, l, s_not t,
-                               ByMetis (backpatch_label patches l)))
-         | (contra_ls as _ :: _, co_ls) =>
-           let
-             val proofs =
-               map_filter
-                   (fn l =>
-                       if l = concl_l then
-                         NONE
-                       else
-                         let
-                           val drop_ls = filter (curry (op <>) l) contra_ls
-                         in
-                           second_pass []
-                               (proof, assums,
-                                patches ||> apfst (insert (op =) l)
-                                        ||> apsnd (union (op =) drop_ls))
-                           |> fst |> SOME
-                         end) contra_ls
-             val (assumes, facts) =
-               if member (op =) (fst (snd patches)) l then
-                 ([Assume (l, s_not t)], (l :: co_ls, ss))
-               else
-                 ([], (co_ls, ss))
-           in
-             (case join_proofs proofs of
-                SOME (l, t, proofs, proof_tail) =>
-                Have (case_split_qualifiers proofs @
-                      (if null proof_tail then end_qs else []), l, t,
-                      smart_case_split proofs facts) :: proof_tail
-              | NONE =>
-                [Have (case_split_qualifiers proofs @ end_qs, no_label,
-                       concl_t, smart_case_split proofs facts)],
-              patches)
-             |>> append assumes
-           end
-         | _ => raise Fail "malformed proof")
-       | second_pass _ _ = raise Fail "malformed proof"
-    val proof_bottom =
-      second_pass [Show] (contra_proof, [], ([], ([], []))) |> fst
-  in proof_top @ proof_bottom end
-
-(* FIXME: Still needed? Probably not. *)
+(* FIXME: Still needed? Try with SPASS proofs perhaps. *)
 val kill_duplicate_assumptions_in_proof =
   let
     fun relabel_facts subst =
@@ -878,46 +709,37 @@
         (case AList.lookup (op aconv) assums t of
            SOME l' => (proof, (l, l') :: subst, assums)
          | NONE => (step :: proof, subst, (t, l) :: assums))
-      | do_step (Have (qs, l, t, by)) (proof, subst, assums) =
-        (Have (qs, l, t,
-               case by of
-                 ByMetis facts => ByMetis (relabel_facts subst facts)
-               | CaseSplit (proofs, facts) =>
-                 CaseSplit (map do_proof proofs, relabel_facts subst facts)) ::
+      | do_step (Prove (qs, l, t, by)) (proof, subst, assums) =
+        (Prove (qs, l, t,
+                case by of
+                  By_Metis facts => By_Metis (relabel_facts subst facts)
+                | Case_Split (proofs, facts) =>
+                  Case_Split (map do_proof proofs,
+                              relabel_facts subst facts)) ::
          proof, subst, assums)
       | do_step step (proof, subst, assums) = (step :: proof, subst, assums)
     and do_proof proof = fold do_step proof ([], [], []) |> #1 |> rev
   in do_proof end
 
-val then_chain_proof =
-  let
-    fun aux _ [] = []
-      | aux _ ((step as Assume (l, _)) :: proof) = step :: aux l proof
-      | aux l' (Have (qs, l, t, by) :: proof) =
-        (case by of
-           ByMetis (ls, ss) =>
-           Have (if member (op =) ls l' then
-                   (Then :: qs, l, t,
-                    ByMetis (filter_out (curry (op =) l') ls, ss))
-                 else
-                   (qs, l, t, ByMetis (ls, ss)))
-         | CaseSplit (proofs, facts) =>
-           Have (qs, l, t, CaseSplit (map (aux no_label) proofs, facts))) ::
-        aux l proof
-      | aux _ (step :: proof) = step :: aux no_label proof
-  in aux no_label end
+fun used_labels_of_step (Prove (_, _, _, by)) =
+    (case by of
+       By_Metis (ls, _) => ls
+     | Case_Split (proofs, (ls, _)) =>
+       fold (union (op =) o used_labels_of) proofs ls)
+  | used_labels_of_step _ = []
+and used_labels_of proof = fold (union (op =) o used_labels_of_step) proof []
 
 fun kill_useless_labels_in_proof proof =
   let
     val used_ls = used_labels_of proof
     fun do_label l = if member (op =) used_ls l then l else no_label
     fun do_step (Assume (l, t)) = Assume (do_label l, t)
-      | do_step (Have (qs, l, t, by)) =
-        Have (qs, do_label l, t,
-              case by of
-                CaseSplit (proofs, facts) =>
-                CaseSplit (map (map do_step) proofs, facts)
-              | _ => by)
+      | do_step (Prove (qs, l, t, by)) =
+        Prove (qs, do_label l, t,
+               case by of
+                 Case_Split (proofs, facts) =>
+                 Case_Split (map (map do_step) proofs, facts)
+               | _ => by)
       | do_step step = step
   in map do_step proof end
 
@@ -934,7 +756,8 @@
             Assume (l', t) ::
             aux ((l, l') :: subst) depth (next_assum + 1, next_fact) proof
           end
-      | aux subst depth (next_assum, next_fact) (Have (qs, l, t, by) :: proof) =
+      | aux subst depth (next_assum, next_fact)
+            (Prove (qs, l, t, by) :: proof) =
         let
           val (l', subst, next_fact) =
             if l = no_label then
@@ -947,13 +770,12 @@
             apfst (maps (the_list o AList.lookup (op =) subst))
           val by =
             case by of
-              ByMetis facts => ByMetis (relabel_facts facts)
-            | CaseSplit (proofs, facts) =>
-              CaseSplit (map (aux subst (depth + 1) (1, 1)) proofs,
-                         relabel_facts facts)
+              By_Metis facts => By_Metis (relabel_facts facts)
+            | Case_Split (proofs, facts) =>
+              Case_Split (map (aux subst (depth + 1) (1, 1)) proofs,
+                          relabel_facts facts)
         in
-          Have (qs, l', t, by) ::
-          aux subst depth (next_assum, next_fact) proof
+          Prove (qs, l', t, by) :: aux subst depth (next_assum, next_fact) proof
         end
       | aux subst depth nextp (step :: proof) =
         step :: aux subst depth nextp proof
@@ -992,12 +814,12 @@
         do_indent ind ^ "let " ^ do_term t1 ^ " = " ^ do_term t2 ^ "\n"
       | do_step ind (Assume (l, t)) =
         do_indent ind ^ "assume " ^ do_label l ^ do_term t ^ "\n"
-      | do_step ind (Have (qs, l, t, ByMetis facts)) =
+      | do_step ind (Prove (qs, l, t, By_Metis facts)) =
         do_indent ind ^ do_have qs ^ " " ^
         do_label l ^ do_term t ^ " " ^ do_facts facts ^ "\n"
-      | do_step ind (Have (qs, l, t, CaseSplit (proofs, facts))) =
-        space_implode (do_indent ind ^ "moreover\n")
-                      (map (do_block ind) proofs) ^
+      | do_step ind (Prove (qs, l, t, Case_Split (proofs, facts))) =
+        implode (map (prefix (do_indent ind ^ "moreover\n") o do_block ind)
+                     proofs) ^
         do_indent ind ^ do_have qs ^ " " ^ do_label l ^ do_term t ^ " " ^
         do_facts facts ^ "\n"
     and do_steps prefix suffix ind steps =
@@ -1010,7 +832,7 @@
     and do_block ind proof = do_steps "{ " " }" (ind + 1) proof
     (* One-step proofs are pointless; better use the Metis one-liner
        directly. *)
-    and do_proof [Have (_, _, _, ByMetis _)] = ""
+    and do_proof [Prove (_, _, _, By_Metis _)] = ""
       | do_proof proof =
         (if i <> 1 then "prefer " ^ string_of_int i ^ "\n" else "") ^
         do_indent 0 ^ "proof -\n" ^ do_steps "" "" 1 proof ^ do_indent 0 ^
@@ -1030,35 +852,92 @@
       if is_typed_helper_used_in_atp_proof atp_proof then full_typesN
       else partial_typesN
     val lam_trans = lam_trans_from_atp_proof atp_proof metis_default_lam_trans
-    fun isar_proof_for () =
-      case atp_proof
-           |> isar_proof_from_atp_proof pool ctxt isar_shrink_factor fact_names
-                                        sym_tab params frees
-           |> redirect_proof hyp_ts concl_t
+
+    fun isar_proof_of () =
+      let
+        val atp_proof =
+          atp_proof
+          |> clean_up_atp_proof_dependencies
+          |> nasty_atp_proof pool
+          |> map_term_names_in_atp_proof repair_name
+          |> decode_lines ctxt sym_tab
+          |> rpair [] |-> fold_rev (add_line fact_names)
+          |> rpair [] |-> fold_rev add_nontrivial_line
+          |> rpair (0, [])
+          |-> fold_rev (add_desired_line isar_shrink_factor fact_names frees)
+          |> snd
+        val conj_name = conjecture_prefix ^ string_of_int (length hyp_ts)
+        val conjs =
+          atp_proof
+          |> map_filter (fn Inference (name as (_, ss), _, _, []) =>
+                            if member (op =) ss conj_name then SOME name else NONE
+                          | _ => NONE)
+        fun dep_of_step (Definition _) = NONE
+          | dep_of_step (Inference (name, _, _, from)) = SOME (from, name)
+        val ref_graph = atp_proof |> map_filter dep_of_step |> make_ref_graph
+        val axioms = axioms_of_ref_graph ref_graph conjs
+        val tainted = tainted_atoms_of_ref_graph ref_graph conjs
+        val props =
+          Symtab.empty
+          |> fold (fn Definition _ => I (* FIXME *)
+                    | Inference ((s, _), t, _, _) =>
+                      Symtab.update_new (s,
+                          t |> member (op = o apsnd fst) tainted s ? s_not))
+                  atp_proof
+        (* FIXME: add "fold_rev forall_of (map Var (Term.add_vars t []))"? *)
+        fun prop_of_clause c =
+          fold (curry s_disj) (map_filter (Symtab.lookup props o fst) c)
+               @{term False}
+        fun label_of_clause c = (space_implode "___" (map fst c), 0)
+        fun maybe_show outer c =
+          (outer andalso length c = 1 andalso subset (op =) (c, conjs))
+          ? cons Show
+        fun do_have outer qs (gamma, c) =
+          Prove (maybe_show outer c qs, label_of_clause c, prop_of_clause c,
+                 By_Metis (fold (add_fact_from_dependency fact_names
+                                 o the_single) gamma ([], [])))
+        fun do_inf outer (Have z) = do_have outer [] z
+          | do_inf outer (Hence z) = do_have outer [Then] z
+          | do_inf outer (Cases cases) =
+            let val c = succedent_of_cases cases in
+              Prove (maybe_show outer c [Ultimately], label_of_clause c,
+                     prop_of_clause c,
+                     Case_Split (map (do_case false) cases, ([], [])))
+            end
+        and do_case outer (c, infs) =
+          Assume (label_of_clause c, prop_of_clause c) ::
+          map (do_inf outer) infs
+        val isar_proof =
+          (if null params then [] else [Fix params]) @
+          (ref_graph
+           |> redirect_graph axioms tainted
+           |> chain_direct_proof
+           |> map (do_inf true)
            |> kill_duplicate_assumptions_in_proof
-           |> then_chain_proof
            |> kill_useless_labels_in_proof
-           |> relabel_proof
-           |> string_for_proof ctxt type_enc lam_trans subgoal subgoal_count of
-        "" =>
-        if isar_proof_requested then
-          "\nNo structured proof available (proof too short)."
-        else
-          ""
-      | proof =>
-        "\n\n" ^ (if isar_proof_requested then "Structured proof"
-                  else "Perhaps this will work") ^
-        ":\n" ^ Markup.markup Isabelle_Markup.sendback proof
+           |> relabel_proof)
+          |> string_for_proof ctxt type_enc lam_trans subgoal subgoal_count
+      in
+        case isar_proof of
+          "" =>
+          if isar_proof_requested then
+            "\nNo structured proof available (proof too short)."
+          else
+            ""
+        | _ =>
+          "\n\n" ^ (if isar_proof_requested then "Structured proof"
+                    else "Perhaps this will work") ^
+          ":\n" ^ Markup.markup Isabelle_Markup.sendback isar_proof
+      end
     val isar_proof =
       if debug then
-        isar_proof_for ()
-      else
-        case try isar_proof_for () of
-          SOME s => s
-        | NONE => if isar_proof_requested then
-                    "\nWarning: The Isar proof construction failed."
-                  else
-                    ""
+        isar_proof_of ()
+      else case try isar_proof_of () of
+        SOME s => s
+      | NONE => if isar_proof_requested then
+                  "\nWarning: The Isar proof construction failed."
+                else
+                  ""
   in one_line_proof ^ isar_proof end
 
 fun proof_text ctxt isar_proof isar_params
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/HOL/Tools/ATP/atp_redirect.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -0,0 +1,223 @@
+(*  Title:      HOL/Tools/ATP/atp_redirect.ML
+    Author:     Jasmin Blanchette, TU Muenchen
+
+Transformation of a proof by contradiction into a direct proof.
+*)
+
+signature ATP_ATOM =
+sig
+  type key
+  val ord : key * key -> order
+  val string_of : key -> string
+end;
+
+signature ATP_REDIRECT =
+sig
+  type atom
+
+  structure Atom_Graph : GRAPH
+
+  type ref_sequent = atom list * atom
+  type ref_graph = unit Atom_Graph.T
+
+  type clause = atom list
+  type direct_sequent = atom list * clause
+  type direct_graph = unit Atom_Graph.T
+
+  type rich_sequent = clause list * clause
+
+  datatype direct_inference =
+    Have of rich_sequent |
+    Hence of rich_sequent |
+    Cases of (clause * direct_inference list) list
+
+  type direct_proof = direct_inference list
+
+  val make_ref_graph : (atom list * atom) list -> ref_graph
+  val axioms_of_ref_graph : ref_graph -> atom list -> atom list
+  val tainted_atoms_of_ref_graph : ref_graph -> atom list -> atom list
+  val sequents_of_ref_graph : ref_graph -> ref_sequent list
+  val redirect_sequent : atom list -> atom -> ref_sequent -> direct_sequent
+  val direct_graph : direct_sequent list -> direct_graph
+  val redirect_graph : atom list -> atom list -> ref_graph -> direct_proof
+  val succedent_of_cases : (clause * direct_inference list) list -> clause
+  val chain_direct_proof : direct_proof -> direct_proof
+  val string_of_direct_proof : direct_proof -> string
+end;
+
+functor ATP_Redirect(Atom : ATP_ATOM): ATP_REDIRECT =
+struct
+
+type atom = Atom.key
+
+structure Atom_Graph = Graph(Atom)
+
+type ref_sequent = atom list * atom
+type ref_graph = unit Atom_Graph.T
+
+type clause = atom list
+type direct_sequent = atom list * clause
+type direct_graph = unit Atom_Graph.T
+
+type rich_sequent = clause list * clause
+
+datatype direct_inference =
+  Have of rich_sequent |
+  Hence of rich_sequent |
+  Cases of (clause * direct_inference list) list
+
+type direct_proof = direct_inference list
+
+fun atom_eq p = (Atom.ord p = EQUAL)
+fun clause_eq (c, d) = (length c = length d andalso forall atom_eq (c ~~ d))
+fun direct_sequent_eq ((gamma, c), (delta, d)) =
+  clause_eq (gamma, delta) andalso clause_eq (c, d)
+
+fun make_ref_graph infers =
+  let
+    fun add_edge to from =
+      Atom_Graph.default_node (from, ())
+      #> Atom_Graph.default_node (to, ())
+      #> Atom_Graph.add_edge_acyclic (from, to)
+    fun add_infer (froms, to) = fold (add_edge to) froms
+  in Atom_Graph.empty |> fold add_infer infers end
+
+fun axioms_of_ref_graph ref_graph conjs =
+  subtract atom_eq conjs (Atom_Graph.minimals ref_graph)
+fun tainted_atoms_of_ref_graph ref_graph = Atom_Graph.all_succs ref_graph
+
+fun sequents_of_ref_graph ref_graph =
+  map (`(Atom_Graph.immediate_preds ref_graph))
+      (filter_out (Atom_Graph.is_minimal ref_graph) (Atom_Graph.keys ref_graph))
+
+fun redirect_sequent tainted bot (gamma, c) =
+  if member atom_eq tainted c then
+    gamma |> List.partition (not o member atom_eq tainted)
+          |>> not (atom_eq (c, bot)) ? cons c
+  else
+    (gamma, [c])
+
+fun direct_graph seqs =
+  let
+    fun add_edge from to =
+      Atom_Graph.default_node (from, ())
+      #> Atom_Graph.default_node (to, ())
+      #> Atom_Graph.add_edge_acyclic (from, to)
+    fun add_seq (gamma, c) = fold (fn l => fold (add_edge l) c) gamma
+  in Atom_Graph.empty |> fold add_seq seqs end
+
+fun disj cs = fold (union atom_eq) cs [] |> sort Atom.ord
+
+fun succedent_of_inference (Have (_, c)) = c
+  | succedent_of_inference (Hence (_, c)) = c
+  | succedent_of_inference (Cases cases) = succedent_of_cases cases
+and succedent_of_case (c, []) = c
+  | succedent_of_case (_, infs) = succedent_of_inference (List.last infs)
+and succedent_of_cases cases = disj (map succedent_of_case cases)
+
+fun dest_Have (Have z) = z
+  | dest_Have _ = raise Fail "non-Have"
+
+fun enrich_Have nontrivs trivs (cs, c) =
+  (cs |> map (fn c => if member clause_eq nontrivs c then disj (c :: trivs)
+                      else c),
+   disj (c :: trivs))
+  |> Have
+
+fun s_cases cases =
+  case cases |> List.partition (null o snd) of
+    (trivs, nontrivs as [(nontriv0, proof)]) =>
+    if forall (can dest_Have) proof then
+      let val seqs = proof |> map dest_Have in
+        seqs |> map (enrich_Have (nontriv0 :: map snd seqs) (map fst trivs))
+      end
+    else
+      [Cases nontrivs]
+  | (_, nontrivs) => [Cases nontrivs]
+
+fun descendants direct_graph =
+  these o try (Atom_Graph.all_succs direct_graph) o single
+
+fun zones_of 0 _ = []
+  | zones_of n (bs :: bss) =
+    (fold (subtract atom_eq) bss) bs :: zones_of (n - 1) (bss @ [bs])
+
+fun redirect_graph axioms tainted ref_graph =
+  let
+    val [bot] = Atom_Graph.maximals ref_graph
+    val seqs =
+      map (redirect_sequent tainted bot) (sequents_of_ref_graph ref_graph)
+    val direct_graph = direct_graph seqs
+
+    fun redirect c proved seqs =
+      if null seqs then
+        []
+      else if length c < 2 then
+        let
+          val proved = c @ proved
+          val provable =
+            filter (fn (gamma, _) => subset atom_eq (gamma, proved)) seqs
+          val horn_provable = filter (fn (_, [_]) => true | _ => false) provable
+          val seq as (gamma, c) = hd (horn_provable @ provable)
+        in
+          Have (map single gamma, c) ::
+          redirect c proved (filter (curry (not o direct_sequent_eq) seq) seqs)
+        end
+      else
+        let
+          fun subsequents seqs zone =
+            filter (fn (gamma, _) => subset atom_eq (gamma, zone @ proved)) seqs
+          val zones = zones_of (length c) (map (descendants direct_graph) c)
+          val subseqss = map (subsequents seqs) zones
+          val seqs = fold (subtract direct_sequent_eq) subseqss seqs
+          val cases =
+            map2 (fn l => fn subseqs => ([l], redirect [l] proved subseqs))
+                 c subseqss
+        in s_cases cases @ redirect (succedent_of_cases cases) proved seqs end
+  in redirect [] axioms seqs end
+
+val chain_direct_proof =
+  let
+    fun chain_inf cl0 (seq as Have (cs, c)) =
+        if member clause_eq cs cl0 then
+          Hence (filter_out (curry clause_eq cl0) cs, c)
+        else
+          seq
+      | chain_inf _ (Cases cases) = Cases (map chain_case cases)
+    and chain_case (c, is) = (c, chain_proof (SOME c) is)
+    and chain_proof _ [] = []
+      | chain_proof (SOME prev) (i :: is) =
+        chain_inf prev i :: chain_proof (SOME (succedent_of_inference i)) is
+      | chain_proof _ (i :: is) =
+        i :: chain_proof (SOME (succedent_of_inference i)) is
+  in chain_proof NONE end
+
+fun indent 0 = ""
+  | indent n = "  " ^ indent (n - 1)
+
+fun string_of_clause [] = "\<bottom>"
+  | string_of_clause ls = space_implode " \<or> " (map Atom.string_of ls)
+
+fun string_of_rich_sequent ch ([], c) = ch ^ " " ^ string_of_clause c
+  | string_of_rich_sequent ch (cs, c) =
+    commas (map string_of_clause cs) ^ " " ^ ch ^ " " ^ string_of_clause c
+
+fun string_of_case depth (c, proof) =
+  indent (depth + 1) ^ "[" ^ string_of_clause c ^ "]"
+  |> not (null proof) ? suffix ("\n" ^ string_of_subproof (depth + 1) proof)
+
+and string_of_inference depth (Have seq) =
+    indent depth ^ string_of_rich_sequent "\<triangleright>" seq
+  | string_of_inference depth (Hence seq) =
+    indent depth ^ string_of_rich_sequent "\<guillemotright>" seq
+  | string_of_inference depth (Cases cases) =
+    indent depth ^ "[\n" ^
+    space_implode ("\n" ^ indent depth ^ "|\n")
+                  (map (string_of_case depth) cases) ^ "\n" ^
+    indent depth ^ "]"
+
+and string_of_subproof depth = cat_lines o map (string_of_inference depth)
+
+val string_of_direct_proof = string_of_subproof 0
+
+end;
--- a/src/HOL/Tools/ATP/atp_systems.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/ATP/atp_systems.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -339,8 +339,8 @@
                        sosN))),
       (0.333, (false, (300, DFG DFG_Unsorted, "poly_tags??", lam_liftingN,
                        sosN))),
-      (0.334, (true, (50, DFG DFG_Unsorted, "mono_tags??", lam_liftingN,
-                      no_sosN)))]
+      (0.334, (false, (50, DFG DFG_Unsorted, "mono_tags??", lam_liftingN,
+                       no_sosN)))]
      |> (if Config.get ctxt force_sos then hd #> apfst (K 1.0) #> single
          else I)}
 
--- a/src/HOL/Tools/ATP/atp_translate.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/ATP/atp_translate.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -2338,11 +2338,14 @@
    symbols (with "$i" as the type of individuals), but some provers (e.g.,
    SNARK) require explicit declarations. The situation is similar for THF. *)
 
-fun default_type pred_sym s =
+fun default_type type_enc pred_sym s =
   let
     val ind =
-      if String.isPrefix type_const_prefix s then atype_of_types
-      else individual_atype
+      case type_enc of
+        Simple_Types _ =>
+        if String.isPrefix type_const_prefix s then atype_of_types
+        else individual_atype
+      | _ => individual_atype
     fun typ 0 = if pred_sym then bool_atype else ind
       | typ ary = AFun (ind, typ (ary - 1))
   in typ end
@@ -2350,7 +2353,7 @@
 fun nary_type_constr_type n =
   funpow n (curry AFun atype_of_types) atype_of_types
 
-fun undeclared_syms_in_problem problem =
+fun undeclared_syms_in_problem type_enc problem =
   let
     val declared = declared_syms_in_problem problem
     fun do_sym name ty =
@@ -2363,7 +2366,7 @@
       | do_type (ATyAbs (_, ty)) = do_type ty
     fun do_term pred_sym (ATerm (name as (s, _), tms)) =
         is_tptp_user_symbol s
-        ? do_sym name (fn _ => default_type pred_sym s (length tms))
+        ? do_sym name (fn _ => default_type type_enc pred_sym s (length tms))
         #> fold (do_term false) tms
       | do_term _ (AAbs ((_, ty), tm)) = do_type ty #> do_term false tm
     fun do_formula (AQuant (_, xs, phi)) =
@@ -2377,11 +2380,11 @@
     |> filter_out (is_built_in_tptp_symbol o fst o fst)
   end
 
-fun declare_undeclared_syms_in_atp_problem problem =
+fun declare_undeclared_syms_in_atp_problem type_enc problem =
   let
     val decls =
       problem
-      |> undeclared_syms_in_problem
+      |> undeclared_syms_in_problem type_enc
       |> sort_wrt (fst o fst)
       |> map (fn (x as (s, _), ty) => Decl (type_decl_prefix ^ s, x, ty ()))
   in (implicit_declsN, decls) :: problem end
@@ -2464,7 +2467,7 @@
           | FOF => I
           | TFF (_, TPTP_Implicit) => I
           | THF (_, TPTP_Implicit, _) => I
-          | _ => declare_undeclared_syms_in_atp_problem)
+          | _ => declare_undeclared_syms_in_atp_problem type_enc)
     val (problem, pool) = problem |> nice_atp_problem readable_names
     fun add_sym_ary (s, {min_ary, ...} : sym_info) =
       min_ary > 0 ? Symtab.insert (op =) (s, min_ary)
--- a/src/HOL/Tools/Datatype/datatype.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Datatype/datatype.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -10,13 +10,17 @@
 signature DATATYPE =
 sig
   include DATATYPE_DATA
-  val add_datatype: config ->
-    (string list * binding * mixfix * (binding * typ list * mixfix) list) list ->
-    theory -> string list * theory
-  val add_datatype_cmd:
-    (string list * binding * mixfix * (binding * string list * mixfix) list) list ->
-    theory -> theory
-  val parse_decl: (string list * binding * mixfix * (binding * string list * mixfix) list) parser
+  type spec =
+    (binding * (string * sort) list * mixfix) *
+    (binding * typ list * mixfix) list
+  type spec_cmd =
+    (binding * (string * string option) list * mixfix) *
+    (binding * string list * mixfix) list
+  val read_specs: spec_cmd list -> theory -> spec list * Proof.context
+  val check_specs: spec list -> theory -> spec list * Proof.context
+  val add_datatype: config -> spec list -> theory -> string list * theory
+  val add_datatype_cmd: config -> spec_cmd list -> theory -> string list * theory
+  val spec_cmd: spec_cmd parser
 end;
 
 structure Datatype : DATATYPE =
@@ -31,15 +35,6 @@
 fun exh_thm_of (dt_info : Datatype_Aux.info Symtab.table) tname =
   #exhaust (the (Symtab.lookup dt_info tname));
 
-val node_name = @{type_name "Datatype.node"};
-val In0_name = @{const_name "Datatype.In0"};
-val In1_name = @{const_name "Datatype.In1"};
-val Scons_name = @{const_name "Datatype.Scons"};
-val Leaf_name = @{const_name "Datatype.Leaf"};
-val Lim_name = @{const_name "Datatype.Lim"};
-val Suml_name = @{const_name "Sum_Type.Suml"};
-val Sumr_name = @{const_name "Sum_Type.Sumr"};
-
 val In0_inject = @{thm In0_inject};
 val In1_inject = @{thm In1_inject};
 val Scons_inject = @{thm Scons_inject};
@@ -88,15 +83,15 @@
     val sumT =
       if null leafTs then HOLogic.unitT
       else Balanced_Tree.make (fn (T, U) => Type (@{type_name Sum_Type.sum}, [T, U])) leafTs;
-    val Univ_elT = HOLogic.mk_setT (Type (node_name, [sumT, branchT]));
+    val Univ_elT = HOLogic.mk_setT (Type (@{type_name Datatype.node}, [sumT, branchT]));
     val UnivT = HOLogic.mk_setT Univ_elT;
     val UnivT' = Univ_elT --> HOLogic.boolT;
     val Collect = Const (@{const_name Collect}, UnivT' --> UnivT);
 
-    val In0 = Const (In0_name, Univ_elT --> Univ_elT);
-    val In1 = Const (In1_name, Univ_elT --> Univ_elT);
-    val Leaf = Const (Leaf_name, sumT --> Univ_elT);
-    val Lim = Const (Lim_name, (branchT --> Univ_elT) --> Univ_elT);
+    val In0 = Const (@{const_name Datatype.In0}, Univ_elT --> Univ_elT);
+    val In1 = Const (@{const_name Datatype.In1}, Univ_elT --> Univ_elT);
+    val Leaf = Const (@{const_name Datatype.Leaf}, sumT --> Univ_elT);
+    val Lim = Const (@{const_name Datatype.Lim}, (branchT --> Univ_elT) --> Univ_elT);
 
     (* make injections needed for embedding types in leaves *)
 
@@ -122,7 +117,7 @@
         right = fn t => In1 $ t,
         init =
           if ts = [] then Const (@{const_name undefined}, Univ_elT)
-          else foldr1 (HOLogic.mk_binop Scons_name) ts};
+          else foldr1 (HOLogic.mk_binop @{const_name Datatype.Scons}) ts};
 
     (* function spaces *)
 
@@ -136,8 +131,8 @@
               val Type (_, [T1, T2]) = T;
               fun mkT U = (U --> Univ_elT) --> T --> Univ_elT;
             in
-              if i <= n2 then Const (Suml_name, mkT T1) $ mk_inj T1 n2 i
-              else Const (Sumr_name, mkT T2) $ mk_inj T2 (n - n2) (i - n2)
+              if i <= n2 then Const (@{const_name Sum_Type.Suml}, mkT T1) $ mk_inj T1 n2 i
+              else Const (@{const_name Sum_Type.Sumr}, mkT T2) $ mk_inj T2 (n - n2) (i - n2)
             end;
       in mk_inj branchT (length branchTs) (1 + find_index (fn T'' => T'' = T') branchTs) end;
 
@@ -198,16 +193,15 @@
             (rtac exI 1 THEN rtac CollectI 1 THEN
               QUIET_BREADTH_FIRST (has_fewer_prems 1)
               (resolve_tac rep_intrs 1)))
-        (types_syntax ~~ tyvars ~~ (take (length newTs) rep_set_names))
+        (types_syntax ~~ tyvars ~~ take (length newTs) rep_set_names)
       ||> Sign.add_path big_name;
 
     (*********************** definition of constructors ***********************)
 
-    val big_rep_name = space_implode "_" new_type_names ^ "_Rep_";
-    val rep_names = map (prefix "Rep_") new_type_names;
+    val big_rep_name = big_name ^ "_Rep_";
     val rep_names' = map (fn i => big_rep_name ^ string_of_int i) (1 upto length (flat (tl descr)));
     val all_rep_names =
-      map (Sign.intern_const thy3) rep_names @
+      map (#Rep_name o #1 o #2) typedefs @
       map (Sign.full_bname thy3) rep_names';
 
     (* isomorphism declarations *)
@@ -217,7 +211,8 @@
 
     (* constructor definitions *)
 
-    fun make_constr_def tname T n ((cname, cargs), (cname', mx)) (thy, defs, eqns, i) =
+    fun make_constr_def tname (typedef: Typedef.info) T n
+        ((cname, cargs), (cname', mx)) (thy, defs, eqns, i) =
       let
         fun constr_arg dt (j, l_args, r_args) =
           let
@@ -229,19 +224,18 @@
                 (j + 1, free_t :: l_args, mk_lim
                   (Const (nth all_rep_names m, U --> Univ_elT) $
                     Datatype_Aux.app_bnds free_t (length Us)) Us :: r_args)
-            | _ => (j + 1, free_t::l_args, (Leaf $ mk_inj T free_t)::r_args))
+            | _ => (j + 1, free_t :: l_args, (Leaf $ mk_inj T free_t) :: r_args))
           end;
 
         val (_, l_args, r_args) = fold_rev constr_arg cargs (1, [], []);
         val constrT = map (Datatype_Aux.typ_of_dtyp descr') cargs ---> T;
-        val abs_name = Sign.intern_const thy ("Abs_" ^ tname);
-        val rep_name = Sign.intern_const thy ("Rep_" ^ tname);
+        val ({Abs_name, Rep_name, ...}, _) = typedef;
         val lhs = list_comb (Const (cname, constrT), l_args);
         val rhs = mk_univ_inj r_args n i;
-        val def = Logic.mk_equals (lhs, Const (abs_name, Univ_elT --> T) $ rhs);
+        val def = Logic.mk_equals (lhs, Const (Abs_name, Univ_elT --> T) $ rhs);
         val def_name = Long_Name.base_name cname ^ "_def";
         val eqn =
-          HOLogic.mk_Trueprop (HOLogic.mk_eq (Const (rep_name, T --> Univ_elT) $ lhs, rhs));
+          HOLogic.mk_Trueprop (HOLogic.mk_eq (Const (Rep_name, T --> Univ_elT) $ lhs, rhs));
         val ([def_thm], thy') =
           thy
           |> Sign.add_consts_i [(cname', constrT, mx)]
@@ -251,12 +245,11 @@
 
     (* constructor definitions for datatype *)
 
-    fun dt_constr_defs ((((_, (_, _, constrs)), tname), T), constr_syntax)
+    fun dt_constr_defs (((((_, (_, _, constrs)), tname), typedef: Typedef.info), T), constr_syntax)
         (thy, defs, eqns, rep_congs, dist_lemmas) =
       let
         val _ $ (_ $ (cong_f $ _) $ _) = concl_of arg_cong;
-        val rep_const =
-          cterm_of thy (Const (Sign.intern_const thy ("Rep_" ^ tname), T --> Univ_elT));
+        val rep_const = cterm_of thy (Const (#Rep_name (#1 typedef), T --> Univ_elT));
         val cong' =
           Drule.export_without_context
             (cterm_instantiate [(cterm_of thy cong_f, rep_const)] arg_cong);
@@ -264,7 +257,7 @@
           Drule.export_without_context
             (cterm_instantiate [(cterm_of thy distinct_f, rep_const)] distinct_lemma);
         val (thy', defs', eqns', _) =
-          fold ((make_constr_def tname T) (length constrs))
+          fold (make_constr_def tname typedef T (length constrs))
             (constrs ~~ constr_syntax) (Sign.add_path tname thy, defs, [], 1);
       in
         (Sign.parent_path thy', defs', eqns @ [eqns'],
@@ -273,7 +266,7 @@
 
     val (thy4, constr_defs, constr_rep_eqns, rep_congs, dist_lemmas) =
       fold dt_constr_defs
-        (hd descr ~~ new_type_names ~~ newTs ~~ constr_syntax)
+        (hd descr ~~ new_type_names ~~ map #2 typedefs ~~ newTs ~~ constr_syntax)
         (thy3 |> Sign.add_consts_i iso_decls |> Sign.parent_path, [], [], [], []);
 
 
@@ -281,12 +274,11 @@
 
     val _ = Datatype_Aux.message config "Proving isomorphism properties ...";
 
-    val newT_iso_axms = map (fn (_, (_, td)) =>
-      (collect_simp (#Abs_inverse td), #Rep_inverse td,
-       collect_simp (#Rep td))) typedefs;
+    val newT_iso_axms = typedefs |> map (fn (_, (_, {Abs_inverse, Rep_inverse, Rep, ...})) =>
+      (collect_simp Abs_inverse, Rep_inverse, collect_simp Rep));
 
-    val newT_iso_inj_thms = map (fn (_, (_, td)) =>
-      (collect_simp (#Abs_inject td) RS iffD1, #Rep_inject td RS iffD1)) typedefs;
+    val newT_iso_inj_thms = typedefs |> map (fn (_, (_, {Abs_inject, Rep_inject, ...})) =>
+      (collect_simp Abs_inject RS iffD1, Rep_inject RS iffD1));
 
     (********* isomorphisms between existing types and "unfolded" types *******)
 
@@ -305,8 +297,7 @@
       let
         val argTs = map (Datatype_Aux.typ_of_dtyp descr') cargs;
         val T = nth recTs k;
-        val rep_name = nth all_rep_names k;
-        val rep_const = Const (rep_name, T --> Univ_elT);
+        val rep_const = Const (nth all_rep_names k, T --> Univ_elT);
         val constr = Const (cname, argTs ---> T);
 
         fun process_arg ks' dt (i2, i2', ts, Ts) =
@@ -330,7 +321,7 @@
         val (i2, i2', ts, Ts) = fold (process_arg ks) cargs (1, 1, [], []);
         val xs = map (uncurry (Datatype_Aux.mk_Free "x")) (argTs ~~ (1 upto (i2 - 1)));
         val ys = map (uncurry (Datatype_Aux.mk_Free "y")) (Ts ~~ (1 upto (i2' - 1)));
-        val f = fold_rev (absfree o dest_Free) (xs @ ys) (mk_univ_inj ts n i);
+        val f = fold_rev lambda (xs @ ys) (mk_univ_inj ts n i);
 
         val (_, _, ts', _) = fold (process_arg []) cargs (1, 1, [], []);
         val eqn = HOLogic.mk_Trueprop (HOLogic.mk_eq
@@ -346,7 +337,7 @@
         val (_, (tname, _, _)) = hd ds;
         val {rec_rewrites, rec_names, ...} = the (Symtab.lookup dt_info tname);
 
-        fun process_dt (k, (tname, _, constrs)) (fs, eqns, isos) =
+        fun process_dt (k, (_, _, constrs)) (fs, eqns, isos) =
           let
             val (fs', eqns', _) = fold (make_iso_def k ks (length constrs)) constrs (fs, eqns, 1);
             val iso = (nth recTs k, nth all_rep_names k);
@@ -364,7 +355,7 @@
 
         (* prove characteristic equations *)
 
-        val rewrites = def_thms @ (map mk_meta_eq rec_rewrites);
+        val rewrites = def_thms @ map mk_meta_eq rec_rewrites;
         val char_thms' =
           map (fn eqn => Skip_Proof.prove_global thy' [] [] eqn
             (fn _ => EVERY [rewrite_goals_tac rewrites, rtac refl 1])) eqns;
@@ -414,14 +405,14 @@
             val T = nth recTs i;
             val Rep_t = Const (nth all_rep_names i, T --> Univ_elT);
             val rep_set_name = nth rep_set_names i;
-          in
-            (HOLogic.all_const T $ Abs ("y", T, HOLogic.imp $
+            val concl1 =
+              HOLogic.all_const T $ Abs ("y", T, HOLogic.imp $
                 HOLogic.mk_eq (Rep_t $ Datatype_Aux.mk_Free "x" T i, Rep_t $ Bound 0) $
-                  HOLogic.mk_eq (Datatype_Aux.mk_Free "x" T i, Bound 0)),
-              Const (rep_set_name, UnivT') $ (Rep_t $ Datatype_Aux.mk_Free "x" T i))
-          end;
+                  HOLogic.mk_eq (Datatype_Aux.mk_Free "x" T i, Bound 0));
+            val concl2 = Const (rep_set_name, UnivT') $ (Rep_t $ Datatype_Aux.mk_Free "x" T i);
+          in (concl1, concl2) end;
 
-        val (ind_concl1, ind_concl2) = ListPair.unzip (map mk_ind_concl ds);
+        val (ind_concl1, ind_concl2) = split_list (map mk_ind_concl ds);
 
         val rewrites = map mk_meta_eq iso_char_thms;
         val inj_thms' = map snd newT_iso_inj_thms @ map (fn r => r RS @{thm injD}) inj_thms;
@@ -461,7 +452,7 @@
                 REPEAT ((resolve_tac rep_intrs THEN_ALL_NEW
                   ((REPEAT o etac allE) THEN' ares_tac elem_thms)) 1)]);
 
-      in (inj_thms'' @ inj_thms, elem_thms @ (Datatype_Aux.split_conj_thm elem_thm)) end;
+      in (inj_thms'' @ inj_thms, elem_thms @ Datatype_Aux.split_conj_thm elem_thm) end;
 
     val (iso_inj_thms_unfolded, iso_elem_thms) =
       fold_rev prove_iso_thms (tl descr) ([], map #3 newT_iso_axms);
@@ -520,7 +511,7 @@
     fun prove_constr_rep_thm eqn =
       let
         val inj_thms = map fst newT_iso_inj_thms;
-        val rewrites = @{thm o_def} :: constr_defs @ (map (mk_meta_eq o #2) newT_iso_axms);
+        val rewrites = @{thm o_def} :: constr_defs @ map (mk_meta_eq o #2) newT_iso_axms;
       in
         Skip_Proof.prove_global thy5 [] [] eqn
         (fn _ => EVERY
@@ -540,7 +531,7 @@
 
     val dist_rewrites =
       map (fn (rep_thms, dist_lemma) =>
-        dist_lemma::(rep_thms @ [In0_eq, In1_eq, In0_not_In1, In1_not_In0]))
+        dist_lemma :: (rep_thms @ [In0_eq, In1_eq, In0_not_In1, In1_not_In0]))
           (constr_rep_thms ~~ dist_lemmas);
 
     fun prove_distinct_thms dist_rewrites' (k, ts) =
@@ -597,28 +588,26 @@
       map (fn r => r RS @{thm the_inv_f_f} RS subst) iso_inj_thms_unfolded;
     val Rep_inverse_thms' = map (fn r => r RS @{thm the_inv_f_f}) iso_inj_thms_unfolded;
 
-    fun mk_indrule_lemma ((i, _), T) (prems, concls) =
+    fun mk_indrule_lemma (i, _) T =
       let
-        val Rep_t =
-          Const (nth all_rep_names i, T --> Univ_elT) $ Datatype_Aux.mk_Free "x" T i;
-
+        val Rep_t = Const (nth all_rep_names i, T --> Univ_elT) $ Datatype_Aux.mk_Free "x" T i;
         val Abs_t =
           if i < length newTs then
-            Const (Sign.intern_const thy6 ("Abs_" ^ nth new_type_names i), Univ_elT --> T)
-          else Const (@{const_name the_inv_into},
+            Const (#Abs_name (#1 (#2 (nth typedefs i))), Univ_elT --> T)
+          else
+            Const (@{const_name the_inv_into},
               [HOLogic.mk_setT T, T --> Univ_elT, Univ_elT] ---> T) $
             HOLogic.mk_UNIV T $ Const (nth all_rep_names i, T --> Univ_elT);
-      in
-        (prems @
-          [HOLogic.imp $
+        val prem =
+          HOLogic.imp $
             (Const (nth rep_set_names i, UnivT') $ Rep_t) $
-              (Datatype_Aux.mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ (Abs_t $ Rep_t))],
-         concls @
-          [Datatype_Aux.mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ Datatype_Aux.mk_Free "x" T i])
-      end;
+              (Datatype_Aux.mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ (Abs_t $ Rep_t));
+        val concl =
+          Datatype_Aux.mk_Free "P" (T --> HOLogic.boolT) (i + 1) $ Datatype_Aux.mk_Free "x" T i;
+      in (prem, concl) end;
 
     val (indrule_lemma_prems, indrule_lemma_concls) =
-      fold mk_indrule_lemma (descr' ~~ recTs) ([], []);
+      split_list (map2 mk_indrule_lemma descr' recTs);
 
     val cert = cterm_of thy6;
 
@@ -653,7 +642,7 @@
              [REPEAT (eresolve_tac Abs_inverse_thms 1),
               simp_tac (HOL_basic_ss addsimps (Thm.symmetric r :: Rep_inverse_thms')) 1,
               DEPTH_SOLVE_1 (ares_tac [prem] 1 ORELSE etac allE 1)]))
-                  (prems ~~ (constr_defs @ (map mk_meta_eq iso_char_thms))))]);
+                  (prems ~~ (constr_defs @ map mk_meta_eq iso_char_thms)))]);
 
     val ([dt_induct'], thy7) =
       thy6
@@ -670,27 +659,74 @@
 
 (** datatype definition **)
 
-fun gen_add_datatype prep_typ config dts thy =
+(* specifications *)
+
+type spec = (binding * (string * sort) list * mixfix) * (binding * typ list * mixfix) list;
+
+type spec_cmd =
+  (binding * (string * string option) list * mixfix) * (binding * string list * mixfix) list;
+
+local
+
+fun parse_spec ctxt ((b, args, mx), constrs) =
+  ((b, map (apsnd (Typedecl.read_constraint ctxt)) args, mx),
+    constrs |> map (fn (c, Ts, mx') => (c, map (Syntax.parse_typ ctxt) Ts, mx')));
+
+fun check_specs ctxt (specs: spec list) =
+  let
+    fun prep_spec ((tname, args, mx), constrs) tys =
+      let
+        val (args', tys1) = chop (length args) tys;
+        val (constrs', tys3) = (constrs, tys1) |-> fold_map (fn (cname, cargs, mx') => fn tys2 =>
+          let val (cargs', tys3) = chop (length cargs) tys2;
+          in ((cname, cargs', mx'), tys3) end);
+      in (((tname, map dest_TFree args', mx), constrs'), tys3) end;
+
+    val all_tys =
+      specs |> maps (fn ((_, args, _), cs) => map TFree args @ maps #2 cs)
+      |> Syntax.check_typs ctxt;
+
+  in #1 (fold_map prep_spec specs all_tys) end;
+
+fun prep_specs parse raw_specs thy =
+  let
+    val ctxt = thy
+      |> Theory.copy
+      |> Sign.add_types_global (map (fn ((b, args, mx), _) => (b, length args, mx)) raw_specs)
+      |> Proof_Context.init_global
+      |> fold (fn ((_, args, _), _) => fold (fn (a, _) =>
+          Variable.declare_typ (TFree (a, dummyS))) args) raw_specs;
+    val specs = check_specs ctxt (map (parse ctxt) raw_specs);
+  in (specs, ctxt) end;
+
+in
+
+val read_specs = prep_specs parse_spec;
+val check_specs = prep_specs (K I);
+
+end;
+
+
+(* main commands *)
+
+fun gen_add_datatype prep_specs config raw_specs thy =
   let
     val _ = Theory.requires thy "Datatype" "datatype definitions";
 
-    (* this theory is used just for parsing *)
-    val tmp_thy = thy
-      |> Theory.copy
-      |> Sign.add_types_global (map (fn (tvs, tname, mx, _) => (tname, length tvs, mx)) dts);
-    val tmp_ctxt = Proof_Context.init_global tmp_thy;
+    val (dts, spec_ctxt) = prep_specs raw_specs thy;
+    val ((_, tyvars, _), _) :: _ = dts;
+    val string_of_tyvar = Syntax.string_of_typ spec_ctxt o TFree;
 
-    val (tyvars, _, _, _) ::_ = dts;
-    val (new_dts, types_syntax) = ListPair.unzip (map (fn (tvs, tname, mx, _) =>
-      let val full_tname = Sign.full_name tmp_thy tname in
+    val (new_dts, types_syntax) = dts |> map (fn ((tname, tvs, mx), _) =>
+      let val full_tname = Sign.full_name thy tname in
         (case duplicates (op =) tvs of
           [] =>
             if eq_set (op =) (tyvars, tvs) then ((full_tname, tvs), (tname, mx))
-            else error ("Mutually recursive datatypes must have same type parameters")
+            else error "Mutually recursive datatypes must have same type parameters"
         | dups =>
             error ("Duplicate parameter(s) for datatype " ^ Binding.print tname ^
-              " : " ^ commas dups))
-      end) dts);
+              " : " ^ commas (map string_of_tyvar dups)))
+      end) |> split_list;
     val dt_names = map fst new_dts;
 
     val _ =
@@ -698,45 +734,37 @@
         [] => ()
       | dups => error ("Duplicate datatypes: " ^ commas_quote dups));
 
-    fun prep_dt_spec (tvs, tname, mx, constrs) (dts', constr_syntax, sorts, i) =
+    fun prep_dt_spec ((tname, tvs, mx), constrs) (dts', constr_syntax, i) =
       let
-        fun prep_constr (cname, cargs, mx') (constrs, constr_syntax', sorts') =
+        fun prep_constr (cname, cargs, mx') (constrs, constr_syntax') =
           let
-            val (cargs', sorts'') = fold_map (prep_typ tmp_thy) cargs sorts';
             val _ =
-              (case subtract (op =) tvs (fold Term.add_tfree_namesT cargs' []) of
+              (case subtract (op =) tvs (fold Term.add_tfreesT cargs []) of
                 [] => ()
-              | vs => error ("Extra type variables on rhs: " ^ commas vs));
-            val c = Sign.full_name_path tmp_thy (Binding.name_of tname) cname;
+              | vs => error ("Extra type variables on rhs: " ^ commas (map string_of_tyvar vs)));
+            val c = Sign.full_name_path thy (Binding.name_of tname) cname;
           in
-            (constrs @ [(c, map (Datatype_Aux.dtyp_of_typ new_dts) cargs')],
-              constr_syntax' @ [(cname, mx')], sorts'')
+            (constrs @ [(c, map (Datatype_Aux.dtyp_of_typ new_dts) cargs)],
+              constr_syntax' @ [(cname, mx')])
           end handle ERROR msg =>
             cat_error msg ("The error above occurred in constructor " ^ Binding.print cname ^
               " of datatype " ^ Binding.print tname);
 
-        val (constrs', constr_syntax', sorts') = fold prep_constr constrs ([], [], sorts);
+        val (constrs', constr_syntax') = fold prep_constr constrs ([], []);
       in
         (case duplicates (op =) (map fst constrs') of
           [] =>
-            (dts' @ [(i, (Sign.full_name tmp_thy tname, tvs, constrs'))],
-              constr_syntax @ [constr_syntax'], sorts', i + 1)
+            (dts' @ [(i, (Sign.full_name thy tname, map Datatype_Aux.DtTFree tvs, constrs'))],
+              constr_syntax @ [constr_syntax'], i + 1)
         | dups =>
             error ("Duplicate constructors " ^ commas_quote dups ^
               " in datatype " ^ Binding.print tname))
       end;
 
-    val (dts0, constr_syntax, sorts', i) = fold prep_dt_spec dts ([], [], [], 0);
-    val tmp_ctxt' = tmp_ctxt |> fold (Variable.declare_typ o TFree) sorts';
-
-    val dts' = dts0 |> map (fn (i, (name, tvs, cs)) =>
-      let
-        val args = tvs |>
-          map (fn a => Datatype_Aux.DtTFree (a, Proof_Context.default_sort tmp_ctxt' (a, ~1)));
-      in (i, (name, args, cs)) end);
+    val (dts', constr_syntax, i) = fold prep_dt_spec dts ([], [], 0);
 
     val dt_info = Datatype_Data.get_all thy;
-    val (descr, _) = Datatype_Aux.unfold_datatypes tmp_ctxt dts' dt_info dts' i;
+    val (descr, _) = Datatype_Aux.unfold_datatypes spec_ctxt dts' dt_info dts' i;
     val _ =
       Datatype_Aux.check_nonempty descr
         handle (exn as Datatype_Aux.Datatype_Empty s) =>
@@ -745,7 +773,7 @@
 
     val _ =
       Datatype_Aux.message config
-        ("Constructing datatype(s) " ^ commas_quote (map (Binding.name_of o #2) dts));
+        ("Constructing datatype(s) " ^ commas_quote (map (Binding.name_of o #1 o #1) dts));
   in
     thy
     |> representation_proofs config dt_info descr types_syntax constr_syntax
@@ -754,20 +782,21 @@
       Datatype_Data.derive_datatype_props config dt_names descr induct inject distinct)
   end;
 
-val add_datatype = gen_add_datatype Datatype_Data.cert_typ;
-val add_datatype_cmd = snd oo gen_add_datatype Datatype_Data.read_typ Datatype_Aux.default_config;
+val add_datatype = gen_add_datatype check_specs;
+val add_datatype_cmd = gen_add_datatype read_specs;
 
 
-(* concrete syntax *)
+(* outer syntax *)
 
-val parse_decl =
-  Parse.type_args -- Parse.binding -- Parse.opt_mixfix --
+val spec_cmd =
+  Parse.type_args_constrained -- Parse.binding -- Parse.opt_mixfix --
   (Parse.$$$ "=" |-- Parse.enum1 "|" (Parse.binding -- Scan.repeat Parse.typ -- Parse.opt_mixfix))
-  >> (fn (((vs, t), mx), cons) => (vs, t, mx, map Parse.triple1 cons));
+  >> (fn (((vs, t), mx), cons) => ((t, vs, mx), map Parse.triple1 cons));
 
 val _ =
   Outer_Syntax.command "datatype" "define inductive datatypes" Keyword.thy_decl
-    (Parse.and_list1 parse_decl >> (Toplevel.theory o add_datatype_cmd));
+    (Parse.and_list1 spec_cmd
+      >> (Toplevel.theory o (snd oo add_datatype_cmd Datatype_Aux.default_config)));
 
 
 open Datatype_Data;
--- a/src/HOL/Tools/Datatype/datatype_abs_proofs.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Datatype/datatype_abs_proofs.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -16,13 +16,13 @@
       thm -> theory -> (string list * thm list) * theory
   val prove_case_thms : config -> string list -> descr list ->
     string list -> thm list -> theory -> (thm list list * string list) * theory
-  val prove_split_thms : config -> string list -> descr list ->
+  val prove_split_thms : config -> string list -> string list -> descr list ->
     thm list list -> thm list list -> thm list -> thm list list -> theory ->
     (thm * thm) list * theory
   val prove_nchotomys : config -> string list -> descr list ->
     thm list -> theory -> thm list * theory
-  val prove_weak_case_congs : string list -> descr list -> theory -> thm list * theory
-  val prove_case_congs : string list -> descr list ->
+  val prove_weak_case_congs : string list -> string list -> descr list -> theory -> thm list * theory
+  val prove_case_congs : string list -> string list -> descr list ->
     thm list -> thm list list -> theory -> thm list * theory
 end;
 
@@ -262,7 +262,7 @@
              resolve_tac rec_unique_thms 1,
              resolve_tac rec_intrs 1,
              REPEAT (rtac allI 1 ORELSE resolve_tac rec_total_thms 1)]))
-       (Datatype_Prop.make_primrecs new_type_names descr thy2);
+       (Datatype_Prop.make_primrecs reccomb_names descr thy2);
   in
     thy2
     |> Sign.add_path (space_implode "_" new_type_names)
@@ -338,7 +338,7 @@
           Skip_Proof.prove_global thy2 [] [] t
             (fn _ =>
               EVERY [rewrite_goals_tac (case_defs @ map mk_meta_eq primrec_thms), rtac refl 1]))
-        (Datatype_Prop.make_cases new_type_names descr thy2);
+        (Datatype_Prop.make_cases case_names descr thy2);
   in
     thy2
     |> Context.theory_map ((fold o fold) Nitpick_Simps.add_thm case_thms)
@@ -351,7 +351,7 @@
 (******************************* case splitting *******************************)
 
 fun prove_split_thms (config : Datatype_Aux.config)
-    new_type_names descr constr_inject dist_rewrites casedist_thms case_thms thy =
+    new_type_names case_names descr constr_inject dist_rewrites casedist_thms case_thms thy =
   let
     val _ = Datatype_Aux.message config "Proving equations for case splitting ...";
 
@@ -374,10 +374,10 @@
 
     val split_thm_pairs =
       map prove_split_thms
-        (Datatype_Prop.make_splits new_type_names descr thy ~~ constr_inject ~~
+        (Datatype_Prop.make_splits case_names descr thy ~~ constr_inject ~~
           dist_rewrites ~~ casedist_thms ~~ case_thms ~~ newTs);
 
-    val (split_thms, split_asm_thms) = ListPair.unzip split_thm_pairs
+    val (split_thms, split_asm_thms) = split_list split_thm_pairs
 
   in
     thy
@@ -386,14 +386,14 @@
     |-> (fn (thms1, thms2) => pair (thms1 ~~ thms2))
   end;
 
-fun prove_weak_case_congs new_type_names descr thy =
+fun prove_weak_case_congs new_type_names case_names descr thy =
   let
     fun prove_weak_case_cong t =
      Skip_Proof.prove_global thy [] (Logic.strip_imp_prems t) (Logic.strip_imp_concl t)
-       (fn {prems, ...} => EVERY [rtac ((hd prems) RS arg_cong) 1]);
+       (fn {prems, ...} => EVERY [rtac (hd prems RS arg_cong) 1]);
 
     val weak_case_congs =
-      map prove_weak_case_cong (Datatype_Prop.make_weak_case_congs new_type_names descr thy);
+      map prove_weak_case_cong (Datatype_Prop.make_weak_case_congs case_names descr thy);
 
   in thy |> Datatype_Aux.store_thms "weak_case_cong" new_type_names weak_case_congs end;
 
@@ -421,7 +421,7 @@
 
   in thy |> Datatype_Aux.store_thms "nchotomy" new_type_names nchotomys end;
 
-fun prove_case_congs new_type_names descr nchotomys case_thms thy =
+fun prove_case_congs new_type_names case_names descr nchotomys case_thms thy =
   let
     fun prove_case_cong ((t, nchotomy), case_rewrites) =
       let
@@ -444,8 +444,8 @@
       end;
 
     val case_congs =
-      map prove_case_cong (Datatype_Prop.make_case_congs
-        new_type_names descr thy ~~ nchotomys ~~ case_thms);
+      map prove_case_cong
+        (Datatype_Prop.make_case_congs case_names descr thy ~~ nchotomys ~~ case_thms);
 
   in thy |> Datatype_Aux.store_thms "case_cong" new_type_names case_congs end;
 
--- a/src/HOL/Tools/Datatype/datatype_aux.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Datatype/datatype_aux.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -57,7 +57,7 @@
   exception Datatype
   exception Datatype_Empty of string
   val name_of_typ : typ -> string
-  val dtyp_of_typ : (string * string list) list -> typ -> dtyp
+  val dtyp_of_typ : (string * (string * sort) list) list -> typ -> dtyp
   val mk_Free : string -> typ -> int -> term
   val is_rec_type : dtyp -> bool
   val typ_of_dtyp : descr -> dtyp -> typ
@@ -242,7 +242,7 @@
       (case AList.lookup (op =) new_dts tname of
         NONE => DtType (tname, map (dtyp_of_typ new_dts) Ts)
       | SOME vs =>
-          if map (try (fst o dest_TFree)) Ts = map SOME vs then
+          if map (try dest_TFree) Ts = map SOME vs then
             DtRec (find_index (curry op = tname o fst) new_dts)
           else error ("Illegal occurrence of recursive type " ^ quote tname));
 
--- a/src/HOL/Tools/Datatype/datatype_data.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Datatype/datatype_data.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -11,7 +11,8 @@
     thm -> thm list list -> thm list list -> theory -> string list * theory
   val rep_datatype : config -> (string list -> Proof.context -> Proof.context) ->
     term list -> theory -> Proof.state
-  val rep_datatype_cmd : string list -> theory -> Proof.state
+  val rep_datatype_cmd : config -> (string list -> Proof.context -> Proof.context) ->
+    string list -> theory -> Proof.state
   val get_info : theory -> string -> info option
   val the_info : theory -> string -> info
   val the_descr : theory -> string list ->
@@ -28,8 +29,6 @@
   val make_case :  Proof.context -> Datatype_Case.config -> string list -> term ->
     (term * term) list -> term
   val strip_case : Proof.context -> bool -> term -> (term * (term * term) list) option
-  val read_typ: theory -> string -> (string * sort) list -> typ * (string * sort) list
-  val cert_typ: theory -> typ -> (string * sort) list -> typ * (string * sort) list
   val mk_case_names_induct: descr -> attribute
   val setup: theory -> theory
 end;
@@ -171,27 +170,6 @@
 
 (** various auxiliary **)
 
-(* prepare datatype specifications *)
-
-fun read_typ thy str sorts =
-  let
-    val ctxt = Proof_Context.init_global thy
-      |> fold (Variable.declare_typ o TFree) sorts;
-    val T = Syntax.read_typ ctxt str;
-  in (T, Term.add_tfreesT T sorts) end;
-
-fun cert_typ sign raw_T sorts =
-  let
-    val T = Type.no_tvars (Sign.certify_typ sign raw_T)
-      handle TYPE (msg, _, _) => error msg;
-    val sorts' = Term.add_tfreesT T sorts;
-    val _ =
-      (case duplicates (op =) (map fst sorts') of
-        [] => ()
-      | dups => error ("Inconsistent sort constraints for " ^ commas dups));
-  in (T, sorts') end;
-
-
 (* case names *)
 
 local
@@ -323,12 +301,13 @@
     val ((case_rewrites, case_names), thy6) = thy5
       |> Datatype_Abs_Proofs.prove_case_thms config new_type_names descr rec_names rec_rewrites;
     val (case_congs, thy7) = thy6
-      |> Datatype_Abs_Proofs.prove_case_congs new_type_names descr nchotomys case_rewrites;
+      |> Datatype_Abs_Proofs.prove_case_congs new_type_names case_names descr
+        nchotomys case_rewrites;
     val (weak_case_congs, thy8) = thy7
-      |> Datatype_Abs_Proofs.prove_weak_case_congs new_type_names descr;
+      |> Datatype_Abs_Proofs.prove_weak_case_congs new_type_names case_names descr;
     val (splits, thy9) = thy8
       |> Datatype_Abs_Proofs.prove_split_thms
-        config new_type_names descr inject distinct exhaust case_rewrites;
+        config new_type_names case_names descr inject distinct exhaust case_rewrites;
 
     val inducts = Project_Rule.projections (Proof_Context.init_global thy2) induct;
     val dt_infos = map_index
@@ -427,8 +406,7 @@
         (TFree o (the o AList.lookup (op =) (map fst raw_vs ~~ vs)) o fst o dest_TFree) T);
 
     val cs = map (apsnd (map norm_constr)) raw_cs;
-    val dtyps_of_typ =
-      map (Datatype_Aux.dtyp_of_typ (map (rpair (map fst vs) o fst) cs)) o binder_types;
+    val dtyps_of_typ = map (Datatype_Aux.dtyp_of_typ (map (rpair vs o fst) cs)) o binder_types;
     val dt_names = map fst cs;
 
     fun mk_spec (i, (tyco, constr)) =
@@ -455,7 +433,7 @@
   end;
 
 val rep_datatype = gen_rep_datatype Sign.cert_term;
-val rep_datatype_cmd = gen_rep_datatype Syntax.read_term_global Datatype_Aux.default_config (K I);
+val rep_datatype_cmd = gen_rep_datatype Syntax.read_term_global;
 
 
 
@@ -474,7 +452,8 @@
 val _ =
   Outer_Syntax.command "rep_datatype" "represent existing types inductively" Keyword.thy_goal
     (Scan.repeat1 Parse.term >> (fn ts =>
-      Toplevel.print o Toplevel.theory_to_proof (rep_datatype_cmd ts)));
+      Toplevel.print o
+      Toplevel.theory_to_proof (rep_datatype_cmd Datatype_Aux.default_config (K I) ts)));
 
 
 open Datatype_Aux;
--- a/src/HOL/Tools/Datatype/datatype_prop.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Datatype/datatype_prop.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -204,7 +204,7 @@
 
   in (rec_result_Ts, reccomb_fn_Ts) end;
 
-fun make_primrecs new_type_names descr thy =
+fun make_primrecs reccomb_names descr thy =
   let
     val descr' = flat descr;
     val recTs = Datatype_Aux.get_rec_types descr';
@@ -216,11 +216,6 @@
       map (uncurry (Datatype_Aux.mk_Free "f"))
         (reccomb_fn_Ts ~~ (1 upto (length reccomb_fn_Ts)));
 
-    val big_reccomb_name = space_implode "_" new_type_names ^ "_rec";
-    val reccomb_names =
-      map (Sign.intern_const thy)
-        (if length descr' = 1 then [big_reccomb_name]
-         else (map (prefix (big_reccomb_name ^ "_") o string_of_int) (1 upto length descr')));
     val reccombs =
       map (fn ((name, T), T') => list_comb (Const (name, reccomb_fn_Ts @ [T] ---> T'), rec_fns))
         (reccomb_names ~~ recTs ~~ rec_result_Ts);
@@ -256,7 +251,7 @@
 
 (****************** make terms of form  t_case f1 ... fn  *********************)
 
-fun make_case_combs new_type_names descr thy fname =
+fun make_case_combs case_names descr thy fname =
   let
     val descr' = flat descr;
     val recTs = Datatype_Aux.get_rec_types descr';
@@ -268,8 +263,6 @@
       map (fn (_, cargs) =>
         let val Ts = map (Datatype_Aux.typ_of_dtyp descr') cargs
         in Ts ---> T' end) constrs) (hd descr);
-
-    val case_names = map (fn s => Sign.intern_const thy (s ^ "_case")) new_type_names;
   in
     map (fn ((name, Ts), T) => list_comb
       (Const (name, Ts @ [T] ---> T'),
@@ -279,7 +272,7 @@
 
 (**************** characteristic equations for case combinator ****************)
 
-fun make_cases new_type_names descr thy =
+fun make_cases case_names descr thy =
   let
     val descr' = flat descr;
     val recTs = Datatype_Aux.get_rec_types descr';
@@ -296,14 +289,14 @@
       end;
   in
     map (fn (((_, (_, _, constrs)), T), comb_t) =>
-      map (make_case T comb_t) (constrs ~~ (snd (strip_comb comb_t))))
-        ((hd descr) ~~ newTs ~~ (make_case_combs new_type_names descr thy "f"))
+      map (make_case T comb_t) (constrs ~~ snd (strip_comb comb_t)))
+        (hd descr ~~ newTs ~~ make_case_combs case_names descr thy "f")
   end;
 
 
 (*************************** the "split" - equations **************************)
 
-fun make_splits new_type_names descr thy =
+fun make_splits case_names descr thy =
   let
     val descr' = flat descr;
     val recTs = Datatype_Aux.get_rec_types descr';
@@ -338,14 +331,14 @@
       end
 
   in
-    map make_split (hd descr ~~ newTs ~~ make_case_combs new_type_names descr thy "f")
+    map make_split (hd descr ~~ newTs ~~ make_case_combs case_names descr thy "f")
   end;
 
 (************************* additional rules for TFL ***************************)
 
-fun make_weak_case_congs new_type_names descr thy =
+fun make_weak_case_congs case_names descr thy =
   let
-    val case_combs = make_case_combs new_type_names descr thy "f";
+    val case_combs = make_case_combs case_names descr thy "f";
 
     fun mk_case_cong comb =
       let
@@ -372,10 +365,10 @@
  *      (ty_case f1..fn M = ty_case g1..gn M')
  *---------------------------------------------------------------------------*)
 
-fun make_case_congs new_type_names descr thy =
+fun make_case_congs case_names descr thy =
   let
-    val case_combs = make_case_combs new_type_names descr thy "f";
-    val case_combs' = make_case_combs new_type_names descr thy "g";
+    val case_combs = make_case_combs case_names descr thy "f";
+    val case_combs' = make_case_combs case_names descr thy "g";
 
     fun mk_case_cong ((comb, comb'), (_, (_, _, constrs))) =
       let
--- a/src/HOL/Tools/Metis/metis_tactic.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Metis/metis_tactic.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -83,7 +83,7 @@
         if Meson_Clausify.is_quasi_lambda_free (term_of ct) then
           Thm.reflexive ct
         else case term_of ct of
-          t as Abs (_, _, u) =>
+          Abs (_, _, u) =>
           if first then
             case add_vars_and_frees u [] of
               [] =>
--- a/src/HOL/Tools/Predicate_Compile/predicate_compile_aux.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Predicate_Compile/predicate_compile_aux.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -903,9 +903,9 @@
 fun datatype_names_of_case_name thy case_name =
   map (#1 o #2) (#descr (the (Datatype_Data.info_of_case thy case_name)))
 
-fun make_case_distribs new_type_names descr thy =
+fun make_case_distribs case_names descr thy =
   let
-    val case_combs = Datatype_Prop.make_case_combs new_type_names descr thy "f";
+    val case_combs = Datatype_Prop.make_case_combs case_names descr thy "f";
     fun make comb =
       let
         val Type ("fun", [T, T']) = fastype_of comb;
@@ -932,10 +932,10 @@
 
 fun case_rewrites thy Tcon =
   let
-    val {descr, ...} = Datatype.the_info thy Tcon
+    val {descr, case_name, ...} = Datatype.the_info thy Tcon
   in
     map (Drule.export_without_context o Skip_Proof.make_thm thy o HOLogic.mk_Trueprop)
-      (make_case_distribs [Tcon] [descr] thy)
+      (make_case_distribs [case_name] [descr] thy)
   end
 
 fun instantiated_case_rewrites thy Tcon =
--- a/src/HOL/Tools/Quotient/quotient_type.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/Quotient/quotient_type.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -277,6 +277,7 @@
 val quotspec_parser =
   Parse.and_list1
     ((Parse.type_args -- Parse.binding) --
+      (* FIXME Parse.type_args_constrained and standard treatment of sort constraints *)
       Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.typ) --
         (Parse.$$$ "/" |-- (partial -- Parse.term))  --
         Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding)))
--- a/src/HOL/Tools/inductive_realizer.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/inductive_realizer.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -69,8 +69,9 @@
         filter_out (equal Extraction.nullT) (map
           (Logic.unvarifyT_global o Extraction.etype_of thy vs []) (prems_of intr)),
             NoSyn);
-  in (map (fn a => "'" ^ a) vs @ map (fst o fst) iTs, tname, NoSyn,
-    map constr_of_intr intrs)
+  in
+    ((tname, map (rpair dummyS) (map (fn a => "'" ^ a) vs @ map (fst o fst) iTs), NoSyn),
+      map constr_of_intr intrs)
   end;
 
 fun mk_rlz T = Const ("realizes", [T, HOLogic.boolT] ---> HOLogic.boolT);
@@ -233,8 +234,9 @@
       end) concls rec_names)
   end;
 
-fun add_dummy name dname (x as (_, (vs, s, mfx, cs))) =
-  if Binding.eq_name (name, s) then (true, (vs, s, mfx, (dname, [HOLogic.unitT], NoSyn) :: cs))
+fun add_dummy name dname (x as (_, ((s, vs, mx), cs))) =
+  if Binding.eq_name (name, s)
+  then (true, ((s, vs, mx), (dname, [HOLogic.unitT], NoSyn) :: cs))
   else x;
 
 fun add_dummies f [] _ thy =
--- a/src/HOL/Tools/record.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Tools/record.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -49,8 +49,6 @@
   val updateN: string
   val ext_typeN: string
   val extN: string
-  val read_typ: Proof.context -> string -> (string * sort) list -> typ * (string * sort) list
-  val cert_typ: Proof.context -> typ -> (string * sort) list -> typ * (string * sort) list
   val setup: theory -> theory
 end;
 
@@ -1489,24 +1487,6 @@
 
 (** theory extender interface **)
 
-(* prepare arguments *)
-
-fun read_typ ctxt raw_T env =
-  let
-    val ctxt' = fold (Variable.declare_typ o TFree) env ctxt;
-    val T = Syntax.read_typ ctxt' raw_T;
-    val env' = Term.add_tfreesT T env;
-  in (T, env') end;
-
-fun cert_typ ctxt raw_T env =
-  let
-    val thy = Proof_Context.theory_of ctxt;
-    val T = Type.no_tvars (Sign.certify_typ thy raw_T)
-      handle TYPE (msg, _, _) => error msg;
-    val env' = Term.add_tfreesT T env;
-  in (T, env') end;
-
-
 (* attributes *)
 
 fun case_names_fields x = Rule_Cases.case_names ["fields"] x;
--- a/src/HOL/Word/Bit_Int.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Word/Bit_Int.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -54,7 +54,7 @@
 lemma bin_rec_Bit:
   "f = bin_rec f1 f2 f3  ==> f3 Int.Pls (0::bit) f1 = f1 ==> 
     f3 Int.Min (1::bit) f2 = f2 ==> f (w BIT b) = f3 w b (f w)"
-  by (cases b, simp add: bin_rec_Bit0, simp add: bin_rec_Bit1)
+  by (cases b, simp add: bin_rec_Bit0 BIT_simps, simp add: bin_rec_Bit1 BIT_simps)
 
 lemmas bin_rec_simps = refl [THEN bin_rec_Bit] bin_rec_Pls bin_rec_Min
   bin_rec_Bit0 bin_rec_Bit1
@@ -95,7 +95,8 @@
   "NOT (Int.Bit0 w) = Int.Bit1 (NOT w)"
   "NOT (Int.Bit1 w) = Int.Bit0 (NOT w)"
   "NOT (w BIT b) = (NOT w) BIT (NOT b)"
-  unfolding int_not_def Pls_def [symmetric] Min_def [symmetric] by (simp_all add: bin_rec_simps)
+  unfolding int_not_def Pls_def [symmetric] Min_def [symmetric]
+  by (simp_all add: bin_rec_simps BIT_simps)
 
 lemma int_xor_Pls [simp]: 
   "Int.Pls XOR x = x"
@@ -133,7 +134,8 @@
 
 lemma int_or_Bits [simp]: 
   "(x BIT b) OR (y BIT c) = (x OR y) BIT (b OR c)"
-  unfolding int_or_def Pls_def [symmetric] Min_def [symmetric] by (simp add: bin_rec_simps)
+  unfolding int_or_def Pls_def [symmetric] Min_def [symmetric]
+  by (simp add: bin_rec_simps BIT_simps)
 
 lemma int_or_Bits2 [simp]: 
   "(Int.Bit0 x) OR (Int.Bit0 y) = Int.Bit0 (x OR y)"
@@ -144,7 +146,7 @@
 
 lemma int_and_Pls [simp]:
   "Int.Pls AND x = Int.Pls"
-  unfolding int_and_def by (simp add: bin_rec_PM)
+  unfolding int_and_def Pls_def [symmetric] by (simp add: bin_rec_PM)
 
 lemma int_and_Min [simp]:
   "Int.Min AND x = x"
@@ -152,7 +154,8 @@
 
 lemma int_and_Bits [simp]: 
   "(x BIT b) AND (y BIT c) = (x AND y) BIT (b AND c)" 
-  unfolding int_and_def Pls_def [symmetric] Min_def [symmetric] by (simp add: bin_rec_simps)
+  unfolding int_and_def Pls_def [symmetric] Min_def [symmetric]
+  by (simp add: bin_rec_simps BIT_simps)
 
 lemma int_and_Bits2 [simp]: 
   "(Int.Bit0 x) AND (Int.Bit0 y) = Int.Bit0 (x AND y)"
@@ -322,7 +325,7 @@
   apply (case_tac x rule: bin_exhaust)
   apply (case_tac b)
    apply (case_tac [!] bit)
-     apply (auto simp: less_eq_int_code)
+     apply (auto simp: less_eq_int_code BIT_simps)
   done
 
 lemmas int_and_le =
@@ -334,7 +337,7 @@
   apply (induct x rule: bin_induct)
     apply clarsimp
    apply clarsimp
-  apply (case_tac bit, auto)
+  apply (case_tac bit, auto simp: BIT_simps)
   done
 
 subsubsection {* Truncating results of bit-wise operations *}
@@ -447,10 +450,10 @@
   done
 
 lemma bin_sc_FP [simp]: "bin_sc n 0 Int.Pls = Int.Pls"
-  by (induct n) auto
+  by (induct n) (auto simp: BIT_simps)
 
 lemma bin_sc_TM [simp]: "bin_sc n 1 Int.Min = Int.Min"
-  by (induct n) auto
+  by (induct n) (auto simp: BIT_simps)
   
 lemmas bin_sc_simps = bin_sc.Z bin_sc.Suc bin_sc_TM bin_sc_FP
 
@@ -566,7 +569,7 @@
 
 lemma bin_split_Pls [simp]:
   "bin_split n Int.Pls = (Int.Pls, Int.Pls)"
-  by (induct n) (auto simp: Let_def split: ls_splits)
+  by (induct n) (auto simp: Let_def BIT_simps split: ls_splits)
 
 lemma bin_split_Min [simp]:
   "bin_split n Int.Min = (Int.Min, bintrunc n Int.Min)"
@@ -578,7 +581,7 @@
   apply (induct n, clarsimp)
   apply (simp add: bin_rest_trunc Let_def split: ls_splits)
   apply (case_tac m)
-   apply (auto simp: Let_def split: ls_splits)
+   apply (auto simp: Let_def BIT_simps split: ls_splits)
   done
 
 lemma bin_split_trunc1:
@@ -587,7 +590,7 @@
   apply (induct n, clarsimp)
   apply (simp add: bin_rest_trunc Let_def split: ls_splits)
   apply (case_tac m)
-   apply (auto simp: Let_def split: ls_splits)
+   apply (auto simp: Let_def BIT_simps split: ls_splits)
   done
 
 lemma bin_cat_num:
@@ -598,7 +601,7 @@
 
 lemma bin_split_num:
   "!!b. bin_split n b = (b div 2 ^ n, b mod 2 ^ n)"
-  apply (induct n, clarsimp)
+  apply (induct n, simp add: Pls_def)
   apply (simp add: bin_rest_def zdiv_zmult2_eq)
   apply (case_tac b rule: bin_exhaust)
   apply simp
--- a/src/HOL/Word/Bit_Representation.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Word/Bit_Representation.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -23,45 +23,66 @@
 definition Bit :: "int \<Rightarrow> bit \<Rightarrow> int" (infixl "BIT" 90) where
   "k BIT b = bitval b + k + k"
 
-lemma BIT_B0_eq_Bit0 [simp]: "w BIT 0 = Int.Bit0 w"
+definition bin_last :: "int \<Rightarrow> bit" where
+  "bin_last w = (if w mod 2 = 0 then (0::bit) else (1::bit))"
+
+definition bin_rest :: "int \<Rightarrow> int" where
+  "bin_rest w = w div 2"
+
+lemma bin_rl_simp [simp]:
+  "bin_rest w BIT bin_last w = w"
+  unfolding bin_rest_def bin_last_def Bit_def
+  using mod_div_equality [of w 2]
+  by (cases "w mod 2 = 0", simp_all)
+
+lemma bin_rest_BIT: "bin_rest (x BIT b) = x"
+  unfolding bin_rest_def Bit_def
+  by (cases b, simp_all)
+
+lemma bin_last_BIT: "bin_last (x BIT b) = b"
+  unfolding bin_last_def Bit_def
+  by (cases b, simp_all add: z1pmod2)
+
+lemma BIT_eq_iff [iff]: "u BIT b = v BIT c \<longleftrightarrow> u = v \<and> b = c"
+  by (metis bin_rest_BIT bin_last_BIT)
+
+lemma BIT_bin_simps [simp]:
+  "number_of w BIT 0 = number_of (Int.Bit0 w)"
+  "number_of w BIT 1 = number_of (Int.Bit1 w)"
+  unfolding Bit_def number_of_is_id numeral_simps by simp_all
+
+lemma BIT_special_simps [simp]:
+  shows "0 BIT 0 = 0" and "0 BIT 1 = 1" and "1 BIT 0 = 2" and "1 BIT 1 = 3"
+  unfolding Bit_def by simp_all
+
+lemma bin_last_numeral_simps [simp]:
+  "bin_last 0 = 0"
+  "bin_last 1 = 1"
+  "bin_last -1 = 1"
+  "bin_last (number_of (Int.Bit0 w)) = 0"
+  "bin_last (number_of (Int.Bit1 w)) = 1"
+  unfolding bin_last_def by simp_all
+
+lemma bin_rest_numeral_simps [simp]:
+  "bin_rest 0 = 0"
+  "bin_rest 1 = 0"
+  "bin_rest -1 = -1"
+  "bin_rest (number_of (Int.Bit0 w)) = number_of w"
+  "bin_rest (number_of (Int.Bit1 w)) = number_of w"
+  unfolding bin_rest_def by simp_all
+
+lemma BIT_B0_eq_Bit0: "w BIT 0 = Int.Bit0 w"
   unfolding Bit_def Bit0_def by simp
 
-lemma BIT_B1_eq_Bit1 [simp]: "w BIT 1 = Int.Bit1 w"
+lemma BIT_B1_eq_Bit1: "w BIT 1 = Int.Bit1 w"
   unfolding Bit_def Bit1_def by simp
 
 lemmas BIT_simps = BIT_B0_eq_Bit0 BIT_B1_eq_Bit1
 
-lemma Min_ne_Pls [iff]:  
-  "Int.Min ~= Int.Pls"
-  unfolding Min_def Pls_def by auto
-
-lemmas Pls_ne_Min [iff] = Min_ne_Pls [symmetric]
-
-lemmas PlsMin_defs [intro!] = 
-  Pls_def Min_def Pls_def [symmetric] Min_def [symmetric]
-
-lemmas PlsMin_simps [simp] = PlsMin_defs [THEN Eq_TrueI]
-
 lemma number_of_False_cong: 
   "False \<Longrightarrow> number_of x = number_of y"
   by (rule FalseE)
 
-(** ways in which type Bin resembles a datatype **)
-
-lemma BIT_eq: "u BIT b = v BIT c ==> u = v & b = c"
-  apply (cases b) apply (simp_all)
-  apply (cases c) apply (simp_all)
-  apply (cases c) apply (simp_all)
-  done
-     
-lemmas BIT_eqE [elim!] = BIT_eq [THEN conjE]
-
-lemma BIT_eq_iff [simp]: 
-  "(u BIT b = v BIT c) = (u = v \<and> b = c)"
-  by (rule iffI) auto
-
-lemmas BIT_eqI [intro!] = conjI [THEN BIT_eq_iff [THEN iffD2]]
-
 lemma less_Bits: 
   "(v BIT b < w BIT c) = (v < w | v <= w & b = (0::bit) & c = (1::bit))"
   unfolding Bit_def by (auto simp add: bitval_def split: bit.split)
@@ -127,21 +148,11 @@
 
 subsection {* Destructors for binary integers *}
 
-definition bin_last :: "int \<Rightarrow> bit" where
-  "bin_last w = (if w mod 2 = 0 then (0::bit) else (1::bit))"
-
-definition bin_rest :: "int \<Rightarrow> int" where
-  "bin_rest w = w div 2"
-
 definition bin_rl :: "int \<Rightarrow> int \<times> bit" where 
   "bin_rl w = (bin_rest w, bin_last w)"
 
 lemma bin_rl_char: "bin_rl w = (r, l) \<longleftrightarrow> r BIT l = w"
-  apply (cases l)
-  apply (auto simp add: bin_rl_def bin_last_def bin_rest_def)
-  unfolding Pls_def Min_def Bit0_def Bit1_def number_of_is_id
-  apply arith+
-  done
+  unfolding bin_rl_def by (auto simp: bin_rest_BIT bin_last_BIT)
 
 primrec bin_nth where
   Z: "bin_nth w 0 = (bin_last w = (1::bit))"
@@ -153,11 +164,7 @@
   "bin_rl (Int.Bit0 r) = (r, (0::bit))"
   "bin_rl (Int.Bit1 r) = (r, (1::bit))"
   "bin_rl (r BIT b) = (r, b)"
-  unfolding bin_rl_char by simp_all
-
-lemma bin_rl_simp [simp]:
-  "bin_rest w BIT bin_last w = w"
-  by (simp add: iffD1 [OF bin_rl_char bin_rl_def])
+  unfolding bin_rl_char by (simp_all add: BIT_simps)
 
 lemma bin_abs_lem:
   "bin = (w BIT b) ==> ~ bin = Int.Min --> ~ bin = Int.Pls -->
@@ -193,11 +200,11 @@
    apply (rule Min)
   apply (case_tac bit)
    apply (case_tac "bin = Int.Pls")
-    apply simp
-   apply (simp add: Bit0)
+    apply (simp add: BIT_simps)
+   apply (simp add: Bit0 BIT_simps)
   apply (case_tac "bin = Int.Min")
-   apply simp
-  apply (simp add: Bit1)
+   apply (simp add: BIT_simps)
+  apply (simp add: Bit1 BIT_simps)
   done
 
 lemma bin_rest_simps [simp]: 
@@ -216,25 +223,14 @@
   "bin_last (w BIT b) = b"
   using bin_rl_simps bin_rl_def by auto
 
-lemma bin_r_l_extras [simp]:
-  "bin_last 0 = (0::bit)"
-  "bin_last (- 1) = (1::bit)"
-  "bin_last -1 = (1::bit)"
-  "bin_last 1 = (1::bit)"
-  "bin_rest 1 = 0"
-  "bin_rest 0 = 0"
-  "bin_rest (- 1) = - 1"
-  "bin_rest -1 = -1"
-  by (simp_all add: bin_last_def bin_rest_def)
-
 lemma Bit_div2 [simp]: "(w BIT b) div 2 = w"
   unfolding bin_rest_def [symmetric] by auto
 
 lemma Bit0_div2 [simp]: "(Int.Bit0 w) div 2 = w"
-  using Bit_div2 [where b="(0::bit)"] by simp
+  using Bit_div2 [where b="(0::bit)"] by (simp add: BIT_simps)
 
 lemma Bit1_div2 [simp]: "(Int.Bit1 w) div 2 = w"
-  using Bit_div2 [where b="(1::bit)"] by simp
+  using Bit_div2 [where b="(1::bit)"] by (simp add: BIT_simps)
 
 lemma bin_nth_lem [rule_format]:
   "ALL y. bin_nth x = bin_nth y --> x = y"
@@ -246,20 +242,20 @@
       apply (drule_tac x=0 in fun_cong, force)
      apply (erule notE, rule ext, 
             drule_tac x="Suc x" in fun_cong, force)
-    apply (drule_tac x=0 in fun_cong, force)
+    apply (drule_tac x=0 in fun_cong, force simp: BIT_simps)
    apply (erule rev_mp)
    apply (induct_tac y rule: bin_induct)
      apply (safe del: subset_antisym)
      apply (drule_tac x=0 in fun_cong, force)
     apply (erule notE, rule ext, 
            drule_tac x="Suc x" in fun_cong, force)
-   apply (drule_tac x=0 in fun_cong, force)
+   apply (drule_tac x=0 in fun_cong, force simp: BIT_simps)
   apply (case_tac y rule: bin_exhaust)
   apply clarify
   apply (erule allE)
   apply (erule impE)
    prefer 2
-   apply (erule BIT_eqI)
+   apply (erule conjI)
    apply (drule_tac x=0 in fun_cong, force)
   apply (rule ext)
   apply (drule_tac x="Suc ?x" in fun_cong, force)
@@ -273,6 +269,9 @@
 lemma bin_eq_iff: "x = y \<longleftrightarrow> (\<forall>n. bin_nth x n = bin_nth y n)"
   by (auto intro!: bin_nth_lem del: equalityI)
 
+lemma bin_nth_zero [simp]: "\<not> bin_nth 0 n"
+  by (induct n) auto
+
 lemma bin_nth_Pls [simp]: "~ bin_nth Int.Pls n"
   by (induct n) auto
 
@@ -290,11 +289,11 @@
 
 lemma bin_nth_minus_Bit0 [simp]:
   "0 < n ==> bin_nth (Int.Bit0 w) n = bin_nth w (n - 1)"
-  using bin_nth_minus [where b="(0::bit)"] by simp
+  using bin_nth_minus [where b="(0::bit)"] by (simp add: BIT_simps)
 
 lemma bin_nth_minus_Bit1 [simp]:
   "0 < n ==> bin_nth (Int.Bit1 w) n = bin_nth w (n - 1)"
-  using bin_nth_minus [where b="(1::bit)"] by simp
+  using bin_nth_minus [where b="(1::bit)"] by (simp add: BIT_simps)
 
 lemmas bin_nth_0 = bin_nth.simps(1)
 lemmas bin_nth_Suc = bin_nth.simps(2)
@@ -306,16 +305,21 @@
 
 subsection {* Truncating binary integers *}
 
-definition
+definition bin_sign :: "int \<Rightarrow> int" where
   bin_sign_def: "bin_sign k = (if k \<ge> 0 then 0 else - 1)"
 
 lemma bin_sign_simps [simp]:
+  "bin_sign 0 = 0"
+  "bin_sign -1 = -1"
+  "bin_sign (number_of (Int.Bit0 w)) = bin_sign (number_of w)"
+  "bin_sign (number_of (Int.Bit1 w)) = bin_sign (number_of w)"
   "bin_sign Int.Pls = Int.Pls"
   "bin_sign Int.Min = Int.Min"
   "bin_sign (Int.Bit0 w) = bin_sign w"
   "bin_sign (Int.Bit1 w) = bin_sign w"
   "bin_sign (w BIT b) = bin_sign w"
-  by (unfold bin_sign_def numeral_simps Bit_def bitval_def) (simp_all split: bit.split)
+  unfolding bin_sign_def numeral_simps Bit_def bitval_def number_of_is_id
+  by (simp_all split: bit.split)
 
 lemma bin_sign_rest [simp]: 
   "bin_sign (bin_rest w) = bin_sign w"
@@ -334,7 +338,9 @@
   "sbintrunc 0 bin = 
     (case bin_last bin of (1::bit) => - 1 | (0::bit) => 0)"
   "sbintrunc (Suc n) bin = sbintrunc n (bin_rest bin) BIT (bin_last bin)"
-  apply simp_all apply (cases "bin_last bin") apply simp apply (unfold Min_def number_of_is_id) apply simp done
+  apply simp_all
+  apply (simp only: Pls_def Min_def)
+  done
 
 lemma sign_bintr:
   "!!w. bin_sign (bintrunc n w) = Int.Pls"
@@ -342,7 +348,8 @@
 
 lemma bintrunc_mod2p:
   "!!w. bintrunc n w = (w mod 2 ^ n :: int)"
-  apply (induct n, clarsimp)
+  apply (induct n)
+  apply (simp add: Pls_def)
   apply (simp add: bin_last_def bin_rest_def Bit_def zmod_zmult2_eq
               cong: number_of_False_cong)
   done
@@ -354,7 +361,7 @@
    apply (subst mod_add_left_eq)
    apply (simp add: bin_last_def)
    apply (simp add: number_of_eq)
-  apply clarsimp
+  apply (simp add: Pls_def)
   apply (simp add: bin_last_def bin_rest_def Bit_def 
               cong: number_of_False_cong)
   apply (clarsimp simp: mod_mult_mult1 [symmetric] 
@@ -366,6 +373,34 @@
 
 subsection "Simplifications for (s)bintrunc"
 
+lemma bintrunc_n_0 [simp]: "bintrunc n 0 = 0"
+  by (induct n) (auto simp add: Int.Pls_def)
+
+lemma sbintrunc_n_0 [simp]: "sbintrunc n 0 = 0"
+  by (induct n) (auto simp add: Int.Pls_def)
+
+lemma sbintrunc_n_minus1 [simp]: "sbintrunc n -1 = -1"
+  by (induct n) (auto, simp add: number_of_is_id)
+
+lemma bintrunc_Suc_numeral:
+  "bintrunc (Suc n) 1 = 1"
+  "bintrunc (Suc n) -1 = bintrunc n -1 BIT 1"
+  "bintrunc (Suc n) (number_of (Int.Bit0 w)) = bintrunc n (number_of w) BIT 0"
+  "bintrunc (Suc n) (number_of (Int.Bit1 w)) = bintrunc n (number_of w) BIT 1"
+  by simp_all
+
+lemma sbintrunc_0_numeral [simp]:
+  "sbintrunc 0 1 = -1"
+  "sbintrunc 0 (number_of (Int.Bit0 w)) = 0"
+  "sbintrunc 0 (number_of (Int.Bit1 w)) = -1"
+  by (simp_all, unfold Pls_def number_of_is_id, simp_all)
+
+lemma sbintrunc_Suc_numeral:
+  "sbintrunc (Suc n) 1 = 1"
+  "sbintrunc (Suc n) (number_of (Int.Bit0 w)) = sbintrunc n (number_of w) BIT 0"
+  "sbintrunc (Suc n) (number_of (Int.Bit1 w)) = sbintrunc n (number_of w) BIT 1"
+  by simp_all
+
 lemma bit_bool:
   "(b = (b' = (1::bit))) = (b' = (if b then (1::bit) else (0::bit)))"
   by (cases b') auto
@@ -399,11 +434,11 @@
 
 lemma bin_nth_Bit0:
   "bin_nth (Int.Bit0 w) n = (EX m. n = Suc m & bin_nth w m)"
-  using bin_nth_Bit [where b="(0::bit)"] by simp
+  using bin_nth_Bit [where b="(0::bit)"] by (simp add: BIT_simps)
 
 lemma bin_nth_Bit1:
   "bin_nth (Int.Bit1 w) n = (n = 0 | (EX m. n = Suc m & bin_nth w m))"
-  using bin_nth_Bit [where b="(1::bit)"] by simp
+  using bin_nth_Bit [where b="(1::bit)"] by (simp add: BIT_simps)
 
 lemma bintrunc_bintrunc_l:
   "n <= m ==> (bintrunc m (bintrunc n w) = bintrunc n w)"
@@ -440,14 +475,15 @@
 
 lemma bintrunc_Bit0 [simp]:
   "bintrunc (Suc n) (Int.Bit0 w) = Int.Bit0 (bintrunc n w)"
-  using bintrunc_BIT [where b="(0::bit)"] by simp
+  using bintrunc_BIT [where b="(0::bit)"] by (simp add: BIT_simps)
 
 lemma bintrunc_Bit1 [simp]:
   "bintrunc (Suc n) (Int.Bit1 w) = Int.Bit1 (bintrunc n w)"
-  using bintrunc_BIT [where b="(1::bit)"] by simp
+  using bintrunc_BIT [where b="(1::bit)"] by (simp add: BIT_simps)
 
 lemmas bintrunc_Sucs = bintrunc_Pls bintrunc_Min bintrunc_BIT
   bintrunc_Bit0 bintrunc_Bit1
+  bintrunc_Suc_numeral
 
 lemmas sbintrunc_Suc_Pls = 
   sbintrunc.Suc [where bin="Int.Pls", simplified bin_last_simps bin_rest_simps]
@@ -460,14 +496,15 @@
 
 lemma sbintrunc_Suc_Bit0 [simp]:
   "sbintrunc (Suc n) (Int.Bit0 w) = Int.Bit0 (sbintrunc n w)"
-  using sbintrunc_Suc_BIT [where b="(0::bit)"] by simp
+  using sbintrunc_Suc_BIT [where b="(0::bit)"] by (simp add: BIT_simps)
 
 lemma sbintrunc_Suc_Bit1 [simp]:
   "sbintrunc (Suc n) (Int.Bit1 w) = Int.Bit1 (sbintrunc n w)"
-  using sbintrunc_Suc_BIT [where b="(1::bit)"] by simp
+  using sbintrunc_Suc_BIT [where b="(1::bit)"] by (simp add: BIT_simps)
 
 lemmas sbintrunc_Sucs = sbintrunc_Suc_Pls sbintrunc_Suc_Min sbintrunc_Suc_BIT
   sbintrunc_Suc_Bit0 sbintrunc_Suc_Bit1
+  sbintrunc_Suc_numeral
 
 lemmas sbintrunc_Pls = 
   sbintrunc.Z [where bin="Int.Pls", 
@@ -513,12 +550,12 @@
 
 lemma bintrunc_n_Pls [simp]:
   "bintrunc n Int.Pls = Int.Pls"
-  by (induct n) auto
+  by (induct n) (auto simp: BIT_simps)
 
 lemma sbintrunc_n_PM [simp]:
   "sbintrunc n Int.Pls = Int.Pls"
   "sbintrunc n Int.Min = Int.Min"
-  by (induct n) auto
+  by (induct n) (auto simp: BIT_simps)
 
 lemmas thobini1 = arg_cong [where f = "%w. w BIT b"] for b
 
@@ -531,9 +568,9 @@
 lemmas bintrunc_BIT_minus_I = bmsts(3)
 
 lemma bintrunc_0_Min: "bintrunc 0 Int.Min = Int.Pls"
-  by auto
+  by (fact bintrunc.Z) (* FIXME: delete *)
 lemma bintrunc_0_BIT: "bintrunc 0 (w BIT b) = Int.Pls"
-  by auto
+  by (fact bintrunc.Z) (* FIXME: delete *)
 
 lemma bintrunc_Suc_lem:
   "bintrunc (Suc n) x = y ==> m = Suc n ==> bintrunc m x = y"
--- a/src/HOL/Word/Bool_List_Representation.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Word/Bool_List_Representation.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -184,7 +184,7 @@
   done
 
 lemma bl_to_bin_False: "bl_to_bin (False # bl) = bl_to_bin bl"
-  unfolding bl_to_bin_def by auto
+  unfolding bl_to_bin_def by (auto simp: BIT_simps)
   
 lemma bl_to_bin_Nil: "bl_to_bin [] = Int.Pls"
   unfolding bl_to_bin_def by auto
@@ -231,8 +231,10 @@
     apply auto
   done
 
-lemmas bin_to_bl_bintr = 
-  bin_to_bl_aux_bintr [where bl = "[]", folded bin_to_bl_def]
+lemma bin_to_bl_bintr:
+  "bin_to_bl n (bintrunc m bin) =
+    replicate (n - m) False @ bin_to_bl (min n m) bin"
+  unfolding bin_to_bl_def by (rule bin_to_bl_aux_bintr)
 
 lemma bl_to_bin_rep_False: "bl_to_bin (replicate n False) = Int.Pls"
   by (induct n) auto
@@ -301,7 +303,8 @@
   apply arith
   done
 
-lemmas nth_rev_alt = nth_rev [where xs = "rev ys", simplified] for ys
+lemma nth_rev_alt: "n < length ys \<Longrightarrow> ys ! n = rev ys ! (length ys - Suc n)"
+  by (simp add: nth_rev)
 
 lemma nth_bin_to_bl_aux [rule_format] : 
   "ALL w n bl. n < m + length bl --> (bin_to_bl_aux m w bl) ! n = 
@@ -323,7 +326,8 @@
   apply clarsimp
   apply safe
   apply (erule allE, erule xtr8 [rotated],
-         simp add: numeral_simps algebra_simps cong add : number_of_False_cong)+
+         simp add: numeral_simps algebra_simps BIT_simps
+         cong add: number_of_False_cong)+
   done
 
 lemma bl_to_bin_lt2p: "bl_to_bin bs < (2 ^ length bs)"
@@ -341,14 +345,15 @@
   apply clarsimp
   apply safe
    apply (erule allE, erule preorder_class.order_trans [rotated],
-          simp add: numeral_simps algebra_simps cong add : number_of_False_cong)+
+          simp add: numeral_simps algebra_simps BIT_simps
+          cong add: number_of_False_cong)+
   done
 
 lemma bl_to_bin_ge0: "bl_to_bin bs >= 0"
   apply (unfold bl_to_bin_def)
   apply (rule xtr4)
    apply (rule bl_to_bin_ge2p_aux)
-  apply simp
+  apply (simp add: Pls_def)
   done
 
 lemma butlast_rest_bin: 
@@ -360,8 +365,9 @@
   apply (auto simp add: bin_to_bl_aux_alt)
   done
 
-lemmas butlast_bin_rest = butlast_rest_bin
-  [where w="bl_to_bin bl" and n="length bl", simplified] for bl
+lemma butlast_bin_rest:
+  "butlast bl = bin_to_bl (length bl - Suc 0) (bin_rest (bl_to_bin bl))"
+  using butlast_rest_bin [where w="bl_to_bin bl" and n="length bl"] by simp
 
 lemma butlast_rest_bl2bin_aux:
   "bl ~= [] \<Longrightarrow>
@@ -384,13 +390,13 @@
   apply safe
    apply (case_tac "m - size list")
     apply (simp add : diff_is_0_eq [THEN iffD1, THEN Suc_diff_le])
-   apply simp
+   apply (simp add: BIT_simps)
    apply (rule_tac f = "%nat. bl_to_bin_aux list (Int.Bit1 (bintrunc nat w))" 
                    in arg_cong) 
    apply simp
   apply (case_tac "m - size list")
    apply (simp add: diff_is_0_eq [THEN iffD1, THEN Suc_diff_le])
-  apply simp
+  apply (simp add: BIT_simps)
   apply (rule_tac f = "%nat. bl_to_bin_aux list (Int.Bit0 (bintrunc nat w))" 
                   in arg_cong) 
   apply simp
@@ -400,8 +406,9 @@
   "bintrunc m (bl_to_bin bl) = bl_to_bin (drop (length bl - m) bl)"
   unfolding bl_to_bin_def by (simp add : trunc_bl2bin_aux)
   
-lemmas trunc_bl2bin_len [simp] =
-  trunc_bl2bin [of "length bl" bl, simplified] for bl
+lemma trunc_bl2bin_len [simp]:
+  "bintrunc (length bl) (bl_to_bin bl) = bl_to_bin bl"
+  by (simp add: trunc_bl2bin)
 
 lemma bl2bin_drop: 
   "bl_to_bin (drop k bl) = bintrunc (length bl - k) (bl_to_bin bl)"
@@ -491,17 +498,20 @@
   apply clarsimp
   done
 
-lemmas bl_not_bin = bl_not_aux_bin
-  [where cs = "[]", unfolded bin_to_bl_def [symmetric] map.simps]
+lemma bl_not_bin: "map Not (bin_to_bl n w) = bin_to_bl n (NOT w)"
+  unfolding bin_to_bl_def by (simp add: bl_not_aux_bin)
 
-lemmas bl_and_bin = bl_and_aux_bin [where bs="[]" and cs="[]", 
-                                    unfolded map2_Nil, folded bin_to_bl_def]
+lemma bl_and_bin:
+  "map2 (op \<and>) (bin_to_bl n v) (bin_to_bl n w) = bin_to_bl n (v AND w)"
+  unfolding bin_to_bl_def by (simp add: bl_and_aux_bin)
 
-lemmas bl_or_bin = bl_or_aux_bin [where bs="[]" and cs="[]", 
-                                  unfolded map2_Nil, folded bin_to_bl_def]
+lemma bl_or_bin:
+  "map2 (op \<or>) (bin_to_bl n v) (bin_to_bl n w) = bin_to_bl n (v OR w)"
+  unfolding bin_to_bl_def by (simp add: bl_or_aux_bin)
 
-lemmas bl_xor_bin = bl_xor_aux_bin [where bs="[]" and cs="[]", 
-                                    unfolded map2_Nil, folded bin_to_bl_def]
+lemma bl_xor_bin:
+  "map2 (\<lambda>x y. x \<noteq> y) (bin_to_bl n v) (bin_to_bl n w) = bin_to_bl n (v XOR w)"
+  unfolding bin_to_bl_def by (simp only: bl_xor_aux_bin map2_Nil)
 
 lemma drop_bin2bl_aux [rule_format] : 
   "ALL m bin bs. drop m (bin_to_bl_aux n bin bs) = 
@@ -615,10 +625,11 @@
 lemma bl_bin_bl_rtf:
   "bin_to_bl n (bl_to_bin bl) = rev (takefill False n (rev bl))"
   by (simp add : takefill_bintrunc)
-  
-lemmas bl_bin_bl_rep_drop = 
-  bl_bin_bl_rtf [simplified takefill_alt,
-                 simplified, simplified rev_take, simplified]
+
+lemma bl_bin_bl_rep_drop:
+  "bin_to_bl n (bl_to_bin bl) =
+    replicate (n - length bl) False @ drop (length bl - n) bl"
+  by (simp add: bl_bin_bl_rtf takefill_alt rev_take)
 
 lemma tf_rev:
   "n + k = m + length bl ==> takefill x m (rev (takefill y n bl)) = 
@@ -660,12 +671,15 @@
     bin_to_bl_aux nv v (bin_to_bl_aux nw w bs)"
   by (induct nw) auto 
 
-lemmas bl_to_bin_aux_alt = 
-  bl_to_bin_aux_cat [where nv = "0" and v = "Int.Pls", 
-    simplified bl_to_bin_def [symmetric], simplified]
+lemma bl_to_bin_aux_alt:
+  "bl_to_bin_aux bs w = bin_cat w (length bs) (bl_to_bin bs)"
+  using bl_to_bin_aux_cat [where nv = "0" and v = "Int.Pls"]
+  unfolding bl_to_bin_def [symmetric] by simp
 
-lemmas bin_to_bl_cat =
-  bin_to_bl_aux_cat [where bs = "[]", folded bin_to_bl_def]
+lemma bin_to_bl_cat:
+  "bin_to_bl (nv + nw) (bin_cat v nw w) =
+    bin_to_bl_aux nv v (bin_to_bl nw w)"
+  unfolding bin_to_bl_def by (simp add: bin_to_bl_aux_cat)
 
 lemmas bl_to_bin_aux_app_cat = 
   trans [OF bl_to_bin_aux_append bl_to_bin_aux_alt]
@@ -673,11 +687,13 @@
 lemmas bin_to_bl_aux_cat_app =
   trans [OF bin_to_bl_aux_cat bin_to_bl_aux_alt]
 
-lemmas bl_to_bin_app_cat = bl_to_bin_aux_app_cat
-  [where w = "Int.Pls", folded bl_to_bin_def]
+lemma bl_to_bin_app_cat:
+  "bl_to_bin (bsa @ bs) = bin_cat (bl_to_bin bsa) (length bs) (bl_to_bin bs)"
+  by (simp only: bl_to_bin_aux_app_cat bl_to_bin_def)
 
-lemmas bin_to_bl_cat_app = bin_to_bl_aux_cat_app
-  [where bs = "[]", folded bin_to_bl_def]
+lemma bin_to_bl_cat_app:
+  "bin_to_bl (n + nw) (bin_cat w nw wa) = bin_to_bl n w @ bin_to_bl nw wa"
+  by (simp only: bin_to_bl_def bin_to_bl_aux_cat_app)
 
 (* bl_to_bin_app_cat_alt and bl_to_bin_app_cat are easily interderivable *)
 lemma bl_to_bin_app_cat_alt: 
@@ -688,10 +704,10 @@
     Int.succ (bl_to_bin (replicate n True))"
   apply (unfold bl_to_bin_def)
   apply (induct n)
-   apply simp
+   apply (simp add: BIT_simps)
   apply (simp only: Suc_eq_plus1 replicate_add
                     append_Cons [symmetric] bl_to_bin_aux_append)
-  apply simp
+  apply (simp add: BIT_simps)
   done
 
 (* function bl_of_nth *)
@@ -725,7 +741,8 @@
   apply simp
   done
 
-lemmas bl_of_nth_nth [simp] = order_refl [THEN bl_of_nth_nth_le, simplified]
+lemma bl_of_nth_nth [simp]: "bl_of_nth (length xs) (op ! (rev xs)) = xs"
+  by (simp add: bl_of_nth_nth_le)
 
 lemma size_rbl_pred: "length (rbl_pred bl) = length bl"
   by (induct bl) auto
@@ -754,7 +771,7 @@
   apply clarsimp
   apply (case_tac bin rule: bin_exhaust)
   apply (case_tac b)
-   apply (clarsimp simp: bin_to_bl_aux_alt)+
+   apply (clarsimp simp: bin_to_bl_aux_alt BIT_simps)+
   done
 
 lemma rbl_succ: 
@@ -764,7 +781,7 @@
   apply clarsimp
   apply (case_tac bin rule: bin_exhaust)
   apply (case_tac b)
-   apply (clarsimp simp: bin_to_bl_aux_alt)+
+   apply (clarsimp simp: bin_to_bl_aux_alt BIT_simps)+
   done
 
 lemma rbl_add: 
@@ -777,7 +794,7 @@
   apply (case_tac binb rule: bin_exhaust)
   apply (case_tac b)
    apply (case_tac [!] "ba")
-     apply (auto simp: rbl_succ succ_def bin_to_bl_aux_alt Let_def add_ac)
+     apply (auto simp: rbl_succ succ_def bin_to_bl_aux_alt Let_def add_ac BIT_simps)
   done
 
 lemma rbl_add_app2: 
@@ -863,8 +880,8 @@
   apply (case_tac binb rule: bin_exhaust)
   apply (case_tac b)
    apply (case_tac [!] "ba")
-     apply (auto simp: bin_to_bl_aux_alt Let_def)
-     apply (auto simp: rbbl_Cons rbl_mult_Suc rbl_add)
+     apply (auto simp: bin_to_bl_aux_alt Let_def BIT_simps)
+     apply (auto simp: rbbl_Cons rbl_mult_Suc rbl_add BIT_simps)
   done
 
 lemma rbl_add_split: 
@@ -927,8 +944,9 @@
 
 lemmas if_simps = if_x_Not if_Not_x if_cancel if_True if_False if_bool_simps
 
-lemmas seqr = eq_reflection [where x = "size w"] for w
+lemmas seqr = eq_reflection [where x = "size w"] for w (* FIXME: delete *)
 
+(* TODO: move name bindings to List.thy *)
 lemmas tl_Nil = tl.simps (1)
 lemmas tl_Cons = tl.simps (2)
 
--- a/src/HOL/Word/Word.thy	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/HOL/Word/Word.thy	Thu Dec 15 09:13:32 2011 +0100
@@ -1255,7 +1255,7 @@
 
 lemma word_sp_01 [simp] : 
   "word_succ -1 = 0 & word_succ 0 = 1 & word_pred 0 = -1 & word_pred 1 = 0"
-  by (unfold word_0_no word_1_no) auto
+  by (unfold word_0_no word_1_no) (auto simp: BIT_simps)
 
 (* alternative approach to lifting arithmetic equalities *)
 lemma word_of_int_Ex:
@@ -2541,10 +2541,10 @@
    apply clarsimp
    apply (case_tac "n")
     apply (clarsimp simp add : word_1_wi [symmetric])
-   apply (clarsimp simp add : word_0_wi [symmetric])
+   apply (clarsimp simp add : word_0_wi [symmetric] BIT_simps)
   apply (drule word_gt_0 [THEN iffD1])
   apply (safe intro!: word_eqI bin_nth_lem ext)
-     apply (auto simp add: test_bit_2p nth_2p_bin word_test_bit_def [symmetric])
+     apply (auto simp add: test_bit_2p nth_2p_bin word_test_bit_def [symmetric] BIT_simps)
   done
 
 lemma word_of_int_2p: "(word_of_int (2 ^ n) :: 'a :: len word) = 2 ^ n" 
@@ -2556,7 +2556,7 @@
    apply (rule box_equals) 
      apply (rule_tac [2] bintr_ariths (1))+ 
    apply (clarsimp simp add : number_of_is_id)
-  apply simp 
+  apply (simp add: BIT_simps)
   done
 
 lemma bang_is_le: "x !! m \<Longrightarrow> 2 ^ m <= (x :: 'a :: len word)" 
@@ -2599,9 +2599,10 @@
   done
 
 lemma shiftl1_0 [simp] : "shiftl1 0 = 0"
-  unfolding word_0_no shiftl1_number by auto
-
-lemmas shiftl1_def_u = shiftl1_def [folded word_number_of_def]
+  unfolding word_0_no shiftl1_number by (auto simp: BIT_simps)
+
+lemma shiftl1_def_u: "shiftl1 w = number_of (uint w BIT 0)"
+  by (simp only: word_number_of_def shiftl1_def)
 
 lemma shiftl1_def_s: "shiftl1 w = number_of (sint w BIT 0)"
   by (rule trans [OF _ shiftl1_number]) simp
@@ -2920,13 +2921,13 @@
 (* note - the following results use 'a :: len word < number_ring *)
 
 lemma shiftl1_2t: "shiftl1 (w :: 'a :: len word) = 2 * w"
-  apply (simp add: shiftl1_def_u)
+  apply (simp add: shiftl1_def_u BIT_simps)
   apply (simp only:  double_number_of_Bit0 [symmetric])
   apply simp
   done
 
 lemma shiftl1_p: "shiftl1 (w :: 'a :: len word) = w + w"
-  apply (simp add: shiftl1_def_u)
+  apply (simp add: shiftl1_def_u BIT_simps)
   apply (simp only: double_number_of_Bit0 [symmetric])
   apply simp
   done
@@ -4599,4 +4600,7 @@
 
 setup {* SMT_Word.setup *}
 
+text {* Legacy simp rules *}
+declare BIT_simps [simp]
+
 end
--- a/src/LCF/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/LCF/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images: LCF
 test: LCF-ex
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/Pure/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/Pure/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images: Pure
 test: RAW
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/Pure/Isar/isar_syn.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/Pure/Isar/isar_syn.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -113,13 +113,11 @@
     (Parse.type_args -- Parse.binding -- Parse.opt_mixfix
       >> (fn ((args, a), mx) => Typedecl.typedecl (a, map (rpair dummyS) args, mx) #> snd));
 
-val type_abbrev =
-  Parse.type_args -- Parse.binding --
-    (Parse.$$$ "=" |-- Parse.!!! (Parse.typ -- Parse.opt_mixfix'));
-
 val _ =
   Outer_Syntax.local_theory "type_synonym" "declare type abbreviation" Keyword.thy_decl
-    (type_abbrev >> (fn ((args, a), (rhs, mx)) => snd o Typedecl.abbrev_cmd (a, args, mx) rhs));
+    (Parse.type_args -- Parse.binding --
+      (Parse.$$$ "=" |-- Parse.!!! (Parse.typ -- Parse.opt_mixfix'))
+      >> (fn ((args, a), (rhs, mx)) => snd o Typedecl.abbrev_cmd (a, args, mx) rhs));
 
 val _ =
   Outer_Syntax.command "nonterminal"
--- a/src/Sequents/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/Sequents/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -8,6 +8,7 @@
 images: Sequents
 test: Sequents-LK
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/Tools/WWW_Find/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/Tools/WWW_Find/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -9,6 +9,7 @@
 images:
 test: Pure-WWW_Find
 all: images test
+full: all
 smlnj: all
 
 
--- a/src/Tools/misc_legacy.ML	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/Tools/misc_legacy.ML	Thu Dec 15 09:13:32 2011 +0100
@@ -22,7 +22,6 @@
   val term_frees: term -> term list
   val mk_defpair: term * term -> string * term
   val get_def: theory -> xstring -> thm
-  val simple_read_term: theory -> typ -> string -> term
   val METAHYPS: (thm list -> tactic) -> int -> tactic
 end;
 
@@ -104,15 +103,6 @@
 fun get_def thy = Thm.axiom thy o Name_Space.intern (Theory.axiom_space thy) o Thm.def_name;
 
 
-fun simple_read_term thy T s =
-  let
-    val ctxt = Proof_Context.init_global thy
-      |> Proof_Context.allow_dummies
-      |> Proof_Context.set_mode Proof_Context.mode_schematic;
-    val parse = if T = propT then Syntax.parse_prop else Syntax.parse_term;
-  in parse ctxt s |> Type.constraint T |> Syntax.check_term ctxt end;
-
-
 (**** METAHYPS -- tactical for using hypotheses as meta-level assumptions
        METAHYPS (fn prems => tac prems) i
 
--- a/src/ZF/IsaMakefile	Thu Dec 15 09:13:23 2011 +0100
+++ b/src/ZF/IsaMakefile	Thu Dec 15 09:13:32 2011 +0100
@@ -10,6 +10,7 @@
 #Note: keep targets sorted
 test: ZF-AC ZF-Coind ZF-Constructible ZF-IMP ZF-Induct ZF-Resid ZF-UNITY ZF-ex
 all: images test
+full: all
 smlnj: all