# HG changeset patch # User wenzelm # Date 1519585505 -3600 # Node ID 0cd2fd0c2dcfd49d1e3da7f87d985c13f000ed2e # Parent b342f96e47b5380ddc8a0fc1d5e338c9392828e9# Parent e6cd1fd4eb1980480703f7a2205d7cc827ff6924 merged diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Doc/Isar_Ref/Spec.thy --- a/src/Doc/Isar_Ref/Spec.thy Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Doc/Isar_Ref/Spec.thy Sun Feb 25 20:05:05 2018 +0100 @@ -66,7 +66,7 @@ ; keyword_decls: (@{syntax string} +) ('::' @{syntax name} @{syntax tags})? ; - abbrevs: @'abbrevs' ((text '=' text) + @'and') + abbrevs: @'abbrevs' (((text+) '=' (text+)) + @'and') ; @@{command thy_deps} (thy_bounds thy_bounds?)? ; diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/HOL/Library/Simps_Case_Conv.thy --- a/src/HOL/Library/Simps_Case_Conv.thy Sun Feb 25 12:55:07 2018 +0000 +++ b/src/HOL/Library/Simps_Case_Conv.thy Sun Feb 25 20:05:05 2018 +0100 @@ -5,8 +5,7 @@ theory Simps_Case_Conv imports Main keywords "simps_of_case" "case_of_simps" :: thy_decl - abbrevs "simps_of_case" = "" - and "case_of_simps" = "" + abbrevs "simps_of_case" "case_of_simps" = "" begin ML_file "simps_case_conv.ML" diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/HOL/Tools/Metis/metis_reconstruct.ML --- a/src/HOL/Tools/Metis/metis_reconstruct.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/HOL/Tools/Metis/metis_reconstruct.ML Sun Feb 25 20:05:05 2018 +0100 @@ -472,7 +472,7 @@ one of the premises. Unfortunately, this sometimes yields "Variable has two distinct types" errors. To avoid this, we instantiate the variables before applying "assume_tac". Typical constraints are of the form - ?SK_a_b_c_x SK_d_e_f_y ... SK_a_b_c_x ... SK_g_h_i_z =?= SK_a_b_c_x, + ?SK_a_b_c_x SK_d_e_f_y ... SK_a_b_c_x ... SK_g_h_i_z \\<^sup>? SK_a_b_c_x, where the nonvariables are goal parameters. *) fun unify_first_prem_with_concl ctxt i th = let diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/HOL/Transitive_Closure.thy --- a/src/HOL/Transitive_Closure.thy Sun Feb 25 12:55:07 2018 +0000 +++ b/src/HOL/Transitive_Closure.thy Sun Feb 25 20:05:05 2018 +0100 @@ -7,6 +7,9 @@ theory Transitive_Closure imports Relation + abbrevs "^*" = "\<^sup>*" "\<^sup>*\<^sup>*" + and "^+" = "\<^sup>+" "\<^sup>+\<^sup>+" + and "^=" = "\<^sup>=" "\<^sup>=\<^sup>=" begin ML_file "~~/src/Provers/trancl.ML" diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Isar/local_defs.ML --- a/src/Pure/Isar/local_defs.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Isar/local_defs.ML Sun Feb 25 20:05:05 2018 +0100 @@ -82,7 +82,7 @@ (* - [x, x == a] + [x, x \ a] : B x ----------- @@ -133,7 +133,7 @@ (* specific export -- result based on educated guessing *) (* - [xs, xs == as] + [xs, xs \ as] : B xs -------------- @@ -158,11 +158,11 @@ in (apply2 (map #1) (List.partition #2 defs_asms), Assumption.export false inner outer th) end; (* - [xs, xs == as] + [xs, xs \ as] : TERM b xs -------------- and -------------- - TERM b as b xs == b as + TERM b as b xs \ b as *) fun export_cterm inner outer ct = export inner outer (Drule.mk_term ct) ||> Drule.dest_term; diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Isar/obtain.ML --- a/src/Pure/Isar/obtain.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Isar/obtain.ML Sun Feb 25 20:05:05 2018 +0100 @@ -144,11 +144,11 @@ (** consider: generalized elimination and cases rule **) (* - consider (a) x where "A x" | (b) y where "B y" | ... == + consider (a) x where "A x" | (b) y where "B y" | ... \ have thesis - if a [intro?]: "!!x. A x ==> thesis" - and b [intro?]: "!!y. B y ==> thesis" + if a [intro?]: "\x. A x \ thesis" + and b [intro?]: "\y. 
B y \ thesis" and ... for thesis apply (insert that) @@ -185,9 +185,9 @@ (** obtain: augmented context based on generalized existence rule **) (* - obtain (a) x where "A x" == + obtain (a) x where "A x" \ - have thesis if a [intro?]: "!!x. A x ==> thesis" for thesis + have thesis if a [intro?]: "\x. A x \ thesis" for thesis apply (insert that) fix x assm <> "A x" @@ -279,15 +279,15 @@ (* - guess x == + guess x \ { fix thesis have "PROP ?guess" - apply magic -- {* turn goal into "thesis ==> #thesis" *} + apply magic \ \turn goal into \thesis \ #thesis\\ - apply_end magic -- {* turn final "(!!x. P x ==> thesis) ==> #thesis" into - "#((!!x. A x ==> thesis) ==> thesis)" which is a finished goal state *} + apply_end magic \ \turn final \(\x. P x \ thesis) \ #thesis\ into\ + \ \\#((\x. A x \ thesis) \ thesis)\ which is a finished goal state\ } fix x assm <> "A x" diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Isar/proof.ML --- a/src/Pure/Isar/proof.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Isar/proof.ML Sun Feb 25 20:05:05 2018 +0100 @@ -171,7 +171,7 @@ {statement: (string * Position.T) * term list list * term, (*goal kind and statement (starting with vars), initial proposition*) using: thm list, (*goal facts*) - goal: thm, (*subgoals ==> statement*) + goal: thm, (*subgoals \ statement*) before_qed: Method.text option, after_qed: (context * thm list list -> state -> state) * diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Isar/subgoal.ML --- a/src/Pure/Isar/subgoal.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Isar/subgoal.ML Sun Feb 25 20:05:05 2018 +0100 @@ -108,9 +108,9 @@ (* [x, A x] : - B x ==> C + B x \ C ------------------ - [!!x. A x ==> B x] + [\x. A x \ B x] : C *) diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Proof/extraction.ML --- a/src/Pure/Proof/extraction.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Proof/extraction.ML Sun Feb 25 20:05:05 2018 +0100 @@ -40,10 +40,10 @@ [(Binding.make ("Type", \<^here>), 0, NoSyn), (Binding.make ("Null", \<^here>), 0, NoSyn)] #> Sign.add_consts - [(Binding.make ("typeof", \<^here>), typ "'b => Type", NoSyn), - (Binding.make ("Type", \<^here>), typ "'a itself => Type", NoSyn), + [(Binding.make ("typeof", \<^here>), typ "'b \ Type", NoSyn), + (Binding.make ("Type", \<^here>), typ "'a itself \ Type", NoSyn), (Binding.make ("Null", \<^here>), typ "Null", NoSyn), - (Binding.make ("realizes", \<^here>), typ "'a => 'b => 'b", NoSyn)]; + (Binding.make ("realizes", \<^here>), typ "'a \ 'b \ 'b", NoSyn)]; val nullT = Type ("Null", []); val nullt = Const ("Null", nullT); @@ -430,45 +430,45 @@ (add_types [("prop", ([], NONE))] #> add_typeof_eqns - ["(typeof (PROP P)) == (Type (TYPE(Null))) ==> \ - \ (typeof (PROP Q)) == (Type (TYPE('Q))) ==> \ - \ (typeof (PROP P ==> PROP Q)) == (Type (TYPE('Q)))", + ["(typeof (PROP P)) \ (Type (TYPE(Null))) \ \ + \ (typeof (PROP Q)) \ (Type (TYPE('Q))) \ \ + \ (typeof (PROP P \ PROP Q)) \ (Type (TYPE('Q)))", - "(typeof (PROP Q)) == (Type (TYPE(Null))) ==> \ - \ (typeof (PROP P ==> PROP Q)) == (Type (TYPE(Null)))", + "(typeof (PROP Q)) \ (Type (TYPE(Null))) \ \ + \ (typeof (PROP P \ PROP Q)) \ (Type (TYPE(Null)))", - "(typeof (PROP P)) == (Type (TYPE('P))) ==> \ - \ (typeof (PROP Q)) == (Type (TYPE('Q))) ==> \ - \ (typeof (PROP P ==> PROP Q)) == (Type (TYPE('P => 'Q)))", + "(typeof (PROP P)) \ (Type (TYPE('P))) \ \ + \ (typeof (PROP Q)) \ (Type (TYPE('Q))) \ \ + \ (typeof (PROP P \ PROP Q)) \ (Type (TYPE('P \ 'Q)))", - "(%x. typeof (PROP P (x))) == (%x. Type (TYPE(Null))) ==> \ - \ (typeof (!!x. 
PROP P (x))) == (Type (TYPE(Null)))", + "(\x. typeof (PROP P (x))) \ (\x. Type (TYPE(Null))) \ \ + \ (typeof (\x. PROP P (x))) \ (Type (TYPE(Null)))", - "(%x. typeof (PROP P (x))) == (%x. Type (TYPE('P))) ==> \ - \ (typeof (!!x::'a. PROP P (x))) == (Type (TYPE('a => 'P)))", + "(\x. typeof (PROP P (x))) \ (\x. Type (TYPE('P))) \ \ + \ (typeof (\x::'a. PROP P (x))) \ (Type (TYPE('a \ 'P)))", - "(%x. typeof (f (x))) == (%x. Type (TYPE('f))) ==> \ - \ (typeof (f)) == (Type (TYPE('f)))"] #> + "(\x. typeof (f (x))) \ (\x. Type (TYPE('f))) \ \ + \ (typeof (f)) \ (Type (TYPE('f)))"] #> add_realizes_eqns - ["(typeof (PROP P)) == (Type (TYPE(Null))) ==> \ - \ (realizes (r) (PROP P ==> PROP Q)) == \ - \ (PROP realizes (Null) (PROP P) ==> PROP realizes (r) (PROP Q))", + ["(typeof (PROP P)) \ (Type (TYPE(Null))) \ \ + \ (realizes (r) (PROP P \ PROP Q)) \ \ + \ (PROP realizes (Null) (PROP P) \ PROP realizes (r) (PROP Q))", - "(typeof (PROP P)) == (Type (TYPE('P))) ==> \ - \ (typeof (PROP Q)) == (Type (TYPE(Null))) ==> \ - \ (realizes (r) (PROP P ==> PROP Q)) == \ - \ (!!x::'P. PROP realizes (x) (PROP P) ==> PROP realizes (Null) (PROP Q))", + "(typeof (PROP P)) \ (Type (TYPE('P))) \ \ + \ (typeof (PROP Q)) \ (Type (TYPE(Null))) \ \ + \ (realizes (r) (PROP P \ PROP Q)) \ \ + \ (\x::'P. PROP realizes (x) (PROP P) \ PROP realizes (Null) (PROP Q))", - "(realizes (r) (PROP P ==> PROP Q)) == \ - \ (!!x. PROP realizes (x) (PROP P) ==> PROP realizes (r (x)) (PROP Q))", + "(realizes (r) (PROP P \ PROP Q)) \ \ + \ (\x. PROP realizes (x) (PROP P) \ PROP realizes (r (x)) (PROP Q))", - "(%x. typeof (PROP P (x))) == (%x. Type (TYPE(Null))) ==> \ - \ (realizes (r) (!!x. PROP P (x))) == \ - \ (!!x. PROP realizes (Null) (PROP P (x)))", + "(\x. typeof (PROP P (x))) \ (\x. Type (TYPE(Null))) \ \ + \ (realizes (r) (\x. PROP P (x))) \ \ + \ (\x. PROP realizes (Null) (PROP P (x)))", - "(realizes (r) (!!x. PROP P (x))) == \ - \ (!!x. PROP realizes (r (x)) (PROP P (x)))"] #> + "(realizes (r) (\x. PROP P (x))) \ \ + \ (\x. PROP realizes (r (x)) (PROP P (x)))"] #> Attrib.setup \<^binding>\extraction_expand\ (Scan.succeed (extraction_expand false)) "specify theorems to be expanded during extraction" #> diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Pure.thy --- a/src/Pure/Pure.thy Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Pure.thy Sun Feb 25 20:05:05 2018 +0100 @@ -94,15 +94,9 @@ and "named_theorems" :: thy_decl abbrevs "===>" = "===>" (*prevent replacement of very long arrows*) and "--->" = "\\" - and "default_sort" = "" - and "simproc_setup" = "" - and "hence" = "" + and "hence" "thus" "default_sort" "simproc_setup" "apply_end" "realizers" "realizability" = "" and "hence" = "then have" - and "thus" = "" and "thus" = "then show" - and "apply_end" = "" - and "realizers" = "" - and "realizability" = "" begin section \Isar commands\ diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Syntax/simple_syntax.ML --- a/src/Pure/Syntax/simple_syntax.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Syntax/simple_syntax.ML Sun Feb 25 20:05:05 2018 +0100 @@ -17,7 +17,7 @@ (* scanning tokens *) val lexicon = Scan.make_lexicon - (map Symbol.explode ["!!", "%", "(", ")", ".", "::", "==", "==>", "=>", "&&&", "CONST"]); + (map Symbol.explode ["\", "\", "(", ")", ".", "::", "\", "\", "\", "&&&", "CONST"]); fun read scan s = (case @@ -53,7 +53,7 @@ (* types *) (* - typ = typ1 => ... => typ1 + typ = typ1 \ ... \ typ1 | typ1 typ1 = typ2 const ... 
const | typ2 @@ -63,7 +63,7 @@ *) fun typ x = - (enum1 "=>" typ1 >> (op ---> o split_last)) x + (enum1 "\" typ1 >> (op ---> o split_last)) x and typ1 x = (typ2 -- Scan.repeat const >> (fn (T, cs) => fold (fn c => fn U => Type (c, [U])) cs T)) x and typ2 x = @@ -77,17 +77,17 @@ (* terms *) (* - term = !!ident :: typ. term + term = \ident :: typ. term | term1 - term1 = term2 ==> ... ==> term2 + term1 = term2 \ ... \ term2 | term2 - term2 = term3 == term2 + term2 = term3 \ term2 | term3 &&& term2 | term3 term3 = ident :: typ | var :: typ | CONST const :: typ - | %ident :: typ. term3 + | \ident :: typ. term3 | term4 term4 = term5 ... term5 | term5 @@ -104,23 +104,23 @@ val bind = idt --| $$ "."; fun term env T x = - ($$ "!!" |-- bind :|-- (fn v => term (v :: env) propT >> (Logic.all (Free v))) || + ($$ "\" |-- bind :|-- (fn v => term (v :: env) propT >> (Logic.all (Free v))) || term1 env T) x and term1 env T x = - (enum2 "==>" (term2 env propT) >> foldr1 Logic.mk_implies || + (enum2 "\" (term2 env propT) >> foldr1 Logic.mk_implies || term2 env T) x and term2 env T x = (equal env || term3 env propT -- ($$ "&&&" |-- term2 env propT) >> Logic.mk_conjunction || term3 env T) x and equal env x = - (term3 env dummyT -- ($$ "==" |-- term2 env dummyT) >> (fn (t, u) => + (term3 env dummyT -- ($$ "\" |-- term2 env dummyT) >> (fn (t, u) => Const ("Pure.eq", Term.fastype_of t --> Term.fastype_of u --> propT) $ t $ u)) x and term3 env T x = (idt >> Free || var -- constraint >> Var || $$ "CONST" |-- const -- constraint >> Const || - $$ "%" |-- bind :|-- (fn v => term3 (v :: env) dummyT >> lambda (Free v)) || + $$ "\" |-- bind :|-- (fn v => term3 (v :: env) dummyT >> lambda (Free v)) || term4 env T) x and term4 env T x = (term5 env dummyT -- Scan.repeat1 (term5 env dummyT) >> Term.list_comb || @@ -132,7 +132,10 @@ $$ "(" |-- term env T --| $$ ")") x; fun read_tm T s = - let val t = read (term [] T) s in + let + val t = read (term [] T) s + handle ERROR msg => cat_error ("Malformed input " ^ quote s) msg; + in if can (Term.map_types Term.no_dummyT) t then t else error ("Unspecified types in input: " ^ quote s) end; diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Thy/thy_header.ML --- a/src/Pure/Thy/thy_header.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Thy/thy_header.ML Sun Feb 25 20:05:05 2018 +0100 @@ -144,7 +144,10 @@ Scan.optional (Parse.$$$ "::" |-- Parse.!!! keyword_spec) Keyword.no_spec >> (fn (names, spec) => map (rpair spec) names); -val abbrevs = Parse.and_list1 (Parse.text -- (Parse.$$$ "=" |-- Parse.!!! Parse.text)); +val abbrevs = + Parse.and_list1 + (Scan.repeat1 Parse.text -- (Parse.$$$ "=" |-- Parse.!!! (Scan.repeat1 Parse.text)) + >> uncurry (map_product pair)) >> flat; val keyword_decls = Parse.and_list1 keyword_decl >> flat; diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Thy/thy_header.scala --- a/src/Pure/Thy/thy_header.scala Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Thy/thy_header.scala Sun Feb 25 20:05:05 2018 +0100 @@ -138,7 +138,8 @@ { case xs ~ yss => (xs :: yss).flatten } val abbrevs = - rep1sep(text ~ ($$$("=") ~! text) ^^ { case a ~ (_ ~ b) => (a, b) }, $$$("and")) + rep1sep(rep1(text) ~ ($$$("=") ~! 
rep1(text)), $$$("and")) ^^ + { case res => for ((as ~ (_ ~ bs)) <- res; a <- as; b <- bs) yield (a, b) } val args = position(theory_name) ~ diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/Tools/find_theorems.ML --- a/src/Pure/Tools/find_theorems.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/Tools/find_theorems.ML Sun Feb 25 20:05:05 2018 +0100 @@ -245,7 +245,7 @@ constants that may be subject to beta-reduction after substitution of frees should not be included for LHS set because they could be thrown away by the substituted function. E.g. for (?F 1 2) do not - include 1 or 2, if it were possible for ?F to be (%x y. 3). The + include 1 or 2, if it were possible for ?F to be (\x y. 3). The largest possible set should always be included on the RHS.*) fun filter_pattern ctxt pat = diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/assumption.ML --- a/src/Pure/assumption.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/assumption.ML Sun Feb 25 20:05:05 2018 +0100 @@ -34,7 +34,7 @@ : B -------- - #A ==> B + #A \ B *) fun assume_export is_goal asms = (if is_goal then Drule.implies_intr_protected asms else Drule.implies_intr_list asms, fn t => t); @@ -44,7 +44,7 @@ : B ------- - A ==> B + A \ B *) fun presume_export _ = assume_export false; @@ -60,7 +60,7 @@ (** local context data **) datatype data = Data of - {assms: (export * cterm list) list, (*assumes: A ==> _*) + {assms: (export * cterm list) list, (*assumes: A \ _*) prems: thm list}; (*prems: A |- norm_hhf A*) fun make_data (assms, prems) = Data {assms = assms, prems = prems}; diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/conjunction.ML --- a/src/Pure/conjunction.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/conjunction.ML Sun Feb 25 20:05:05 2018 +0100 @@ -72,7 +72,7 @@ val A = read_prop "A" and vA = (("A", 0), propT); val B = read_prop "B" and vB = (("B", 0), propT); val C = read_prop "C"; -val ABC = read_prop "A ==> B ==> C"; +val ABC = read_prop "A \ B \ C"; val A_B = read_prop "A &&& B"; val conjunction_def = @@ -155,9 +155,9 @@ in (* - A1 &&& ... &&& An ==> B + A1 &&& ... &&& An \ B ----------------------- - A1 ==> ... ==> An ==> B + A1 \ ... \ An \ B *) fun curry_balanced n th = if n < 2 then th @@ -172,9 +172,9 @@ end; (* - A1 ==> ... ==> An ==> B + A1 \ ... \ An \ B ----------------------- - A1 &&& ... &&& An ==> B + A1 &&& ... &&& An \ B *) fun uncurry_balanced n th = if n < 2 then th diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/conv.ML --- a/src/Pure/conv.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/conv.ML Sun Feb 25 20:05:05 2018 +0100 @@ -176,20 +176,20 @@ (* conversions on HHF rules *) -(*rewrite B in !!x1 ... xn. B*) +(*rewrite B in \x1 ... xn. B*) fun params_conv n cv ctxt ct = if n <> 0 andalso Logic.is_all (Thm.term_of ct) then arg_conv (abs_conv (params_conv (n - 1) cv o #2) ctxt) ct else cv ctxt ct; -(*rewrite the A's in A1 ==> ... ==> An ==> B*) +(*rewrite the A's in A1 \ ... \ An \ B*) fun prems_conv 0 _ ct = all_conv ct | prems_conv n cv ct = (case try Thm.dest_implies ct of NONE => all_conv ct | SOME (A, B) => Drule.imp_cong_rule (cv A) (prems_conv (n - 1) cv B)); -(*rewrite B in A1 ==> ... ==> An ==> B*) +(*rewrite B in A1 \ ... \ An \ B*) fun concl_conv 0 cv ct = cv ct | concl_conv n cv ct = (case try Thm.dest_implies ct of diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/drule.ML --- a/src/Pure/drule.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/drule.ML Sun Feb 25 20:05:05 2018 +0100 @@ -119,13 +119,13 @@ (** some cterm->cterm operations: faster than calling cterm_of! 
**) -(* A1==>...An==>B goes to [A1,...,An], where B is not an implication *) +(* A1\...An\B goes to [A1,...,An], where B is not an implication *) fun strip_imp_prems ct = let val (cA, cB) = Thm.dest_implies ct in cA :: strip_imp_prems cB end handle TERM _ => []; -(* A1==>...An==>B goes to B, where B is not an implication *) +(* A1\...An\B goes to B, where B is not an implication *) fun strip_imp_concl ct = (case Thm.term_of ct of Const ("Pure.imp", _) $ _ $ _ => strip_imp_concl (Thm.dest_arg ct) @@ -139,7 +139,7 @@ val implies = certify Logic.implies; fun mk_implies (A, B) = Thm.apply (Thm.apply implies A) B; -(*cterm version of list_implies: [A1,...,An], B goes to [|A1;==>;An|]==>B *) +(*cterm version of list_implies: [A1,...,An], B goes to \A1;...;An\\B *) fun list_implies([], B) = B | list_implies(A::AS, B) = mk_implies (A, list_implies(AS,B)); @@ -210,10 +210,10 @@ (*specialization over a list of cterms*) val forall_elim_list = fold Thm.forall_elim; -(*maps A1,...,An |- B to [| A1;...;An |] ==> B*) +(*maps A1,...,An |- B to \A1;...;An\ \ B*) val implies_intr_list = fold_rev Thm.implies_intr; -(*maps [| A1;...;An |] ==> B and [A1,...,An] to B*) +(*maps \A1;...;An\ \ B and [A1,...,An] to B*) fun implies_elim_list impth ths = fold Thm.elim_implies ths impth; (*Reset Var indexes to zero, renaming to preserve distinctness*) @@ -309,7 +309,7 @@ | ([], _) => raise THM ("RSN: no unifiers", i, [tha, thb]) | _ => raise THM ("RSN: multiple unifiers", i, [tha, thb])); -(*Resolution: P==>Q, Q==>R gives P==>R*) +(*Resolution: P \ Q, Q \ R gives P \ R*) fun tha RS thb = tha RSN (1,thb); (*For joining lists of rules*) @@ -332,8 +332,8 @@ makes proof trees*) fun rls MRS bottom_rl = bottom_rl OF rls; -(*compose Q and [...,Qi,Q(i+1),...]==>R to [...,Q(i+1),...]==>R - with no lifting or renaming! Q may contain ==> or meta-quants +(*compose Q and \...,Qi,Q(i+1),...\ \ R to \...,Q(i+1),...\ \ R + with no lifting or renaming! Q may contain \ or meta-quantifiers ALWAYS deletes premise i *) fun compose (tha, i, thb) = Thm.bicompose NONE {flatten = true, match = false, incremented = false} (false, tha, 0) i thb @@ -367,14 +367,14 @@ val symmetric_thm = let - val xy = read_prop "x::'a == y::'a"; + val xy = read_prop "x::'a \ y::'a"; val thm = Thm.implies_intr xy (Thm.symmetric (Thm.assume xy)); in store_standard_thm_open (Binding.make ("symmetric", \<^here>)) thm end; val transitive_thm = let - val xy = read_prop "x::'a == y::'a"; - val yz = read_prop "y::'a == z::'a"; + val xy = read_prop "x::'a \ y::'a"; + val yz = read_prop "y::'a \ z::'a"; val xythm = Thm.assume xy; val yzthm = Thm.assume yz; val thm = Thm.implies_intr yz (Thm.transitive xythm yzthm); @@ -387,13 +387,13 @@ val equals_cong = store_standard_thm_open (Binding.make ("equals_cong", \<^here>)) - (Thm.reflexive (read_prop "x::'a == y::'a")); + (Thm.reflexive (read_prop "x::'a \ y::'a")); val imp_cong = let - val ABC = read_prop "A ==> B::prop == C::prop" - val AB = read_prop "A ==> B" - val AC = read_prop "A ==> C" + val ABC = read_prop "A \ B::prop \ C::prop" + val AB = read_prop "A \ B" + val AC = read_prop "A \ C" val A = read_prop "A" in store_standard_thm_open (Binding.make ("imp_cong", \<^here>)) @@ -408,8 +408,8 @@ val swap_prems_eq = let - val ABC = read_prop "A ==> B ==> C" - val BAC = read_prop "B ==> A ==> C" + val ABC = read_prop "A \ B \ C" + val BAC = read_prop "B \ A \ C" val A = read_prop "A" val B = read_prop "B" in @@ -439,9 +439,9 @@ (* abs_def *) (* - f ?x1 ... ?xn == u + f ?x1 ... ?xn \ u -------------------- - f == %x1 ... xn. 
u + f \ \x1 ... xn. u *) local @@ -476,17 +476,17 @@ store_standard_thm_open (Binding.make ("asm_rl", \<^here>)) (Thm.trivial (read_prop "?psi")); -(*Meta-level cut rule: [| V==>W; V |] ==> W *) +(*Meta-level cut rule: \V \ W; V\ \ W *) val cut_rl = store_standard_thm_open (Binding.make ("cut_rl", \<^here>)) - (Thm.trivial (read_prop "?psi ==> ?theta")); + (Thm.trivial (read_prop "?psi \ ?theta")); (*Generalized elim rule for one conclusion; cut_rl with reversed premises: - [| PROP V; PROP V ==> PROP W |] ==> PROP W *) + \PROP V; PROP V \ PROP W\ \ PROP W *) val revcut_rl = let val V = read_prop "V"; - val VW = read_prop "V ==> W"; + val VW = read_prop "V \ W"; in store_standard_thm_open (Binding.make ("revcut_rl", \<^here>)) (Thm.implies_intr V @@ -501,11 +501,11 @@ val thm = Thm.implies_intr V (Thm.implies_intr W (Thm.assume W)); in store_standard_thm_open (Binding.make ("thin_rl", \<^here>)) thm end; -(* (!!x. PROP ?V) == PROP ?V Allows removal of redundant parameters*) +(* (\x. PROP ?V) \ PROP ?V Allows removal of redundant parameters*) val triv_forall_equality = let val V = read_prop "V"; - val QV = read_prop "!!x::'a. V"; + val QV = read_prop "\x::'a. V"; val x = certify (Free ("x", Term.aT [])); in store_standard_thm_open (Binding.make ("triv_forall_equality", \<^here>)) @@ -513,12 +513,12 @@ (Thm.implies_intr V (Thm.forall_intr x (Thm.assume V)))) end; -(* (PROP ?Phi ==> PROP ?Phi ==> PROP ?Psi) ==> - (PROP ?Phi ==> PROP ?Psi) +(* (PROP ?Phi \ PROP ?Phi \ PROP ?Psi) \ + (PROP ?Phi \ PROP ?Psi) *) val distinct_prems_rl = let - val AAB = read_prop "Phi ==> Phi ==> Psi"; + val AAB = read_prop "Phi \ Phi \ Psi"; val A = read_prop "Phi"; in store_standard_thm_open (Binding.make ("distinct_prems_rl", \<^here>)) @@ -526,36 +526,36 @@ (implies_elim_list (Thm.assume AAB) [Thm.assume A, Thm.assume A])) end; -(* [| PROP ?phi ==> PROP ?psi; PROP ?psi ==> PROP ?phi |] - ==> PROP ?phi == PROP ?psi - Introduction rule for == as a meta-theorem. +(* \PROP ?phi \ PROP ?psi; PROP ?psi \ PROP ?phi\ + \ PROP ?phi \ PROP ?psi + Introduction rule for \ as a meta-theorem. *) val equal_intr_rule = let - val PQ = read_prop "phi ==> psi"; - val QP = read_prop "psi ==> phi"; + val PQ = read_prop "phi \ psi"; + val QP = read_prop "psi \ phi"; in store_standard_thm_open (Binding.make ("equal_intr_rule", \<^here>)) (Thm.implies_intr PQ (Thm.implies_intr QP (Thm.equal_intr (Thm.assume PQ) (Thm.assume QP)))) end; -(* PROP ?phi == PROP ?psi ==> PROP ?phi ==> PROP ?psi *) +(* PROP ?phi \ PROP ?psi \ PROP ?phi \ PROP ?psi *) val equal_elim_rule1 = let - val eq = read_prop "phi::prop == psi::prop"; + val eq = read_prop "phi::prop \ psi::prop"; val P = read_prop "phi"; in store_standard_thm_open (Binding.make ("equal_elim_rule1", \<^here>)) (Thm.equal_elim (Thm.assume eq) (Thm.assume P) |> implies_intr_list [eq, P]) end; -(* PROP ?psi == PROP ?phi ==> PROP ?phi ==> PROP ?psi *) +(* PROP ?psi \ PROP ?phi \ PROP ?phi \ PROP ?psi *) val equal_elim_rule2 = store_standard_thm_open (Binding.make ("equal_elim_rule2", \<^here>)) (symmetric_thm RS equal_elim_rule1); -(* PROP ?phi ==> PROP ?phi ==> PROP ?psi ==> PROP ?psi *) +(* PROP ?phi \ PROP ?phi \ PROP ?psi \ PROP ?psi *) val remdups_rl = let val P = read_prop "phi"; @@ -652,7 +652,7 @@ (* HHF normalization *) -(* (PROP ?phi ==> (!!x. PROP ?psi x)) == (!!x. PROP ?phi ==> PROP ?psi x) *) +(* (PROP ?phi \ (\x. PROP ?psi x)) \ (\x. 
PROP ?phi \ PROP ?psi x) *) val norm_hhf_eq = let val aT = TFree ("'a", []); @@ -710,7 +710,7 @@ local -(*compose Q and [Q1,Q2,...,Qk]==>R to [Q2,...,Qk]==>R getting unique result*) +(*compose Q and \Q1,Q2,...,Qk\ \ R to \Q2,...,Qk\ \ R getting unique result*) fun comp incremented th1 th2 = Thm.bicompose NONE {flatten = true, match = false, incremented = incremented} (false, th1, 0) 1 th2 diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/goal.ML --- a/src/Pure/goal.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/goal.ML Sun Feb 25 20:05:05 2018 +0100 @@ -60,21 +60,21 @@ (* -------- (init) - C ==> #C + C \ #C *) fun init C = Thm.instantiate ([], [((("A", 0), propT), C)]) Drule.protectI; (* - A1 ==> ... ==> An ==> C + A1 \ ... \ An \ C ------------------------ (protect n) - A1 ==> ... ==> An ==> #C + A1 \ ... \ An \ #C *) fun protect n th = Drule.comp_no_flatten (th, n) 1 Drule.protectI; (* - A ==> ... ==> #C + A \ ... \ #C ---------------- (conclude) - A ==> ... ==> C + A \ ... \ C *) fun conclude th = Drule.comp_no_flatten (th, Thm.nprems_of th) 1 Drule.protectD; diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/logic.ML --- a/src/Pure/logic.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/logic.ML Sun Feb 25 20:05:05 2018 +0100 @@ -173,19 +173,19 @@ (** nested implications **) -(* [A1,...,An], B goes to A1==>...An==>B *) +(* [A1,...,An], B goes to A1\...An\B *) fun list_implies ([], B) = B | list_implies (A::As, B) = implies $ A $ list_implies(As,B); -(* A1==>...An==>B goes to [A1,...,An], where B is not an implication *) +(* A1\...An\B goes to [A1,...,An], where B is not an implication *) fun strip_imp_prems (Const("Pure.imp", _) $ A $ B) = A :: strip_imp_prems B | strip_imp_prems _ = []; -(* A1==>...An==>B goes to B, where B is not an implication *) +(* A1\...An\B goes to B, where B is not an implication *) fun strip_imp_concl (Const("Pure.imp", _) $ A $ B) = strip_imp_concl B | strip_imp_concl A = A : term; -(*Strip and return premises: (i, [], A1==>...Ai==>B) +(*Strip and return premises: (i, [], A1\...Ai\B) goes to ([Ai, A(i-1),...,A1] , B) (REVERSED) if i<0 or else i too big then raises TERM*) fun strip_prems (0, As, B) = (As, B) @@ -197,13 +197,13 @@ fun count_prems (Const ("Pure.imp", _) $ _ $ B) = 1 + count_prems B | count_prems _ = 0; -(*Select Ai from A1 ==>...Ai==>B*) +(*Select Ai from A1\...Ai\B*) fun nth_prem (1, Const ("Pure.imp", _) $ A $ _) = A | nth_prem (i, Const ("Pure.imp", _) $ _ $ B) = nth_prem (i - 1, B) | nth_prem (_, A) = raise TERM ("nth_prem", [A]); (*strip a proof state (Horn clause): - B1 ==> ... Bn ==> C goes to ([B1, ..., Bn], C) *) + B1 \ ... Bn \ C goes to ([B1, ..., Bn], C) *) fun strip_horn A = (strip_imp_prems A, strip_imp_concl A); @@ -406,7 +406,7 @@ fun combound (t, n, k) = if k>0 then combound (t,n+1,k-1) $ (Bound n) else t; -(* ([xn,...,x1], t) ======> (x1,...,xn)t *) +(* ([xn,...,x1], t) goes to \x1 ... xn. t *) fun rlist_abs ([], body) = body | rlist_abs ((a,T)::pairs, body) = rlist_abs(pairs, Abs(a, T, body)); @@ -529,11 +529,11 @@ HS = [Hn,...,H1], params = [xm,...,x1], and B, where x1...xm are the parameters. This version (21.1.2005) REQUIRES the the parameters to be flattened, but it allows erule to work on - assumptions of the form !!x. phi. Any !! after the outermost string + assumptions of the form \x. phi. Any \ after the outermost string will be regarded as belonging to the conclusion, and left untouched. Used ONLY by assum_pairs. 
  Unless nasms<0, it can terminate the recursion early; that allows
-  erule to work on assumptions of the form P==>Q.*)
+  erule to work on assumptions of the form P\<Longrightarrow>Q.*)
fun strip_assums_imp (0, Hs, B) = (Hs, B)  (*recursion terminated by nasms*)
  | strip_assums_imp (nasms, Hs, Const("Pure.imp", _) $ H $ B) =
      strip_assums_imp (nasms-1, H::Hs, B)
@@ -546,9 +546,9 @@
(*Produces disagreement pairs, one for each assumption proof, in order.
  A is the first premise of the lifted rule, and thus has the form
-    H1 ==> ... Hk ==> B   and the pairs are (H1,B),...,(Hk,B).
+    H1 \<Longrightarrow> ... Hk \<Longrightarrow> B   and the pairs are (H1,B),...,(Hk,B).
  nasms is the number of assumptions in the original subgoal, needed when B
-    has the form B1 ==> B2: it stops B1 from being taken as an assumption. *)
+    has the form B1 \<Longrightarrow> B2: it stops B1 from being taken as an assumption. *)
fun assum_pairs(nasms,A) =
  let val (params, A') = strip_assums_all ([],A)
      val (Hs,B) = strip_assums_imp (nasms,[],A')
diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/more_thm.ML
--- a/src/Pure/more_thm.ML	Sun Feb 25 12:55:07 2018 +0000
+++ b/src/Pure/more_thm.ML	Sun Feb 25 20:05:05 2018 +0100
@@ -350,7 +350,7 @@
(** basic derived rules **)

(*Elimination of implication
-  A    A ==> B
+  A    A \<Longrightarrow> B
  ------------
        B
*)
@@ -705,7 +705,7 @@
val pretty_tags = Pretty.list "[" "]" o map pretty_tag;

fun pretty_flexpair ctxt (t, u) = Pretty.block
-  [Syntax.pretty_term ctxt t, Pretty.str " =?=", Pretty.brk 1, Syntax.pretty_term ctxt u];
+  [Syntax.pretty_term ctxt t, Pretty.str " \<equiv>\<^sup>?", Pretty.brk 1, Syntax.pretty_term ctxt u];

fun pretty_thm_raw ctxt {quote, show_hyps = show_hyps'} raw_th =
  let
diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/primitive_defs.ML
--- a/src/Pure/primitive_defs.ML	Sun Feb 25 12:55:07 2018 +0000
+++ b/src/Pure/primitive_defs.ML	Sun Feb 25 20:05:05 2018 +0100
@@ -23,7 +23,7 @@
  | term_kind (Bound _) = "bound variable "
  | term_kind _ = "";

-(*c x == t[x] to !!x. c x == t[x]*)
+(*c x \<equiv> t[x] to \<And>x. c x \<equiv> t[x]*)
fun dest_def ctxt {check_head, check_free_lhs, check_free_rhs, check_tfree} eq =
  let
    fun err msg = raise TERM (msg, [eq]);
@@ -72,7 +72,7 @@
      fold_rev close_arg args (Logic.list_all (eq_vars, (Logic.mk_equals (lhs, rhs)))))
  end;

-(*!!x. c x == t[x] to c == %x. t[x]*)
+(*\<And>x. c x \<equiv> t[x] to c \<equiv> \<lambda>x. 
t[x]*) fun abs_def eq = let val body = Term.strip_all_body eq; diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/pure_thy.ML --- a/src/Pure/pure_thy.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/pure_thy.ML Sun Feb 25 20:05:05 2018 +0100 @@ -31,14 +31,14 @@ (* application syntax variants *) val appl_syntax = - [("_appl", typ "('b => 'a) => args => logic", mixfix ("(1_/(1'(_')))", [1000, 0], 1000)), - ("_appl", typ "('b => 'a) => args => aprop", mixfix ("(1_/(1'(_')))", [1000, 0], 1000))]; + [("_appl", typ "('b \ 'a) \ args \ logic", mixfix ("(1_/(1'(_')))", [1000, 0], 1000)), + ("_appl", typ "('b \ 'a) \ args \ aprop", mixfix ("(1_/(1'(_')))", [1000, 0], 1000))]; val applC_syntax = - [("", typ "'a => cargs", Mixfix.mixfix "_"), - ("_cargs", typ "'a => cargs => cargs", mixfix ("_/ _", [1000, 1000], 1000)), - ("_applC", typ "('b => 'a) => cargs => logic", mixfix ("(1_/ _)", [1000, 1000], 999)), - ("_applC", typ "('b => 'a) => cargs => aprop", mixfix ("(1_/ _)", [1000, 1000], 999))]; + [("", typ "'a \ cargs", Mixfix.mixfix "_"), + ("_cargs", typ "'a \ cargs \ cargs", mixfix ("_/ _", [1000, 1000], 1000)), + ("_applC", typ "('b \ 'a) \ cargs \ logic", mixfix ("(1_/ _)", [1000, 1000], 999)), + ("_applC", typ "('b \ 'a) \ cargs \ aprop", mixfix ("(1_/ _)", [1000, 1000], 999))]; structure Old_Appl_Syntax = Theory_Data ( @@ -86,119 +86,119 @@ "class_name"])) #> Sign.add_syntax Syntax.mode_default (map (fn x => (x, typ "'a", NoSyn)) token_markers) #> Sign.add_syntax Syntax.mode_default - [("", typ "prop' => prop", Mixfix.mixfix "_"), - ("", typ "logic => any", Mixfix.mixfix "_"), - ("", typ "prop' => any", Mixfix.mixfix "_"), - ("", typ "logic => logic", Mixfix.mixfix "'(_')"), - ("", typ "prop' => prop'", Mixfix.mixfix "'(_')"), - ("_constrain", typ "logic => type => logic", mixfix ("_::_", [4, 0], 3)), - ("_constrain", typ "prop' => type => prop'", mixfix ("_::_", [4, 0], 3)), + [("", typ "prop' \ prop", Mixfix.mixfix "_"), + ("", typ "logic \ any", Mixfix.mixfix "_"), + ("", typ "prop' \ any", Mixfix.mixfix "_"), + ("", typ "logic \ logic", Mixfix.mixfix "'(_')"), + ("", typ "prop' \ prop'", Mixfix.mixfix "'(_')"), + ("_constrain", typ "logic \ type \ logic", mixfix ("_::_", [4, 0], 3)), + ("_constrain", typ "prop' \ type \ prop'", mixfix ("_::_", [4, 0], 3)), ("_ignore_type", typ "'a", NoSyn), - ("", typ "tid_position => type", Mixfix.mixfix "_"), - ("", typ "tvar_position => type", Mixfix.mixfix "_"), - ("", typ "type_name => type", Mixfix.mixfix "_"), - ("_type_name", typ "id => type_name", Mixfix.mixfix "_"), - ("_type_name", typ "longid => type_name", Mixfix.mixfix "_"), - ("_ofsort", typ "tid_position => sort => type", mixfix ("_::_", [1000, 0], 1000)), - ("_ofsort", typ "tvar_position => sort => type", mixfix ("_::_", [1000, 0], 1000)), - ("_dummy_ofsort", typ "sort => type", mixfix ("'_()::_", [0], 1000)), - ("", typ "class_name => sort", Mixfix.mixfix "_"), - ("_class_name", typ "id => class_name", Mixfix.mixfix "_"), - ("_class_name", typ "longid => class_name", Mixfix.mixfix "_"), + ("", typ "tid_position \ type", Mixfix.mixfix "_"), + ("", typ "tvar_position \ type", Mixfix.mixfix "_"), + ("", typ "type_name \ type", Mixfix.mixfix "_"), + ("_type_name", typ "id \ type_name", Mixfix.mixfix "_"), + ("_type_name", typ "longid \ type_name", Mixfix.mixfix "_"), + ("_ofsort", typ "tid_position \ sort \ type", mixfix ("_::_", [1000, 0], 1000)), + ("_ofsort", typ "tvar_position \ sort \ type", mixfix ("_::_", [1000, 0], 1000)), + ("_dummy_ofsort", typ "sort \ type", mixfix ("'_()::_", [0], 
1000)), + ("", typ "class_name \ sort", Mixfix.mixfix "_"), + ("_class_name", typ "id \ class_name", Mixfix.mixfix "_"), + ("_class_name", typ "longid \ class_name", Mixfix.mixfix "_"), ("_dummy_sort", typ "sort", Mixfix.mixfix "'_"), ("_topsort", typ "sort", Mixfix.mixfix "{}"), - ("_sort", typ "classes => sort", Mixfix.mixfix "{_}"), - ("", typ "class_name => classes", Mixfix.mixfix "_"), - ("_classes", typ "class_name => classes => classes", Mixfix.mixfix "_,_"), - ("_tapp", typ "type => type_name => type", mixfix ("_ _", [1000, 0], 1000)), - ("_tappl", typ "type => types => type_name => type", Mixfix.mixfix "((1'(_,/ _')) _)"), - ("", typ "type => types", Mixfix.mixfix "_"), - ("_types", typ "type => types => types", Mixfix.mixfix "_,/ _"), - ("\<^type>fun", typ "type => type => type", mixfix ("(_/ \ _)", [1, 0], 0)), - ("_bracket", typ "types => type => type", mixfix ("([_]/ \ _)", [0, 0], 0)), - ("", typ "type => type", Mixfix.mixfix "'(_')"), + ("_sort", typ "classes \ sort", Mixfix.mixfix "{_}"), + ("", typ "class_name \ classes", Mixfix.mixfix "_"), + ("_classes", typ "class_name \ classes \ classes", Mixfix.mixfix "_,_"), + ("_tapp", typ "type \ type_name \ type", mixfix ("_ _", [1000, 0], 1000)), + ("_tappl", typ "type \ types \ type_name \ type", Mixfix.mixfix "((1'(_,/ _')) _)"), + ("", typ "type \ types", Mixfix.mixfix "_"), + ("_types", typ "type \ types \ types", Mixfix.mixfix "_,/ _"), + ("\<^type>fun", typ "type \ type \ type", mixfix ("(_/ \ _)", [1, 0], 0)), + ("_bracket", typ "types \ type \ type", mixfix ("([_]/ \ _)", [0, 0], 0)), + ("", typ "type \ type", Mixfix.mixfix "'(_')"), ("\<^type>dummy", typ "type", Mixfix.mixfix "'_"), ("_type_prop", typ "'a", NoSyn), - ("_lambda", typ "pttrns => 'a => logic", mixfix ("(3\_./ _)", [0, 3], 3)), + ("_lambda", typ "pttrns \ 'a \ logic", mixfix ("(3\_./ _)", [0, 3], 3)), ("_abs", typ "'a", NoSyn), - ("", typ "'a => args", Mixfix.mixfix "_"), - ("_args", typ "'a => args => args", Mixfix.mixfix "_,/ _"), - ("", typ "id_position => idt", Mixfix.mixfix "_"), + ("", typ "'a \ args", Mixfix.mixfix "_"), + ("_args", typ "'a \ args \ args", Mixfix.mixfix "_,/ _"), + ("", typ "id_position \ idt", Mixfix.mixfix "_"), ("_idtdummy", typ "idt", Mixfix.mixfix "'_"), - ("_idtyp", typ "id_position => type => idt", mixfix ("_::_", [], 0)), - ("_idtypdummy", typ "type => idt", mixfix ("'_()::_", [], 0)), - ("", typ "idt => idt", Mixfix.mixfix "'(_')"), - ("", typ "idt => idts", Mixfix.mixfix "_"), - ("_idts", typ "idt => idts => idts", mixfix ("_/ _", [1, 0], 0)), - ("", typ "idt => pttrn", Mixfix.mixfix "_"), - ("", typ "pttrn => pttrns", Mixfix.mixfix "_"), - ("_pttrns", typ "pttrn => pttrns => pttrns", mixfix ("_/ _", [1, 0], 0)), - ("", typ "aprop => aprop", Mixfix.mixfix "'(_')"), - ("", typ "id_position => aprop", Mixfix.mixfix "_"), - ("", typ "longid_position => aprop", Mixfix.mixfix "_"), - ("", typ "var_position => aprop", Mixfix.mixfix "_"), + ("_idtyp", typ "id_position \ type \ idt", mixfix ("_::_", [], 0)), + ("_idtypdummy", typ "type \ idt", mixfix ("'_()::_", [], 0)), + ("", typ "idt \ idt", Mixfix.mixfix "'(_')"), + ("", typ "idt \ idts", Mixfix.mixfix "_"), + ("_idts", typ "idt \ idts \ idts", mixfix ("_/ _", [1, 0], 0)), + ("", typ "idt \ pttrn", Mixfix.mixfix "_"), + ("", typ "pttrn \ pttrns", Mixfix.mixfix "_"), + ("_pttrns", typ "pttrn \ pttrns \ pttrns", mixfix ("_/ _", [1, 0], 0)), + ("", typ "aprop \ aprop", Mixfix.mixfix "'(_')"), + ("", typ "id_position \ aprop", Mixfix.mixfix "_"), + ("", typ "longid_position \ 
aprop", Mixfix.mixfix "_"), + ("", typ "var_position \ aprop", Mixfix.mixfix "_"), ("_DDDOT", typ "aprop", Mixfix.mixfix "\"), - ("_aprop", typ "aprop => prop", Mixfix.mixfix "PROP _"), - ("_asm", typ "prop => asms", Mixfix.mixfix "_"), - ("_asms", typ "prop => asms => asms", Mixfix.mixfix "_;/ _"), - ("_bigimpl", typ "asms => prop => prop", mixfix ("((1\_\)/ \ _)", [0, 1], 1)), - ("_ofclass", typ "type => logic => prop", Mixfix.mixfix "(1OFCLASS/(1'(_,/ _')))"), + ("_aprop", typ "aprop \ prop", Mixfix.mixfix "PROP _"), + ("_asm", typ "prop \ asms", Mixfix.mixfix "_"), + ("_asms", typ "prop \ asms \ asms", Mixfix.mixfix "_;/ _"), + ("_bigimpl", typ "asms \ prop \ prop", mixfix ("((1\_\)/ \ _)", [0, 1], 1)), + ("_ofclass", typ "type \ logic \ prop", Mixfix.mixfix "(1OFCLASS/(1'(_,/ _')))"), ("_mk_ofclass", typ "dummy", NoSyn), - ("_TYPE", typ "type => logic", Mixfix.mixfix "(1TYPE/(1'(_')))"), - ("", typ "id_position => logic", Mixfix.mixfix "_"), - ("", typ "longid_position => logic", Mixfix.mixfix "_"), - ("", typ "var_position => logic", Mixfix.mixfix "_"), + ("_TYPE", typ "type \ logic", Mixfix.mixfix "(1TYPE/(1'(_')))"), + ("", typ "id_position \ logic", Mixfix.mixfix "_"), + ("", typ "longid_position \ logic", Mixfix.mixfix "_"), + ("", typ "var_position \ logic", Mixfix.mixfix "_"), ("_DDDOT", typ "logic", Mixfix.mixfix "\"), ("_strip_positions", typ "'a", NoSyn), - ("_position", typ "num_token => num_position", Mixfix.mixfix "_"), - ("_position", typ "float_token => float_position", Mixfix.mixfix "_"), - ("_constify", typ "num_position => num_const", Mixfix.mixfix "_"), - ("_constify", typ "float_position => float_const", Mixfix.mixfix "_"), - ("_index", typ "logic => index", Mixfix.mixfix "(\unbreakable\\<^bsub>_\<^esub>)"), + ("_position", typ "num_token \ num_position", Mixfix.mixfix "_"), + ("_position", typ "float_token \ float_position", Mixfix.mixfix "_"), + ("_constify", typ "num_position \ num_const", Mixfix.mixfix "_"), + ("_constify", typ "float_position \ float_const", Mixfix.mixfix "_"), + ("_index", typ "logic \ index", Mixfix.mixfix "(\unbreakable\\<^bsub>_\<^esub>)"), ("_indexdefault", typ "index", Mixfix.mixfix ""), ("_indexvar", typ "index", Mixfix.mixfix "'\"), - ("_struct", typ "index => logic", NoSyn), + ("_struct", typ "index \ logic", NoSyn), ("_update_name", typ "idt", NoSyn), ("_constrainAbs", typ "'a", NoSyn), - ("_position_sort", typ "tid => tid_position", Mixfix.mixfix "_"), - ("_position_sort", typ "tvar => tvar_position", Mixfix.mixfix "_"), - ("_position", typ "id => id_position", Mixfix.mixfix "_"), - ("_position", typ "longid => longid_position", Mixfix.mixfix "_"), - ("_position", typ "var => var_position", Mixfix.mixfix "_"), - ("_position", typ "str_token => str_position", Mixfix.mixfix "_"), - ("_position", typ "string_token => string_position", Mixfix.mixfix "_"), - ("_position", typ "cartouche => cartouche_position", Mixfix.mixfix "_"), + ("_position_sort", typ "tid \ tid_position", Mixfix.mixfix "_"), + ("_position_sort", typ "tvar \ tvar_position", Mixfix.mixfix "_"), + ("_position", typ "id \ id_position", Mixfix.mixfix "_"), + ("_position", typ "longid \ longid_position", Mixfix.mixfix "_"), + ("_position", typ "var \ var_position", Mixfix.mixfix "_"), + ("_position", typ "str_token \ str_position", Mixfix.mixfix "_"), + ("_position", typ "string_token \ string_position", Mixfix.mixfix "_"), + ("_position", typ "cartouche \ cartouche_position", Mixfix.mixfix "_"), ("_type_constraint_", typ "'a", NoSyn), - ("_context_const", typ "id_position 
=> logic", Mixfix.mixfix "CONST _"), - ("_context_const", typ "id_position => aprop", Mixfix.mixfix "CONST _"), - ("_context_const", typ "longid_position => logic", Mixfix.mixfix "CONST _"), - ("_context_const", typ "longid_position => aprop", Mixfix.mixfix "CONST _"), - ("_context_xconst", typ "id_position => logic", Mixfix.mixfix "XCONST _"), - ("_context_xconst", typ "id_position => aprop", Mixfix.mixfix "XCONST _"), - ("_context_xconst", typ "longid_position => logic", Mixfix.mixfix "XCONST _"), - ("_context_xconst", typ "longid_position => aprop", Mixfix.mixfix "XCONST _"), + ("_context_const", typ "id_position \ logic", Mixfix.mixfix "CONST _"), + ("_context_const", typ "id_position \ aprop", Mixfix.mixfix "CONST _"), + ("_context_const", typ "longid_position \ logic", Mixfix.mixfix "CONST _"), + ("_context_const", typ "longid_position \ aprop", Mixfix.mixfix "CONST _"), + ("_context_xconst", typ "id_position \ logic", Mixfix.mixfix "XCONST _"), + ("_context_xconst", typ "id_position \ aprop", Mixfix.mixfix "XCONST _"), + ("_context_xconst", typ "longid_position \ logic", Mixfix.mixfix "XCONST _"), + ("_context_xconst", typ "longid_position \ aprop", Mixfix.mixfix "XCONST _"), (const "Pure.dummy_pattern", typ "aprop", Mixfix.mixfix "'_"), - ("_sort_constraint", typ "type => prop", Mixfix.mixfix "(1SORT'_CONSTRAINT/(1'(_')))"), - (const "Pure.term", typ "logic => prop", Mixfix.mixfix "TERM _"), - (const "Pure.conjunction", typ "prop => prop => prop", infixr_ ("&&&", 2))] + ("_sort_constraint", typ "type \ prop", Mixfix.mixfix "(1SORT'_CONSTRAINT/(1'(_')))"), + (const "Pure.term", typ "logic \ prop", Mixfix.mixfix "TERM _"), + (const "Pure.conjunction", typ "prop \ prop \ prop", infixr_ ("&&&", 2))] #> Sign.add_syntax Syntax.mode_default applC_syntax #> Sign.add_syntax (Print_Mode.ASCII, true) - [(tycon "fun", typ "type => type => type", mixfix ("(_/ => _)", [1, 0], 0)), - ("_bracket", typ "types => type => type", mixfix ("([_]/ => _)", [0, 0], 0)), - ("_lambda", typ "pttrns => 'a => logic", mixfix ("(3%_./ _)", [0, 3], 3)), - (const "Pure.eq", typ "'a => 'a => prop", infix_ ("==", 2)), - (const "Pure.all_binder", typ "idts => prop => prop", mixfix ("(3!!_./ _)", [0, 0], 0)), - (const "Pure.imp", typ "prop => prop => prop", infixr_ ("==>", 1)), + [(tycon "fun", typ "type \ type \ type", mixfix ("(_/ => _)", [1, 0], 0)), + ("_bracket", typ "types \ type \ type", mixfix ("([_]/ => _)", [0, 0], 0)), + ("_lambda", typ "pttrns \ 'a \ logic", mixfix ("(3%_./ _)", [0, 3], 3)), + (const "Pure.eq", typ "'a \ 'a \ prop", infix_ ("==", 2)), + (const "Pure.all_binder", typ "idts \ prop \ prop", mixfix ("(3!!_./ _)", [0, 0], 0)), + (const "Pure.imp", typ "prop \ prop \ prop", infixr_ ("==>", 1)), ("_DDDOT", typ "aprop", Mixfix.mixfix "..."), - ("_bigimpl", typ "asms => prop => prop", mixfix ("((3[| _ |])/ ==> _)", [0, 1], 1)), + ("_bigimpl", typ "asms \ prop \ prop", mixfix ("((3[| _ |])/ ==> _)", [0, 1], 1)), ("_DDDOT", typ "logic", Mixfix.mixfix "...")] #> Sign.add_syntax ("", false) - [(const "Pure.prop", typ "prop => prop", mixfix ("_", [0], 0))] + [(const "Pure.prop", typ "prop \ prop", mixfix ("_", [0], 0))] #> Sign.add_consts - [(qualify (Binding.make ("eq", \<^here>)), typ "'a => 'a => prop", infix_ ("\", 2)), - (qualify (Binding.make ("imp", \<^here>)), typ "prop => prop => prop", infixr_ ("\", 1)), - (qualify (Binding.make ("all", \<^here>)), typ "('a => prop) => prop", binder ("\", 0, 0)), - (qualify (Binding.make ("prop", \<^here>)), typ "prop => prop", NoSyn), + [(qualify (Binding.make 
("eq", \<^here>)), typ "'a \ 'a \ prop", infix_ ("\", 2)), + (qualify (Binding.make ("imp", \<^here>)), typ "prop \ prop \ prop", infixr_ ("\", 1)), + (qualify (Binding.make ("all", \<^here>)), typ "('a \ prop) \ prop", binder ("\", 0, 0)), + (qualify (Binding.make ("prop", \<^here>)), typ "prop \ prop", NoSyn), (qualify (Binding.make ("type", \<^here>)), typ "'a itself", NoSyn), (qualify (Binding.make ("dummy_pattern", \<^here>)), typ "'a", Mixfix.mixfix "'_")] #> Theory.add_deps_global "Pure.eq" ((Defs.Const, "Pure.eq"), [typ "'a"]) [] @@ -210,20 +210,20 @@ #> Sign.parse_translation Syntax_Trans.pure_parse_translation #> Sign.print_ast_translation Syntax_Trans.pure_print_ast_translation #> Sign.add_consts - [(qualify (Binding.make ("term", \<^here>)), typ "'a => prop", NoSyn), - (qualify (Binding.make ("sort_constraint", \<^here>)), typ "'a itself => prop", NoSyn), - (qualify (Binding.make ("conjunction", \<^here>)), typ "prop => prop => prop", NoSyn)] + [(qualify (Binding.make ("term", \<^here>)), typ "'a \ prop", NoSyn), + (qualify (Binding.make ("sort_constraint", \<^here>)), typ "'a itself \ prop", NoSyn), + (qualify (Binding.make ("conjunction", \<^here>)), typ "prop \ prop \ prop", NoSyn)] #> Sign.local_path #> (Global_Theory.add_defs false o map Thm.no_attributes) [(Binding.make ("prop_def", \<^here>), - prop "(CONST Pure.prop :: prop => prop) (A::prop) == A::prop"), + prop "(CONST Pure.prop :: prop \ prop) (A::prop) \ A::prop"), (Binding.make ("term_def", \<^here>), - prop "(CONST Pure.term :: 'a => prop) (x::'a) == (!!A::prop. A ==> A)"), + prop "(CONST Pure.term :: 'a \ prop) (x::'a) \ (\A::prop. A \ A)"), (Binding.make ("sort_constraint_def", \<^here>), - prop "(CONST Pure.sort_constraint :: 'a itself => prop) (CONST Pure.type :: 'a itself) ==\ - \ (CONST Pure.term :: 'a itself => prop) (CONST Pure.type :: 'a itself)"), + prop "(CONST Pure.sort_constraint :: 'a itself \ prop) (CONST Pure.type :: 'a itself) \\ + \ (CONST Pure.term :: 'a itself \ prop) (CONST Pure.type :: 'a itself)"), (Binding.make ("conjunction_def", \<^here>), - prop "(A &&& B) == (!!C::prop. (A ==> B ==> C) ==> C)")] #> snd + prop "(A &&& B) \ (\C::prop. (A \ B \ C) \ C)")] #> snd #> fold (fn (a, prop) => snd o Thm.add_axiom_global (Binding.make (a, \<^here>), prop)) Proofterm.equality_axms); diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/raw_simplifier.ML --- a/src/Pure/raw_simplifier.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/raw_simplifier.ML Sun Feb 25 20:05:05 2018 +0100 @@ -208,7 +208,7 @@ null prems andalso Pattern.matches (Proof_Context.theory_of ctxt) (lhs, rhs) (*the condition "null prems" is necessary because conditional rewrites with extra variables in the conditions may terminate although - the rhs is an instance of the lhs; example: ?m < ?n ==> f(?n) == f(?m)*) + the rhs is an instance of the lhs; example: ?m < ?n \ f ?n \ f ?m *) orelse is_Const lhs andalso not (is_Const rhs); @@ -254,8 +254,8 @@ mk_rews: mk: turn simplification thms into rewrite rules; mk_cong: prepare congruence rules; - mk_sym: turn == around; - mk_eq_True: turn P into P == True; + mk_sym: turn \ around; + mk_eq_True: turn P into P \ True; term_ord: for ordered rewriting;*) datatype simpset = @@ -1146,7 +1146,7 @@ NONE => appc () | SOME cong => (*post processing: some partial applications h t1 ... tj, j <= length ts, - may be a redex. Example: map (%x. x) = (%xs. xs) wrt map_cong*) + may be a redex. Example: map (\x. x) = (\xs. 
xs) wrt map_cong*) (let val thm = congc (prover ctxt) ctxt maxidx cong t0; val t = the_default t0 (Option.map Thm.rhs_of thm); @@ -1290,14 +1290,14 @@ in try_botc end; -(* Meta-rewriting: rewrites t to u and returns the theorem t==u *) +(* Meta-rewriting: rewrites t to u and returns the theorem t \ u *) (* Parameters: mode = (simplify A, use A in simplifying B, use prems of B (if B is again a meta-impl.) to simplify A) - when simplifying A ==> B + when simplifying A \ B prover: how to solve premises in conditional rewrites and congruences *) @@ -1369,7 +1369,7 @@ val lhs_of_thm = #1 o Logic.dest_equals o Thm.prop_of; -(*folding should handle critical pairs! E.g. K == Inl(0), S == Inr(Inl(0)) +(*folding should handle critical pairs! E.g. K \ Inl 0, S \ Inr (Inl 0) Returns longest lhs first to avoid folding its subexpressions.*) fun sort_lhs_depths defs = let val keylist = AList.make (term_depth o lhs_of_thm) defs @@ -1382,7 +1382,7 @@ fun fold_goals_tac ctxt defs = EVERY (map (rewrite_goals_tac ctxt) (rev_defs defs)); -(* HHF normal form: !! before ==>, outermost !! generalized *) +(* HHF normal form: \ before \, outermost \ generalized *) local diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/tactic.ML --- a/src/Pure/tactic.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/tactic.ML Sun Feb 25 20:05:05 2018 +0100 @@ -107,8 +107,8 @@ fun compose_tac ctxt arg i = PRIMSEQ (Thm.bicompose (SOME ctxt) {flatten = true, match = false, incremented = false} arg i); -(*Converts a "destruct" rule like P&Q==>P to an "elimination" rule - like [| P&Q; P==>R |] ==> R *) +(*Converts a "destruct" rule like P \ Q \ P to an "elimination" rule + like \P \ Q; P \ R\ \ R *) fun make_elim rl = zero_var_indexes (rl RS revcut_rl); (*Attack subgoal i by resolution, using flags to indicate elimination rules*) diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/term.ML --- a/src/Pure/term.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/term.ML Sun Feb 25 20:05:05 2018 +0100 @@ -598,11 +598,11 @@ val propT : typ = Type ("prop",[]); -(*maps !!x1...xn. t to t*) +(*maps \x1...xn. t to t*) fun strip_all_body (Const ("Pure.all", _) $ Abs (_, _, t)) = strip_all_body t | strip_all_body t = t; -(*maps !!x1...xn. t to [x1, ..., xn]*) +(*maps \x1...xn. t to [x1, ..., xn]*) fun strip_all_vars (Const ("Pure.all", _) $ Abs (a, T, t)) = (a, T) :: strip_all_vars t | strip_all_vars t = []; @@ -672,7 +672,7 @@ (*Substitute arguments for loose bound variables. Beta-reduction of arg(n-1)...arg0 into t replacing (Bound i) with (argi). - Note that for ((%x y. c) a b), the bound vars in c are x=1 and y=0 + Note that for ((\x y. c) a b), the bound vars in c are x=1 and y=0 and the appropriate call is subst_bounds([b,a], c) . Loose bound variables >=n are reduced by "n" to compensate for the disappearance of lambdas. diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/thm.ML --- a/src/Pure/thm.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/thm.ML Sun Feb 25 20:05:05 2018 +0100 @@ -769,7 +769,7 @@ : B ------- - A ==> B + A \ B *) fun implies_intr (ct as Cterm {t = A, T, maxidx = maxidxA, sorts, ...}) @@ -788,7 +788,7 @@ (*Implication elimination - A ==> B A + A \ B A ------------ B *) @@ -819,7 +819,7 @@ : A ------ - !!x. A + \x. A *) fun forall_intr (ct as Cterm {t = x, T, sorts, ...}) @@ -846,7 +846,7 @@ end; (*Forall elimination - !!x. A + \x. 
A ------ A[t/x] *) @@ -872,7 +872,7 @@ (* Equality *) (*Reflexivity - t == t + t \ t *) fun reflexive (Cterm {cert, t, T = _, maxidx, sorts}) = Thm (deriv_rule0 Proofterm.reflexive, @@ -885,9 +885,9 @@ prop = Logic.mk_equals (t, t)}); (*Symmetry - t == u + t \ u ------ - u == t + u \ t *) fun symmetric (th as Thm (der, {cert, maxidx, shyps, hyps, tpairs, prop, ...})) = (case prop of @@ -903,9 +903,9 @@ | _ => raise THM ("symmetric", 0, [th])); (*Transitivity - t1 == u u == t2 + t1 \ u u \ t2 ------------------ - t1 == t2 + t1 \ t2 *) fun transitive th1 th2 = let @@ -931,7 +931,7 @@ end; (*Beta-conversion - (%x. t)(u) == t[u/x] + (\x. t) u \ t[u/x] fully beta-reduces the term if full = true *) fun beta_conversion full (Cterm {cert, t, T = _, maxidx, sorts}) = @@ -973,9 +973,9 @@ (*The abstraction rule. The Free or Var x must not be free in the hypotheses. The bound variable will be named "a" (since x will be something like x320) - t == u + t \ u -------------- - %x. t == %x. u + \x. t \ \x. u *) fun abstract_rule a (Cterm {t = x, T, sorts, ...}) @@ -1005,9 +1005,9 @@ end; (*The combination rule - f == g t == u - -------------- - f t == g u + f \ g t \ u + ------------- + f t \ g u *) fun combination th1 th2 = let @@ -1039,9 +1039,9 @@ end; (*Equality introduction - A ==> B B ==> A + A \ B B \ A ---------------- - A == B + A \ B *) fun equal_intr th1 th2 = let @@ -1067,7 +1067,7 @@ end; (*The equal propositions rule - A == B A + A \ B A --------- B *) @@ -1110,7 +1110,7 @@ else let val tpairs' = tpairs |> map (apply2 (Envir.norm_term env)) - (*remove trivial tpairs, of the form t==t*) + (*remove trivial tpairs, of the form t \ t*) |> filter_out (op aconv); val der' = deriv_rule1 (Proofterm.norm_proof' env) der; val prop' = Envir.norm_term env prop; @@ -1269,7 +1269,7 @@ end; -(*The trivial implication A ==> A, justified by assume and forall rules. +(*The trivial implication A \ A, justified by assume and forall rules. A can contain Vars, not so for assume!*) fun trivial (Cterm {cert, t = A, T, maxidx, sorts}) = if T <> propT then @@ -1673,7 +1673,7 @@ and nlift = Logic.count_prems (strip_all_body Bi) + (if eres_flg then ~1 else 0) val (context, cert) = make_context_certificate [state, orule] opt_ctxt (join_certificate2 (state, orule)); - (*Add new theorem with prop = '[| Bs; As |] ==> C' to thq*) + (*Add new theorem with prop = "\Bs; As\ \ C" to thq*) fun addth A (As, oldAs, rder', n) ((env, tpairs), thq) = let val normt = Envir.norm_term env; (*perform minimal copying here by examining env*) diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/unify.ML --- a/src/Pure/unify.ML Sun Feb 25 12:55:07 2018 +0000 +++ b/src/Pure/unify.ML Sun Feb 25 20:05:05 2018 +0100 @@ -243,7 +243,7 @@ (*flexflex: the flex-flex pairs, flexrigid: the flex-rigid pairs Does not perform assignments for flex-flex pairs: may create nonrigid paths, which prevent other assignments. - Does not even identify Vars in dpairs such as ?a =?= ?b; an attempt to + Does not even identify Vars in dpairs such as ?a \\<^sup>? ?b; an attempt to do so caused numerous problems with no compensating advantage. *) fun SIMPL0 context dp0 (env,flexflex,flexrigid) : Envir.env * dpair list * dpair list = @@ -426,7 +426,7 @@ (*If an argument contains a banned Bound, then it should be deleted. But if the only path is flexible, this is difficult; the code gives up! - In %x y.?a(x) =?= %x y.?b(?c(y)) should we instantiate ?b or ?c *) + In \x y. ?a x \\<^sup>? \x y. 
?b (?c y) should we instantiate ?b or ?c *)
exception CHANGE_FAIL;   (*flexible occurrence of banned variable, or other reason to quit*)
@@ -570,7 +570,7 @@
(*Print a tracing message + list of dpairs.
-  In t==u print u first because it may be rigid or flexible --
+  In t \<equiv> u print u first because it may be rigid or flexible --
  t is always flexible.*)
fun print_dpairs context msg (env, dpairs) =
  if Context_Position.is_visible_generic context then
@@ -580,7 +580,7 @@
      val ctxt = Context.proof_of context;
      fun termT t =
        Syntax.pretty_term ctxt (Envir.norm_term env (Logic.rlist_abs (rbinder, t)));
-      val prt = Pretty.blk (0, [termT u, Pretty.str " =?=", Pretty.brk 1, termT t]);
+      val prt = Pretty.blk (0, [termT u, Pretty.str " \<equiv>\<^sup>?", Pretty.brk 1, termT t]);
    in tracing (Pretty.string_of prt) end;
  in tracing msg; List.app pdp dpairs end
  else ();
@@ -636,12 +636,12 @@
(*Eliminate a flex-flex pair by the trivial substitution, see Huet (1975)
-  Unifies ?f(t1...rm) with ?g(u1...un) by ?f -> %x1...xm.?a, ?g -> %x1...xn.?a
-  Unfortunately, unifies ?f(t,u) with ?g(t,u) by ?f, ?g -> %(x,y)?a,
+  Unifies ?f t1 ... rm with ?g u1 ... un by ?f -> \<lambda>x1...xm. ?a, ?g -> \<lambda>x1...xn. ?a
+  Unfortunately, unifies ?f t u with ?g t u by ?f, ?g -> \<lambda>x y. ?a,
  though just ?g->?f is a more general unifier.
  Unlike Huet (1975), does not smash together all variables of same type --
  requires more work yet gives a less general unifier (fewer variables).
-  Handles ?f(t1...rm) with ?f(u1...um) to avoid multiple updates. *)
+  Handles ?f t1 ... rm with ?f u1 ... um to avoid multiple updates. *)
fun smash_flexflex1 (t, u) env : Envir.env =
  let
    val vT as (v, T) = var_head_of (env, t)
diff -r b342f96e47b5 -r 0cd2fd0c2dcf src/Pure/variable.ML
--- a/src/Pure/variable.ML	Sun Feb 25 12:55:07 2018 +0000
+++ b/src/Pure/variable.ML	Sun Feb 25 20:05:05 2018 +0100
@@ -642,7 +642,7 @@
val trade = gen_trade (import true) export;

-(* focus on outermost parameters: !!x y z. B *)
+(* focus on outermost parameters: \<And>x y z. B *)
val bound_focus =
  Config.bool (Config.declare ("bound_focus", \<^here>) (K (Config.Bool false)));
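
For reference, the grouped 'abbrevs' header syntax introduced by this changeset (grammar change in Spec.thy above, parsers in thy_header.ML and thy_header.scala) lets several input patterns share one replacement text, and one pattern carry several replacement texts. The following minimal theory header is an illustrative sketch only, assembled from the Simps_Case_Conv.thy and Transitive_Closure.thy hunks above; the theory name "Demo" is hypothetical and not part of the changeset:

    theory Demo
      imports Main
      abbrevs "simps_of_case" "case_of_simps" = ""
        and "^*" = "\<^sup>*" "\<^sup>*\<^sup>*"
    begin

    end

As the parsers above show (map_product in thy_header.ML, the for-comprehension in thy_header.scala), a group with m patterns and n replacement texts expands to all m * n pattern/replacement pairs.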