# HG changeset patch # User paulson # Date 808137375 -7200 # Node ID c9f7848bc5ee5ffab8125eb8be3698e2948b1e7a # Parent e9c01f251f5d70c5dfa6f195259d952d1e18b560 Mateja Jamnik's theory to html converter diff -r e9c01f251f5d -r c9f7848bc5ee src/Pure/Thy/blue_arrow.gif Binary file src/Pure/Thy/blue_arrow.gif has changed diff -r e9c01f251f5d -r c9f7848bc5ee src/Pure/Thy/chart.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Pure/Thy/chart.ML Fri Aug 11 12:36:15 1995 +0200 @@ -0,0 +1,377 @@ +(* ID: $Id$ + Author: Mateja Jamnik, University of Cambridge + Copyright 1995 University of Cambridge + + Generator of the dependency charts of Isabelle theories. +*) + + +(* -------------------- + Declaration of the variables carrying the name + of the location of source files: + + path - location where the generated HTML files + should be stored + srcpath - location where the source files are stored + (ie. the graphics files) + -------------------- *) + +val htmlpath = "/homes/lcp/html_isa/"; +val srcpath = "/homes/lcp/Isa/Theory/"; + + + +(* -------------------- + Notices that display the legend for every dependency + chart, and the link back to the main listing of all + theories. + -------------------- *) + +val notice1 = +"
\n"^"The name of the file is linked to the theory file
\n"; + +val notice2 = +"(sub)\n"^ +" stands for subtheories (child theories)
\n"^ + +"(super)\n"^ +" stands for supertheories (parent theories)
\n
\n"; + + +val back = "Back\n"^ +" to the original listing of all the theories.
\n
\n
\n"; + + + +(* -------------------- + Get a list of all theory files in Isabelle + that have been loaded. + -------------------- *) + +print_depth 1000; + +val theory_list = !loaded_thys; + +val files = Symtab.alist_of (theory_list); + + + +(* -------------------- + Get a list of all theory files and their children. + -------------------- *) + +fun subtheory (tname, ThyInfo{children,...}) = (tname,children); + +val subtheories = map subtheory (files); + + + +(* -------------------- + Extract a list of all theory files only. + -------------------- *) + +fun theory ( [], list ) = list + | theory ( (string, _)::rest, list ) = theory(rest, string::list); + + + +(* -------------------- + Use quick sort to sort the theories in + the alphabetic order. Variable a as the first + element of the list is used as a pivot value. + -------------------- *) + +fun quick [] = [] + | quick [x] = [x] + | quick (a::bs) = let + fun part (l,r,[]) : string list = (quick l) @ (a::quick r) + | part (l,r,x::xs) = if x<=a then part(x::l, r, xs) + else part(l, x::r, xs) + in part([], [], bs) end; + +val theories = quick(theory(subtheories,[])); + + + +(* -------------------- + Insert the identifier of the supertheory + matrix for every row. Also insert elements + into the list of identifier's parents in + alphabetic order. + -------------------- *) + +fun listins ( l, [], x:string ) = l@[x] + | listins ( l, e::r, x ) = if x>e then listins( l@[e], r, x ) + else if x<>e then l@[x,e]@r + else l@[x]@r; + + + +(* -------------------- + Arguments: + t - accumulator of the resulting supertheory matrix + result - temporary resulting matrix (up until now) + (e,ident) - a list of identifier and its child + The child e of subtheory matrix becomes identifier ident of + the supertheory matrix. The identifier ident of subtheory + matrix becomes the parent elem of supertheory matrix. + Call function: + - listins + -------------------- *) + +fun matins ( t, [], (ident:string, elem:string) ) = t@[(ident,[elem])] + | matins ( t, (x:string,y:string list)::b, + (ident:string, elem:string) ) = + if x=ident then t@[(x,listins([],y,elem))]@b + else matins (t@[(x,y)], b, (ident,elem)); + + + +(* -------------------- + Instantiate the identifier ident, and the list of + its children (e::l), where e is its first child. + Call function: + - matins + -------------------- *) + +fun rowins ( (ident, []), result ) = result + | rowins ( (ident, e::l), result ) = + rowins ( (ident, l), matins([],result,(e,ident)) ); + + + +(* -------------------- + Extract recursively one row at a time of the + subtheory matrix. + Call function: + -rowins + -------------------- *) + +fun matinv ( [], result ) = result + | matinv ( l::b, result ) = matinv( b, rowins(l, result) ); + + + +(* -------------------- + Holds the value of supertheory matrix. + Call function: + -matinv + -------------------- *) + +val supertheories = matinv ( subtheories, [] ); + + + + +(* ---------------------------------------------------------------------- *) + + + + +(* -------------------- + Entries for the main chart of all the theories. + Every theory name is a link to the theory definition. + Down arrow is a link to the subtheory chart of that theory. + Up arrow is a link to the supertheory chart of that theory. + -------------------- *) + +fun entries x = "\n"^ + "\n(sub)\n"^ + + "\n(super) \n"^ + ""^x^"
\n"; + +fun all_list x = map entries x; + + + +(* -------------------- + Convert a list of characters and strings + into a string. + -------------------- *) + +fun join ([],res) = res + | join (x::xs,res) = join(xs,res^x); + + + +val bottom = +"
\n\n"^ +"\n\n"; + + + +(* -------------------- + Find the row of the matrix into which + the parent (or children) theories + are going to be inserted. + -------------------- *) + +fun findline (_,[]) = [] + | findline (th:string,(id,row:string list)::mat) = + if th=id then row else findline(th,mat); + + + +(* -------------------- + Once a parent (or child) theory has been identified + make an entry for it with appropriate indentation and with + further links to its parent (or child) dependency chart. + It contains the subfunction: + - maketableentry + -------------------- *) + +fun dependencies (th, mat) = + let + val stringlist = findline(th,mat); + fun maketableentry name = + "
  • "^name^"\n"^ + " \n(sub) \n"^ + "\n(super)\n"^ + "\n" + in + map maketableentry stringlist + end; + + + +(* -------------------- + Create individual super- and sub-theory charts for + each theory. Store them in separate files. + It contains the subfunction: + - page + -------------------- *) + +fun pages ([],_,_,_) = 0 + | pages (th::theories,theorymatrix,suffix,headline) = + let + fun page th = + let + + (* Open files for output of individual charts. *) + + val outf_each = open_out (htmlpath^th^"_"^suffix^".html") + in + + (* Recursively write to the output files the *) + (* dependent theories with appropriate links *) + (* depending upon the suffix. *) + + output(outf_each, + + (* Heading of each dependency chart. *) + + "\n"^ + "

    "^headline^"

    \n\n"^notice1^notice2^back^ + "\n"^""^th^"\n"^ + (if suffix="sup" + then ("\n(sub)\n") + else ("\n(super)\n"))^ + "\n"^bottom); + + (* Close for output the individual *) + (* dependency charts. *) + + close_out (outf_each) + end; + in + + (* Anti-bugging technique to follow the *) + (* execution of the program. *) + + output(std_out,"theory: "^th^"\n"); + + + (* The main driver for the creation of the dependencies *) + (* which calls the functions page and pages. This will *) + (* go into depth of dependency of each theory with the *) + (* appropriate indentation. *) + + page(th); + pages(theories,theorymatrix,suffix,headline) + end; + + + + +(* ---------------------------------------------------------------------- *) + + + + +(* -------------------- + Generate the main chart 00-chart.html containing + the listing of all the theories and the links to charts. + -------------------- *) + +val outtext = htmlpath^"00-chart.html"; +val outfile = open_out outtext; + +val head = "\n\n"^outtext^ + "\n\n\n"^"\n"; + +val title = "

    Dependency Diagram

    \n\n"^notice1^notice2; +val page1part1 = "

    Subtheories and Supertheories

    \n
    \n\n"^ + join(all_list(theories),"")^"
    \n"; +val page1part3 = bottom; + +val tail = "\n"^""; + +output(outfile,head); +output(outfile,title); +output(outfile,page1part1); +output(outfile,page1part3); + + + +(* ---------------------------------------------------------------------- *) + + + +(* -------------------- + The main driver to create individual super- and sub-theory + charts for each individual theory. The arguments passed are: + - the list of all the theories + - the super- or sub-theory matrix + - the appropriate suffix sup or sub + - the title of the chart + It calls the function: + - pages + -------------------- *) + +pages(theories,subtheories,"sub","Subtheories"); +pages(theories,supertheories,"sup","Supertheories"); +output(outfile,tail); + + + +(* -------------------- + Close file for output. + -------------------- *) +close_out(outfile); diff -r e9c01f251f5d -r c9f7848bc5ee src/Pure/Thy/red_arrow.gif Binary file src/Pure/Thy/red_arrow.gif has changed diff -r e9c01f251f5d -r c9f7848bc5ee src/Pure/Thy/to_html.ML --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/src/Pure/Thy/to_html.ML Fri Aug 11 12:36:15 1995 +0200 @@ -0,0 +1,273 @@ +(* ID: $Id$ + Author: Mateja Jamnik, University of Cambridge + Copyright 1995 University of Cambridge + + Convert the Isabelle theory files to .html format for hypertext browsing +*) + +val htmlpath = "/homes/lcp/html_isa/"; + +(* -------------------- + Exception declaration in case that there is a lexical + error occurs. + -------------------- *) + +exception lex_err; + +(* -------------------- + Get the list of all the theory files and the + path to access them in their directories + -------------------- *) + +print_depth 1000; +val theory_list = !loaded_thys; +val files = Symtab.alist_of (theory_list); + +(* -------------------- + The function move_eachf is going to move all the theory + files in isabelle to their html format. It takes + for arguments the name of the theory (tname) and + other theory information such as path (path), theorems (thms) + proved, ... It is the main driver of the program, and basically + contains all other functions. It contains the following + subfunctions: + - create_pure + - gettext + - move_onef + - transform + -------------------- *) + +fun move_eachf (tname, ThyInfo{path,thms,...}) = + let val intext = path^tname^".thy"; + val outtext = htmlpath^tname^".html"; + +(* -------------------- + The function gettext takes an instrem from the auxilliary + file .thy whose name and path are specified in the existent + Isabelle structure, treats the inputted string with all the + necessary modifications for html format and puts it into the + file with the same name but .html extension. The subfunctions + it contains are: + - after_first_c + - filter + - first_comment + - format_link + - getstring + - get_word + - he + - html + - html_escape + - insert_link + - leading_comment + - lc_exists + - modify_body + - remove_lc + - rest + - thy_name + -------------------- *) + + fun gettext is = + let + val head = "\n\n"^intext^ + "\n\n\n"^"\n"; + val title = "

    "^tname^".thy"^"

    \n\n"^ + "
    \nDependency Chart\n"^ + ": display of dependencies between the theories.\n"^ + "
    \n
    \n\n
    \n";
    +		val tail = "
    "^"\n"^""; + + + (* -------------------- + The function he (html escape) is going to take + every single characterof the input stream and check + for ">", "<" and "&" characters that have a special + meaning in html and convert them into their corresponding + escape sequences. + -------------------- *) + fun he [] r = r + | he ("<"::sl) r = he sl (r@["&","l","t"]) + | he (">"::sl) r = he sl (r@["&","g","t"]) + | he ("&"::sl) r = he sl (r@["&","a","m","p"]) + | he (c::sl) r = he sl (r@[c]); + + fun html_escape sl = he sl []; + + (* -------------------- + The function first_comment is a pattern checking for + nested comments. The argument d will keep track of how + deep we are in the nested comment. The function will + return the first comment extracted from the input stream, + so that we can copy it into the output stream. Lexical + error is raised if we open an empty file to read from. + -------------------- *) + fun first_comment ("*"::")"::cs) co 1 = co@["*",")"] + | first_comment ("*"::")"::cs) co d = + first_comment cs (co@["*",")"]) (d-1) + | first_comment ("("::"*"::cs) co d = + first_comment cs (co@["(","*"]) (d+1) + | first_comment (c::cs) co d = + if d>0 then first_comment cs (co@[c]) d + else first_comment cs co d + | first_comment [] _ _ = raise lex_err; + + (* -------------------- + The function lc_exists leading comment exists?) + checks whether in the input file at the very beginning + there is a comment, which needs to be extracted and put + into the output file. We also need to check whether there + might be a space, a new line or a tab, which do not + exclude the possibility that there is a leading comment. + The function returns a boolean value. + -------------------- *) + fun lc_exists ("("::"*"::_) = true + | lc_exists (" "::cs) = lc_exists cs + | lc_exists ("\n"::cs) = lc_exists cs + | lc_exists ("\t"::cs) = lc_exists cs + | lc_exists _ = false; + + (* -------------------- + In the case that there is a leading comment, this + function leading_comment extracts it> It also need + to check for any html escape sequences in the comment. + -------------------- *) + fun leading_comment sl = + if (lc_exists sl) then + (html_escape (first_comment sl [] 0)) + else + [""]; + + (* -------------------- + The function thy_name checks for the parent theory + names apearing after the leading comment. The function + returns a boolean value indicating if the word is a + theory name, so that a link can be inserted and can look + for another potential theory name. If the word is not a + theory name then continue to modify the rest of the body. + -------------------- *) + fun thy_name word = + is_some (Symtab.lookup (theory_list, word)); + + (* -------------------- + Creates the right html format when inserting the link. 
+ -------------------- *) + fun format_link s = + let in + output(std_out,"\n"^s); + ""^s^"" + end; + + (* -------------------- + -------------------- *) + + fun insert_link w = (explode (format_link (implode w))) + (* -------------------- + -------------------- *) + + fun html ( " "::l) r = html l (r@[ " "]) + | html ( "+"::l) r = html l (r@[ "+"]) + | html ("\n"::l) r = html l (r@["\n"]) + | html ("\t"::l) r = html l (r@["\t"]) + | html l r = r@(get_word l []) + (* -------------------- + -------------------- *) + and get_word ( " "::l) w = + if (thy_name (implode w)) + then (insert_link w)@[ " "]@(html l []) + else w@[" "]@(html_escape l) + | get_word ( "+"::l) w = + if (thy_name (implode w)) + then (insert_link w)@[ "+"]@(html l []) + else w@["+"]@(html_escape l) + | get_word ("\n"::l) w = + if (thy_name (implode w)) + then (insert_link w)@[ "\n"]@(html l []) + else w@["\n"]@(html_escape l) + | get_word ("\t"::l) w = + if (thy_name (implode w)) + then (insert_link w)@[ "\t"]@(html l []) + else w@["\t"]@(html_escape l) + | get_word (c::l) w = (get_word l (w@[c])) + | get_word [] [] = [] + | get_word [] w = + if (thy_name (implode w)) + then (insert_link w) + else w; + + (* -------------------- + -------------------- *) + fun modify_body ("="::l) r = r@["="]@(html l []) + | modify_body (c::l) r = modify_body l (r@[c]) + | modify_body [] r = r; + + (* -------------------- + -------------------- *) + fun after_first_c ("*"::")"::cs) 1 = cs + | after_first_c ("*"::")"::cs) d = after_first_c cs (d-1) + | after_first_c ("("::"*"::cs) d = after_first_c cs (d+1) + | after_first_c (c::cs) d = after_first_c cs d + | after_first_c cs 0 = cs + | after_first_c [] _ = raise lex_err; + + (* -------------------- + -------------------- *) + fun remove_lc sl = + if (lc_exists sl) then (after_first_c sl 0) else sl; + + (* -------------------- + -------------------- *) + fun rest sl = modify_body (remove_lc sl) []; + + (* -------------------- + -------------------- *) + fun filter sl = (leading_comment sl) @ (rest sl); + + (* -------------------- + -------------------- *) + fun getstring s = + case input (is, 1) of + "" => head^title^(implode(filter(explode(s))))^tail + | "\n" => getstring (s^"\n") + | c => getstring (s^c) + in + getstring "" + end; + + (* -------------------- + -------------------- *) + fun transform t = + let + val infile = open_in intext; + val outfile = open_out outtext + in + output(std_out,"\nTheory: "^t); + output (outfile, gettext(infile)); + close_in (infile); + close_out (outfile) + end; + + (* -------------------- + -------------------- *) + fun create_pure p = + let + val outfile = open_out outtext + in + output(std_out, "\nCreating Pure.html ..."); + output (outfile, "\n\n"^intext^ + "\n\n\n"^"\n"^ + "

    "^tname^".thy"^"

    \n\n"^"
    \n"^
    +			"This is a theory file called Pure.\n"^
    +			"Its children are all other Isabelle theories."^
    +			"
    "^"\n"^""); + close_out (outfile) + end + + in + if (tname <> "Pure") then (transform tname) + else (create_pure tname) + end; + +(* -------------------- + -------------------- *) +map move_eachf (files); +