--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/component_elm.scala Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,127 @@
+/* Author: Fabian Huch, TU Muenchen
+
+Build Isabelle Elm component from official downloads. See also: https://elm-lang.org/
+*/
+
+package isabelle
+
+
+object Component_Elm {
+ /* platform information */
+
+ sealed case class Download_Platform(platform_name: String, file_name: String) {
+ override def toString: String = platform_name
+
+    def archive_name: String = file_name + ".gz"
+ def download(base_url: String, version: String): String =
+ base_url + "/" + version + "/" + archive_name
+ }
+
+ val platforms: List[Download_Platform] =
+ List(
+ Download_Platform("arm64-darwin", "binary-for-mac-64-bit-ARM"),
+ Download_Platform("x86_64-darwin", "binary-for-mac-64-bit"),
+ Download_Platform("x86_64-linux", "binary-for-linux-64-bit"),
+ Download_Platform("x86_64-windows", "binary-for-windows-64-bit"))
+
+
+ /* build elm */
+
+ val default_url = "https://github.com/elm/compiler/releases/download"
+ val default_version = "0.19.1"
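+  // For illustration: with the defaults above, the x86_64-linux archive is fetched from
+  // "https://github.com/elm/compiler/releases/download/0.19.1/binary-for-linux-64-bit.gz"
+  // (cf. Download_Platform.download).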
+
+ def build_elm(
+ base_url: String = default_url,
+ version: String = default_version,
+ target_dir: Path = Path.current,
+ progress: Progress = new Progress
+ ): Unit = {
+ Isabelle_System.require_command("gunzip")
+
+
+ /* component */
+
+ val component = "elm-" + version
+ val component_dir =
+ Components.Directory(target_dir + Path.basic(component)).create(progress = progress)
+
+
+ /* download */
+
+ for (platform <- platforms) {
+ val platform_dir =
+ Isabelle_System.make_directory(component_dir.path + Path.basic(platform.platform_name))
+
+ val exe = Path.basic("elm")
+ val download = platform.download(base_url, version)
+
+ Isabelle_System.with_tmp_dir("download", component_dir.path.file) { download_dir =>
+ Isabelle_System.with_tmp_dir("tmp", component_dir.path.file) { tmp_dir =>
+ val archive_file = download_dir + Path.basic(platform.archive_name)
+
+ Isabelle_System.download_file(download, archive_file, progress = progress)
+ Isabelle_System.bash("gunzip " + File.bash_path(archive_file))
+ Isabelle_System.copy_file(archive_file.drop_ext, platform_dir + exe)
+ File.set_executable(platform_dir + exe)
+ }
+ }
+ }
+
+ /* license */
+
+ File.write(
+ component_dir.LICENSE,
+ Url.read("https://raw.githubusercontent.com/elm/compiler/master/LICENSE"))
+
+
+ /* settings */
+
+ component_dir.write_settings("""
+ISABELLE_ELM="$COMPONENT/${ISABELLE_WINDOWS_PLATFORM64:-${ISABELLE_APPLE_PLATFORM64:-$ISABELLE_PLATFORM64}}"
+""")
+
+
+ /* README */
+
+ File.write(component_dir.README,
+ """This Isabelle component provides elm """ + version + """.
+
+See also https://elm-lang.org and executables from """ + base_url + """
+
+ Fabian Huch
+ """ + Date.Format.date(Date.now()) + "\n")
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool =
+ Isabelle_Tool("component_elm", "build elm component", Scala_Project.here,
+ { args =>
+ var target_dir = Path.current
+ var base_url = default_url
+ var version = default_version
+
+ val getopts = Getopts("""
+Usage: isabelle component_elm [OPTIONS]
+
+ Options are:
+ -D DIR target directory (default ".")
+ -U URL download URL (default: """" + default_url + """")
+ -V VERSION version (default: """" + default_version + """")
+
+ Build elm component.
+""",
+ "D:" -> (arg => target_dir = Path.explode(arg)),
+ "U:" -> (arg => base_url = arg),
+ "V:" -> (arg => version = arg))
+
+ val more_args = getopts(args)
+ if (more_args.nonEmpty) getopts.usage()
+
+ val progress = new Console_Progress()
+
+ build_elm(base_url = base_url, version = version, target_dir = target_dir,
+ progress = progress)
+ })
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/component_solr.scala Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,125 @@
+/* Author: Fabian Huch, TU Muenchen
+
+Build Isabelle Solr component from official downloads. See also: https://solr.apache.org/
+*/
+
+package isabelle
+
+
+object Component_Solr {
+ val default_download_url = "https://dlcdn.apache.org/solr/solr/9.6.1/solr-9.6.1.tgz"
+
+
+ /* build solr */
+
+ def build_solr(
+ download_url: String = default_download_url,
+ progress: Progress = new Progress,
+ target_dir: Path = Path.current
+ ): Unit =
+ Isabelle_System.with_tmp_dir("build") { tmp_dir =>
+ /* component */
+
+ val Archive_Name = """^.*/([^/]+)$""".r
+ val Version = """^solr-(.*)\.tgz$""".r
+
+ val archive_name =
+ download_url match {
+ case Archive_Name(name) => name
+ case _ => error("Failed to determine source archive name from " + quote(download_url))
+ }
+
+ val version =
+ archive_name match {
+ case Version(version) => version
+ case _ => error("Failed to determine component version from " + quote(archive_name))
+ }
+
+ val component_name = "solr-" + version
+ val component_dir =
+ Components.Directory(target_dir + Path.basic(component_name)).create(progress = progress)
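+      // For illustration: with the default download URL above, archive_name is
+      // "solr-9.6.1.tgz", version is "9.6.1", and the component is named "solr-9.6.1".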
+
+
+ /* download */
+
+ val archive_path = tmp_dir + Path.basic(archive_name)
+ Isabelle_System.download_file(download_url, archive_path, progress = progress)
+
+ Isabelle_System.extract(archive_path, tmp_dir)
+ val source_dir = File.get_dir(tmp_dir, title = download_url)
+
+ Isabelle_System.copy_file(source_dir + Path.explode("LICENSE.txt"), component_dir.path)
+
+ val webapp_lib_dir = source_dir + Path.explode("server/solr-webapp/webapp/WEB-INF/lib")
+ val server_lib_dir = source_dir + Path.explode("server/lib")
+
+
+ /* jars */
+
+ Isabelle_System.make_directory(component_dir.lib)
+
+ val compile = List("solr-solrj", "solr-api", "solr-core")
+
+ val jars =
+ File.find_files(webapp_lib_dir.file, _.getName.endsWith(".jar")) ++
+ File.find_files(server_lib_dir.file, _.getName.endsWith(".jar"))
+
+ for (jar <- jars) Isabelle_System.copy_file(jar, component_dir.lib.file)
+
+
+ /* settings */
+
+ def jar_path(file: String): String = "$SOLR_HOME/lib/" + file
+
+ component_dir.write_settings("""
+SOLR_HOME="$COMPONENT"
+SOLR_JARS=""" + quote(compile.map(_ + "-" + version + ".jar").map(jar_path).mkString(":")) + """
+classpath """ + quote(File.read_dir(component_dir.lib).map(jar_path).mkString(":")) + """
+
+SOLR_LUCENE_VERSION="9.10"
+SOLR_SCHEMA_VERSION="1.6"
+""")
+
+
+ /* README */
+
+ File.write(component_dir.README,
+ "This Isabelle component provides Solr " + version + " jars from\n" + download_url + """
+
+      Fabian Huch
+ """ + Date.Format.date(Date.now()) + "\n")
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool =
+ Isabelle_Tool("component_solr", "build Isabelle solr jar distribution", Scala_Project.here,
+ { args =>
+ var target_dir = Path.current
+ var download_url = default_download_url
+ var verbose = false
+
+ val getopts = Getopts("""
+Usage: isabelle component_solr [OPTIONS]
+
+ Options are:
+ -D DIR target directory (default ".")
+ -U URL download URL
+ (default: """" + default_download_url + """")
+ -v verbose
+
+ Build solr component from official download.
+""",
+ "D:" -> (arg => target_dir = Path.explode(arg)),
+ "U:" -> (arg => download_url = arg),
+ "v" -> (_ => verbose = true))
+
+ val more_args = getopts(args)
+ if (more_args.nonEmpty) getopts.usage()
+
+ val progress = new Console_Progress(verbose = verbose)
+
+ build_solr(download_url = download_url, progress = progress, target_dir = target_dir)
+ })
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/components/find_facts Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,2 @@
+solr-9.6.1
+elm-0.19.1
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/elm.scala Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,94 @@
+/* Author: Fabian Huch, TU Muenchen
+
+Elm module for Isabelle.
+*/
+
+package isabelle
+
+
+import java.io.{File => JFile}
+
+import scala.jdk.CollectionConverters._
+
+import org.jsoup.nodes.Element
+
+
+object Elm {
+ private lazy val elm_home =
+ proper_string(Isabelle_System.getenv("ISABELLE_ELM")).getOrElse(error("No elm component found"))
+
+ private lazy val exec = Path.explode(elm_home) + Path.basic("elm")
+
+ object Project {
+ def apply(
+ name: String,
+ dir: Path,
+ main: Path = Path.explode("src/Main.elm"),
+ output: Path = Path.explode("index.html"),
+ head: XML.Body = Nil
+ ): Project = {
+ if (!dir.is_dir) error("Project directory does not exist: " + dir)
+ val main_file = dir + main
+ if (!main_file.is_file) error("Main elm file does not exist: " + main_file)
+ new Project(name, dir, main, dir + output, head)
+ }
+ }
+
+ class Project private(name: String, dir: Path, main: Path, output: Path, head: XML.Body) {
+ val definition = JSON.parse(File.read(dir + Path.basic("elm.json")))
+ val src_dirs =
+ JSON.strings(definition, "source-directories").getOrElse(
+ error("Missing source directories in elm.json"))
+
+ def sources: List[JFile] =
+ for {
+ src_dir <- src_dirs
+ path = dir + Path.explode(src_dir)
+ file <- File.find_files(path.file, _.getName.endsWith(".elm"))
+ } yield file
+
+ def sources_shasum: SHA1.Shasum = {
+ val meta_info = SHA1.shasum_meta_info(SHA1.digest(JSON.Format(definition)))
+ val head_digest = SHA1.shasum(SHA1.digest(XML.string_of_body(head)), "head")
+ val source_digest =
+ SHA1.shasum_sorted(for (file <- sources) yield SHA1.digest(file) -> file.getCanonicalPath)
+ meta_info ::: head_digest ::: source_digest
+ }
+
+ def get_digest: SHA1.Digest =
+ Exn.capture {
+ val html = HTML.parse_document(File.read(output))
+        val elem = html.head.select("meta[name=shasum]")
+ Library.the_single(elem.eachAttr("content").asScala.toList)
+ } match {
+ case Exn.Res(s) => SHA1.fake_digest(s)
+ case _ => SHA1.digest_empty
+ }
+
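+    // Caching scheme: sources_shasum digests elm.json, the extra head elements, and all
+    // *.elm sources; build_html embeds that digest as <meta name="shasum"> in the generated
+    // output, and get_digest reads it back, so unchanged projects are not recompiled.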
+ def build_html(progress: Progress): String = {
+ val digest = sources_shasum.digest
+ if (digest == get_digest) File.read(output)
+ else {
+ progress.echo("### Building " + name + " (" + output.canonical.implode + ") ...")
+
+ val cmd =
+ File.bash_path(exec) + " make " + File.bash_path(main) + " --optimize --output=" + output
+ val res = Isabelle_System.bash(cmd, cwd = dir)
+
+ if (!res.ok) {
+ progress.echo(res.err)
+ error("Failed to compile Elm sources")
+ }
+
+ val file = HTML.parse_document(File.read(output))
+ file.head.appendChild(
+ Element("meta").attr("name", "shasum").attr("content", digest.toString))
+ file.head.append(XML.string_of_body(head))
+ val html = file.html
+ File.write(output, html)
+
+ html
+ }
+ }
+ }
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/etc/build.props Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,15 @@
+title = Find Facts
+module = $FIND_FACTS_HOME/lib/find-facts.jar
+requirements = \
+ env:ISABELLE_SCALA_JAR \
+ env:SOLR_JARS
+sources = \
+ component_elm.scala \
+ component_solr.scala \
+ elm.scala \
+ find_facts.scala \
+ find_facts_tools.scala \
+ solr.scala \
+ thy_blocks.scala
+services = \
+ isabelle.Find_Facts_Tools
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/etc/options Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,9 @@
+section "Find Facts"
+
+option find_facts_database_name : string = "local"
+
+option browser_info_url_library : string = "https://isabelle.in.tum.de/dist/library/"
+ -- "base url for Isabelle HTML presentation"
+
+option browser_info_url_afp : string = "https://www.isa-afp.org/browser_info/current/"
+ -- "base url for AFP HTML presentation"
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/etc/settings Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,3 @@
+FIND_FACTS_HOME="$COMPONENT"
+
+init_components "${ISABELLE_COMPONENTS_BASE:-$USER_HOME/.isabelle/contrib}" "$FIND_FACTS_HOME/components/find_facts"
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/find_facts.scala Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,949 @@
+/* Title:  Tools/Find_Facts/find_facts.scala
+   Author: Fabian Huch, TU Muenchen
+
+Full-text search engine for Isabelle (including web server), using Solr as backend.
+*/
+
+package isabelle
+
+
+import scala.annotation.tailrec
+import scala.collection.immutable.TreeMap
+import scala.collection.mutable
+
+
+object Find_Facts {
+ /** blocks **/
+
+ object Kind {
+ val CONST = "constant"
+ val TYPE = "type"
+ val THM = "fact"
+ }
+
+ case class Block(
+ id: String,
+ version: Long,
+ chapter: String,
+ session: String,
+ theory: String,
+ file: String,
+ url_path: Path,
+ command: String,
+ start_line: Int,
+ src_before: String,
+ src: String,
+ src_after: String,
+ xml: XML.Body,
+ html: String,
+ entity_kname: Option[String],
+ consts: List[String],
+ typs: List[String],
+ thms: List[String]
+ ) {
+ def path = Path.explode(file)
+ def file_type: String = path.get_ext
+ def file_name: String = path.file_name
+
+ def kinds: List[String] =
+ (if (typs.nonEmpty) List(Kind.TYPE) else Nil) :::
+ (if (consts.nonEmpty) List(Kind.CONST) else Nil) :::
+ (if (thms.nonEmpty) List(Kind.THM) else Nil)
+ def names: List[String] = (typs ::: consts ::: thms).distinct
+ }
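+  // Block ids combine the (squashed) source file path with the block's text range,
+  // as "FILE|START..STOP" (cf. make_thy_blocks below).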
+
+ case class Blocks(num_found: Long, blocks: List[Block], cursor: String)
+
+
+  /** queries **/
+
+ enum Atom {
+ case Exact(s: String) extends Atom
+ case Value(s: String) extends Atom
+ }
+
+ enum Field {
+ case chapter, session, file_type, theory, command, source, names, consts, typs, thms, kinds
+ }
+
+ sealed trait Filter
+ case class Field_Filter(field: Field, either: List[Atom]) extends Filter
+ case class Any_Filter(either: List[Atom]) extends Filter {
+ def fields: List[Field] = List(Field.session, Field.theory, Field.source, Field.names)
+ }
+
+ case class Select(field: Field, values: List[String])
+
+ object Query {
+ def apply(filters: Filter*): Query = new Query(filters.toList)
+ }
+
+ case class Query(
+ filters: List[Filter] = Nil,
+ exclude: List[Filter] = Nil,
+ selects: List[Select] = Nil)
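+  // Illustrative query (field names as in the Field enum above): blocks whose source
+  // mentions "induct", restricted to session "HOL", excluding ML files:
+  //   Query(
+  //     filters = List(
+  //       Field_Filter(Field.source, List(Atom.Value("induct"))),
+  //       Field_Filter(Field.session, List(Atom.Exact("HOL")))),
+  //     exclude = List(Field_Filter(Field.file_type, List(Atom.Exact("ML")))))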
+
+
+ /* stats and facets */
+
+ case class Stats(
+ results: Long,
+ sessions: Long,
+ theories: Long,
+ commands: Long,
+ consts: Long,
+ typs: Long,
+ thms: Long)
+
+ case class Facets(
+ chapter: Map[String, Long],
+ session: Map[String, Long],
+ theory: Map[String, Long],
+ file_type: Map[String, Long],
+ command: Map[String, Long],
+ kinds: Map[String, Long],
+ consts: Map[String, Long],
+ typs: Map[String, Long],
+ thms: Map[String, Long])
+
+
+ /** Solr data model **/
+
+ object private_data extends Solr.Data("find_facts") {
+ /* types */
+
+ val symbol_codes =
+ for {
+ entry <- Symbol.symbols.entries
+ code <- entry.decode.toList
+ input <- entry.symbol :: entry.abbrevs
+ } yield input -> code
+
+ val replacements =
+ for ((symbol, codes) <- symbol_codes.groupMap(_._1)(_._2).toList if codes.length == 1)
+ yield symbol -> Library.the_single(codes)
+
+ val Special_Char = """(.*[(){}\[\].,:"].*)""".r
+ val Arrow = """(.*=>.*)""".r
+
+ val synonyms =
+ for {
+ (symbol, code) <- symbol_codes
+ if !Special_Char.matches(symbol) && !Arrow.matches(symbol)
+ } yield symbol + " => " + code
+
+ override lazy val more_config = Map(Path.basic("synonyms.txt") -> synonyms.mkString("\n"))
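+    // synonyms.txt contains one "input => code" line for each symbol notation (symbol or
+    // abbreviation) without query special characters, e.g. "\<lambda> => λ"; notations with
+    // special characters or "=>" are excluded here and, if unambiguous, handled by the
+    // pattern-replace filters below.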
+
+ object Types {
+ private val strip_html = Solr.Class("charFilter", "HTMLStripCharFilterFactory")
+ private val replace_symbol_chars =
+ replacements.collect {
+ case (Special_Char(pattern), code) =>
+ Solr.Class(
+ "charFilter", "PatternReplaceCharFilterFactory",
+ List("pattern" -> ("\\Q" + pattern + "\\E"), "replacement" -> code))
+ }
+
+ private val symbol_pattern =
+ """\s*(::|[(){}\[\].,:"]|[^\p{ASCII}]|((?![^\p{ASCII}])[^(){}\[\].,:"\s])+)\s*""".r
+
+ private val tokenize =
+ Solr.Class("tokenizer", "WhitespaceTokenizerFactory", List("rule" -> "java"))
+ private val tokenize_symbols =
+ Solr.Class("tokenizer", "PatternTokenizerFactory",
+ List("pattern" -> symbol_pattern.toString, "group" -> "1"))
+
+ private val to_lower = Solr.Class("filter", "LowerCaseFilterFactory")
+ private val add_ascii =
+ Solr.Class("filter", "ASCIIFoldingFilterFactory", List("preserveOriginal" -> "true"))
+ private val delimit_words =
+ Solr.Class("filter", "WordDelimiterGraphFilterFactory", List(
+ "splitOnCaseChange" -> "0", "stemEnglishPossessive" -> "0", "preserveOriginal" -> "1"))
+ private val flatten = Solr.Class("filter", "FlattenGraphFilterFactory")
+ private val replace_symbols =
+ Solr.Class("filter", "SynonymGraphFilterFactory", List("synonyms" -> "synonyms.txt"))
+ private val replace_special_symbols =
+ replacements.collect {
+ case (Arrow(arrow), code) =>
+ Solr.Class("filter", "PatternReplaceFilterFactory",
+ List("pattern" -> ("\\Q" + arrow + "\\E"), "replacement" -> code))
+ }
+
+ val source =
+ Solr.Type("name", "TextField", Nil, List(
+ XML.Elem(Markup("analyzer", List("type" -> "index")),
+ List(strip_html, tokenize_symbols, to_lower, add_ascii, delimit_words, flatten)),
+ XML.Elem(
+ Markup("analyzer", List("type" -> "query")),
+ replace_symbol_chars ::: tokenize_symbols :: replace_symbols ::
+ replace_special_symbols ::: to_lower :: Nil)))
+
+ val name =
+ Solr.Type("source", "TextField", Nil, List(
+ XML.Elem(Markup("analyzer", List("type" -> "index")),
+ List(tokenize, to_lower, delimit_words, flatten)),
+ XML.Elem(Markup("analyzer", List("type" -> "query")), List(tokenize, to_lower))))
+
+ val text = Solr.Type("text", "TextField")
+ }
+
+
+ /* fields */
+
+ object Fields {
+ val id = Solr.Field("id", Solr.Type.string).make_unique_key
+ val version = Solr.Field("version", Solr.Type.long, Solr.Column_Wise(true))
+ val chapter = Solr.Field("chapter", Solr.Type.string, Solr.Column_Wise(true))
+ val session = Solr.Field("session", Types.name)
+ val session_facet = Solr.Field("session_facet", Solr.Type.string, Solr.Stored(false))
+ val theory = Solr.Field("theory", Types.name)
+ val theory_facet = Solr.Field("theory_facet", Solr.Type.string, Solr.Stored(false))
+ val file = Solr.Field("file", Solr.Type.string, Solr.Indexed(false))
+ val file_type =
+ Solr.Field("file_type", Solr.Type.string, Solr.Column_Wise(true) ::: Solr.Stored(false))
+ val url_path = Solr.Field("url_path", Solr.Type.string, Solr.Indexed(false))
+ val command = Solr.Field("command", Solr.Type.string, Solr.Column_Wise(true))
+ val start_line = Solr.Field("start_line", Solr.Type.int, Solr.Column_Wise(true))
+ val src_before = Solr.Field("src_before", Solr.Type.string, Solr.Indexed(false))
+ val src_after = Solr.Field("src_after", Solr.Type.string, Solr.Indexed(false))
+ val src = Solr.Field("src", Types.source)
+ val xml = Solr.Field("xml", Solr.Type.bytes, Solr.Indexed(false))
+ val html = Solr.Field("html", Solr.Type.bytes, Solr.Indexed(false))
+ val entity_kname = Solr.Field("entity_kname", Solr.Type.string, Solr.Indexed(false))
+ val consts = Solr.Field("consts", Types.name, Solr.Multi_Valued(true))
+ val consts_facet =
+ Solr.Field("consts_facet", Solr.Type.string, Solr.Multi_Valued(true) ::: Solr.Stored(false))
+ val typs = Solr.Field("typs", Types.name, Solr.Multi_Valued(true))
+ val typs_facet =
+ Solr.Field("typs_facet", Solr.Type.string, Solr.Multi_Valued(true) ::: Solr.Stored(false))
+ val thms = Solr.Field("thms", Types.name, Solr.Multi_Valued(true))
+ val thms_facet =
+ Solr.Field("thms_facet", Solr.Type.string, Solr.Multi_Valued(true) ::: Solr.Stored(false))
+ val names = Solr.Field("names", Types.name, Solr.Multi_Valued(true) ::: Solr.Stored(false))
+ val kinds =
+ Solr.Field("kinds", Solr.Type.string,
+ Solr.Multi_Valued(true) ::: Solr.Column_Wise(true) ::: Solr.Stored(false))
+ }
+
+ lazy val fields: Solr.Fields = Solr.Fields(
+ Fields.id, Fields.version, Fields.chapter, Fields.session, Fields.session_facet,
+ Fields.theory, Fields.theory_facet, Fields.file, Fields.file_type, Fields.url_path,
+ Fields.command, Fields.start_line, Fields.src_before, Fields.src_after, Fields.src,
+ Fields.xml, Fields.html, Fields.entity_kname, Fields.consts, Fields.consts_facet, Fields.typs,
+ Fields.typs_facet, Fields.thms, Fields.thms_facet, Fields.names, Fields.kinds)
+
+
+ /* operations */
+
+ def read_domain(db: Solr.Database, q: Solr.Source): Set[String] =
+ db.execute_query(Fields.id, List(Fields.id), None, 100000,
+ { results =>
+ results.map(_.string(Fields.id)).toSet
+ }, q = q)
+
+ def read_block(res: Solr.Result): Block = {
+ val id = res.string(Fields.id)
+ val version = res.long(Fields.version)
+ val chapter = res.string(Fields.chapter)
+ val session = res.string(Fields.session)
+ val theory = res.string(Fields.theory)
+ val file = res.string(Fields.file)
+ val url_path = Path.explode(res.string(Fields.url_path))
+ val command = res.string(Fields.command)
+ val start_line = res.int(Fields.start_line)
+ val src_before = res.string(Fields.src_before)
+ val src = res.string(Fields.src)
+ val src_after = res.string(Fields.src_after)
+ val xml = YXML.parse_body(res.bytes(Fields.xml))
+ val html = res.bytes(Fields.html).text
+ val entity_kname = res.get_string(Fields.entity_kname)
+ val consts = res.list_string(Fields.consts)
+ val typs = res.list_string(Fields.typs)
+ val thms = res.list_string(Fields.thms)
+
+ Block(id = id, version = version, chapter = chapter, session = session, theory = theory,
+ file = file, url_path = url_path, command = command, start_line = start_line, src_before =
+ src_before, src = src, src_after = src_after, xml = xml, html = html, entity_kname =
+ entity_kname, consts = consts, typs = typs, thms = thms)
+ }
+
+ def read_blocks(
+ db: Solr.Database,
+ q: Solr.Source,
+ fq: List[Solr.Source],
+ cursor: Option[String] = None,
+ max_results: Int = 10
+ ): Blocks =
+ db.execute_query(Fields.id, stored_fields, cursor, max_results,
+ { results =>
+ val next_cursor = results.next_cursor
+ val blocks = results.map(read_block).toList
+ Blocks(results.num_found, blocks, next_cursor)
+ }, q = q, fq = fq, more_chunks = 0)
+
+ def stream_blocks(
+ db: Solr.Database,
+ q: Solr.Source,
+ stream: Iterator[Block] => Unit,
+ cursor: Option[String] = None,
+ ): Unit =
+ db.execute_query(Fields.id, stored_fields, cursor, 10000,
+ { results =>
+ stream(results.map(read_block))
+ }, q = q)
+
+ def update_theory(db: Solr.Database, theory_name: String, blocks: List[Block]): Unit =
+ db.transaction {
+ val delete =
+ read_domain(db, Solr.filter(Fields.theory, Solr.phrase(theory_name))) -- blocks.map(_.id)
+
+ if (delete.nonEmpty) db.execute_batch_delete(delete.toList)
+
+ db.execute_batch_insert(
+ for (block <- blocks) yield { (doc: Solr.Document) =>
+ doc.string(Fields.id) = block.id
+ doc.long(Fields.version) = block.version
+ doc.string(Fields.chapter) = block.chapter
+ doc.string(Fields.session) = block.session
+ doc.string(Fields.session_facet) = block.session
+ doc.string(Fields.theory) = block.theory
+ doc.string(Fields.theory_facet) = block.theory
+ doc.string(Fields.file) = block.file
+ doc.string(Fields.file_type) = block.file_type
+ doc.string(Fields.url_path) = block.url_path.implode
+ doc.string(Fields.command) = block.command
+ doc.int(Fields.start_line) = block.start_line
+ doc.string(Fields.src_before) = block.src_before
+ doc.string(Fields.src) = block.src
+ doc.string(Fields.src_after) = block.src_after
+ doc.bytes(Fields.xml) = YXML.bytes_of_body(block.xml)
+ doc.bytes(Fields.html) = Bytes(block.html)
+ doc.string(Fields.entity_kname) = block.entity_kname
+ doc.string(Fields.consts) = block.consts
+ doc.string(Fields.consts_facet) = block.consts
+ doc.string(Fields.typs) = block.typs
+ doc.string(Fields.typs_facet) = block.typs
+ doc.string(Fields.thms) = block.thms
+ doc.string(Fields.thms_facet) = block.thms
+ doc.string(Fields.names) = block.names
+ doc.string(Fields.kinds) = block.kinds
+ })
+ }
+
+ def read_theory(db: Solr.Database, theory_name: String): List[Block] = {
+ val blocks = new mutable.ListBuffer[Block]
+ stream_blocks(db, Solr.filter(Fields.theory_facet, Solr.phrase(theory_name)), {
+ res => blocks ++= res
+ })
+ blocks.toList
+ }
+
+ def delete_session(db: Solr.Database, session_name: String): Unit =
+ db.transaction {
+ val delete = read_domain(db, Solr.filter(Fields.session, Solr.phrase(session_name)))
+ if (delete.nonEmpty) db.execute_batch_delete(delete.toList)
+ }
+
+ def query_stats(db: Solr.Database, q: Solr.Source, fq: List[Solr.Source]): Stats =
+ db.execute_stats_query(
+ List(Fields.session_facet, Fields.theory_facet, Fields.command, Fields.consts_facet,
+ Fields.typs_facet, Fields.thms_facet, Fields.start_line),
+ { res =>
+ val results = res.count
+ val sessions = res.count(Fields.session_facet)
+ val theories = res.count(Fields.theory_facet)
+        val commands = res.count(Fields.command)
+ val consts = res.count(Fields.consts_facet)
+ val typs = res.count(Fields.typs_facet)
+ val thms = res.count(Fields.thms_facet)
+
+ Stats(results, sessions, theories, commands, consts, typs, thms)
+ }, q = q, fq = fq)
+
+ def query_facets(
+ db: Solr.Database,
+ q: Solr.Source,
+ fq: List[Solr.Source],
+ max_terms: Int = 100
+ ): Facets =
+ db.execute_facet_query(
+ List(Fields.chapter, Fields.session_facet, Fields.theory_facet, Fields.file_type,
+ Fields.command, Fields.kinds, Fields.consts_facet, Fields.typs_facet, Fields.thms_facet),
+ { res =>
+ val chapter = res.string(Fields.chapter)
+ val sessions = res.string(Fields.session_facet)
+ val theories = res.string(Fields.theory_facet)
+ val file_types = res.string(Fields.file_type)
+ val commands = res.string(Fields.command)
+ val kinds = res.string(Fields.kinds)
+ val consts = res.string(Fields.consts_facet)
+ val typs = res.string(Fields.typs_facet)
+ val thms = res.string(Fields.thms_facet)
+
+ Facets(chapter, sessions, theories, file_types, commands, kinds, consts, typs, thms)
+ }, q = q, fq = fq, max_terms = max_terms)
+
+
+ /* queries */
+
+ def solr_field(field: Field, select: Boolean = false): Solr.Field =
+ field match {
+ case Field.chapter => Fields.chapter
+ case Field.session if select => Fields.session_facet
+ case Field.session => Fields.session
+ case Field.theory if select => Fields.theory_facet
+ case Field.theory => Fields.theory
+ case Field.file_type => Fields.file_type
+ case Field.command => Fields.command
+ case Field.source => Fields.src
+ case Field.names => Fields.names
+ case Field.consts if select => Fields.consts_facet
+ case Field.consts => Fields.consts
+ case Field.typs if select => Fields.typs_facet
+ case Field.typs => Fields.typs
+ case Field.thms if select => Fields.thms_facet
+ case Field.thms => Fields.thms
+ case Field.kinds => Fields.kinds
+ }
+
+ def solr_query(query: Query): (Solr.Source, List[Solr.Source]) = {
+ def solr_atom(atom: Atom): List[Solr.Source] =
+ atom match {
+ case Atom.Value(s) if s.isEmpty => Nil
+ case Atom.Value(s) if !s.exists(Solr.wildcard_char(_)) => List(Solr.term(s))
+ case Atom.Value(s) =>
+ val terms = s.split("\\s+").toList.filterNot(_.isBlank)
+ if (terms.isEmpty) Nil else terms.map(Solr.wildcard)
+ case Atom.Exact(s) => List(Solr.phrase(s))
+ }
+
+ def solr_atoms(field: Field, atoms: List[Atom]): List[Solr.Source] =
+ for {
+ atom <- atoms
+ source <- solr_atom(atom)
+ } yield Solr.filter(solr_field(field), source)
+
+ def solr_filter(filter: Filter): List[Solr.Source] =
+ filter match {
+ case Field_Filter(field, atoms) => solr_atoms(field, atoms)
+ case any@Any_Filter(atoms) => any.fields.flatMap(solr_atoms(_, atoms))
+ }
+
+ def solr_select(select: Select): Solr.Source = {
+ val field = solr_field(select.field, select = true)
+ Solr.tag(field.name, Solr.filter(field, Solr.OR(select.values.map(Solr.phrase))))
+ }
+
+ val filter = query.filters.map(filter => Solr.OR(solr_filter(filter)))
+ val exclude = query.exclude.flatMap(solr_filter).map(Solr.exclude)
+ val selects = query.selects.map(solr_select)
+
+ (Solr.AND(filter ::: exclude), selects)
+ }
+ }
+
+ def query_block(db: Solr.Database, id: String): Option[Block] = {
+ val q = Solr.filter(Find_Facts.private_data.Fields.id, Solr.phrase(id))
+ Find_Facts.private_data.read_blocks(db, q, Nil).blocks.headOption
+ }
+
+ def query_blocks(db: Solr.Database, query: Query, cursor: Option[String] = None): Blocks = {
+ val (q, fq) = Find_Facts.private_data.solr_query(query)
+ Find_Facts.private_data.read_blocks(db, q, fq, cursor = cursor)
+ }
+
+ def query_stats(db: Solr.Database, query: Query): Stats = {
+ val (q, fq) = Find_Facts.private_data.solr_query(query)
+ Find_Facts.private_data.query_stats(db, q, fq)
+ }
+
+ def query_facet(db: Solr.Database, query: Query): Facets = {
+ val (q, fq) = Find_Facts.private_data.solr_query(query)
+ Find_Facts.private_data.query_facets(db, q, fq)
+ }
+
+
+ /** indexing **/
+
+ def make_thy_blocks(
+ options: Options,
+ session: Session,
+ browser_info_context: Browser_Info.Context,
+ document_info: Document_Info,
+ theory_context: Export.Theory_Context,
+ snapshot: Document.Snapshot,
+ chapter: String
+ ): List[Block] = {
+ val theory = theory_context.theory
+ val entities = Export_Theory.read_theory(theory_context).entity_iterator.toList
+ val session_name = theory_context.session_context.session_name
+
+ val theory_info =
+ document_info.theory_by_name(session_name, theory).getOrElse(
+ error("No info for theory " + theory))
+ val thy_dir = browser_info_context.theory_dir(theory_info)
+
+ def make_node_blocks(
+ snapshot: Document.Snapshot,
+ command_ranges: List[(String, Text.Range)]
+ ): List[Block] = {
+ val version = snapshot.version.id
+ val file = Path.explode(snapshot.node_name.node).squash.implode
+ val url_path = thy_dir + browser_info_context.smart_html(theory_info, snapshot.node_name.node)
+
+ val elements =
+ Browser_Info.Elements(html = Browser_Info.default_elements.html - Markup.ENTITY)
+ val node_context = Browser_Info.Node_Context.empty
+
+ val content = XML.content(snapshot.xml_markup(elements = Markup.Elements.empty))
+ def get_content(range: Text.Range): String =
+ Symbol.decode(Line.normalize(range.substring(content)))
+
+ val document = Line.Document(content.replace('\r', '\u001a'))
+ val num_lines = document.lines.length
+ def content_range(start: Line.Position, stop: Line.Position): Text.Range =
+ Text.Range(document.offset(start).get, document.offset(stop).get)
+
+ val index = Symbol.Index(content)
+ val node_entities =
+ TreeMap.from(entities
+ .filter(entity => entity.file == snapshot.node_name.node)
+ .groupBy(entity => index.decode(entity.range).start))
+
+ val rendering = new Rendering(snapshot, options, session)
+ val comment_ranges = rendering.comments(Text.Range.full).map(markup => ("", markup.range))
+
+ for ((command, range) <- command_ranges ::: comment_ranges) yield {
+ val line_range = document.range(range)
+ val start_line = line_range.start.line1
+
+ val id = file + "|" + range.start + ".." + range.stop
+
+ val src_before = get_content(
+ content_range(Line.Position((line_range.start.line - 5).max(0)), line_range.start))
+ val src = get_content(range)
+ val src_after = get_content(
+ content_range(line_range.stop, Line.Position((line_range.stop.line + 5).min(num_lines))))
+
+ val xml = snapshot.xml_markup(range, elements = elements.html)
+ val html =
+ HTML.output(node_context.make_html(elements, xml), hidden = true, structural = false)
+
+ val entities = node_entities.range(range.start, range.stop).values.toList.flatten.distinct
+
+ def get_entities(kind: String): List[String] =
+ for {
+ entity <- entities
+ if entity.export_kind == kind
+ if range.contains(index.decode(entity.range))
+ } yield entity.name
+
+ val entity_kname = entities.sortBy(_.name.length).headOption.map(_.kname)
+
+ val typs = get_entities(Export_Theory.Kind.TYPE)
+ val consts = get_entities(Export_Theory.Kind.CONST)
+ val thms = get_entities(Export_Theory.Kind.THM)
+
+ Block(id = id, version = version, chapter = chapter, session = session_name, theory =
+ theory, file = file, url_path = url_path, command = command, start_line = start_line,
+ src_before = src_before, src = src, src_after = src_after, xml = xml, html = html,
+ entity_kname = entity_kname, consts = consts, typs = typs, thms = thms)
+ }
+ }
+
+ def expand_block(block: Thy_Blocks.Block): List[Thy_Blocks.Block] =
+ block match {
+ case Thy_Blocks.Thy(inner) => inner.flatMap(expand_block)
+ case e@Thy_Blocks.Decl(inner) =>
+ val inner1 = inner.flatMap(expand_block)
+ if (inner.length > 1) e :: inner1 else List(e)
+ case _ => List(block)
+ }
+
+ val thy_command_ranges =
+ for (block <- Thy_Blocks.read_blocks(snapshot).flatMap(expand_block))
+ yield (block.command, block.range)
+
+ make_node_blocks(snapshot, thy_command_ranges) :::
+ (for {
+ blob_name <- snapshot.node_files.tail
+ snapshot1 = snapshot.switch(blob_name)
+ if snapshot1.node.source_wellformed
+ range = Text.Range.length(snapshot1.node.source)
+ block <- make_node_blocks(snapshot1, List(("", range)))
+ } yield block)
+ }
+
+ def find_facts_index(
+ options: Options,
+ sessions: List[String],
+ dirs: List[Path] = Nil,
+ clean: Boolean = false,
+ progress: Progress = new Progress
+ ): Unit = {
+ val store = Store(options)
+ val database = options.string("find_facts_database_name")
+ val session = Session(options, Resources.bootstrap)
+
+ val selection = Sessions.Selection(sessions = sessions)
+ val session_structure = Sessions.load_structure(options, dirs = dirs).selection(selection)
+ val deps = Sessions.Deps.load(session_structure)
+ val browser_info_context = Browser_Info.context(session_structure)
+
+ if (sessions.isEmpty) progress.echo("Nothing to index")
+ else {
+ val stats =
+ using(Solr.init_database(database, Find_Facts.private_data, clean = clean)) { db =>
+ using(Export.open_database_context(store)) { database_context =>
+ val document_info = Document_Info.read(database_context, deps, sessions)
+
+ def index_session(session_name: String): Unit = {
+ using(database_context.open_session0(session_name)) { session_context =>
+ val info = session_structure(session_name)
+ progress.echo("Session " + info.chapter + "/" + session_name + " ...")
+
+ Find_Facts.private_data.delete_session(db, session_name)
+ deps(session_name).proper_session_theories.foreach { name =>
+ val theory_context = session_context.theory(name.theory)
+ Build.read_theory(theory_context) match {
+ case None => progress.echo_warning("No snapshot for theory " + name.theory)
+ case Some(snapshot) =>
+ progress.echo("Theory " + name.theory + " ...")
+ val blocks =
+ make_thy_blocks(options, session, browser_info_context, document_info,
+ theory_context, snapshot, info.chapter)
+ Find_Facts.private_data.update_theory(db, theory_context.theory, blocks)
+ }
+ }
+ }
+ }
+
+ Par_List.map(index_session, sessions)
+ }
+
+ val query = Query(Field_Filter(Field.session, sessions.map(Atom.Exact(_))))
+ Find_Facts.query_stats(db, query)
+ }
+
+ progress.echo("Indexed " + stats.results + " blocks with " +
+ stats.consts + " consts, " + stats.typs + " typs, " + stats.thms + " thms")
+ }
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool = Isabelle_Tool("find_facts_index", "index sessions for find_facts",
+ Scala_Project.here,
+ { args =>
+ var clean = false
+ val dirs = new mutable.ListBuffer[Path]
+ var options = Options.init()
+
+ val getopts = Getopts("""
+Usage: isabelle find_facts_index [OPTIONS] [SESSIONS ...]
+
+  Options are:
+    -c           clean previous index
+    -d DIR       include session directory
+    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
+
+  Index sessions for find_facts.
+""",
+ "c" -> (_ => clean = true),
+ "d:" -> (arg => dirs += Path.explode(arg)),
+ "o:" -> (arg => options = options + arg))
+
+ val sessions = getopts(args)
+
+ val progress = new Console_Progress()
+
+ find_facts_index(options, sessions, dirs = dirs.toList, clean = clean, progress = progress)
+ })
+
+
+ /** index components **/
+
+ def find_facts_index_component(
+ options: Options,
+ target_dir: Path = Path.current,
+ progress: Progress = new Progress
+ ): Unit = {
+ val database = options.string("find_facts_database_name")
+
+ val component = "find_facts_index-" + database
+ val component_dir =
+ Components.Directory(target_dir + Path.basic(component)).create(progress = progress)
+
+ Isabelle_System.copy_dir(Solr.database_dir(database), component_dir.path)
+ component_dir.write_settings("SOLR_COMPONENTS=\"$SOLR_COMPONENTS:$COMPONENT/" + database + "\"")
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool1 = Isabelle_Tool("find_facts_index_component",
+ "build component from find_facts index", Scala_Project.here,
+ { args =>
+ var options = Options.init()
+ var target_dir = Path.current
+
+ val getopts = Getopts("""
+Usage: isabelle find_facts_index_component [OPTIONS]
+
+  Options are:
+    -o OPTION    override Isabelle system OPTION (via NAME=VAL or NAME)
+    -D DIR       target directory (default ".")
+
+  Build component from a finalized find_facts index, named via option find_facts_database_name.
+""",
+ "o:" -> (arg => options = options + arg),
+ "D:" -> (arg => target_dir = Path.explode(arg)))
+
+ val more_args = getopts(args)
+ if (more_args.nonEmpty) getopts.usage()
+
+ find_facts_index_component(options, target_dir = target_dir)
+ })
+
+
+ /** querying **/
+
+ /* requests and parsing */
+
+ case class Query_Blocks(query: Query, cursor: String)
+
+ object Parse {
+ def atom(json: JSON.T): Option[Atom] =
+ JSON.string(json, "value").map(Atom.Value(_)) orElse
+ JSON.string(json, "exact").map(Atom.Exact(_))
+
+ def field(name: String): Option[Field] = Field.values.find(_.toString == name)
+
+ def filter(json: JSON.T): Option[Filter] =
+ for {
+ atoms <- JSON.list(json, "either", atom)
+ filter <-
+ JSON.string(json, "field") match {
+ case None => Some(Any_Filter(atoms))
+ case Some(name) => for (field <- field(name)) yield Field_Filter(field, atoms)
+ }
+ } yield filter
+
+ def select(json: JSON.T): Option[Select] =
+ for {
+ name <- JSON.string(json, "field")
+ field <- field(name)
+ values <- JSON.strings(json, "values")
+ } yield Select(field, values)
+
+ def query(json: JSON.T): Option[Query] =
+ for {
+ filters <- JSON.list(json, "filters", filter)
+ exclude <- JSON.list(json, "exclude", filter)
+ selects <- JSON.list(json, "selects", select)
+ } yield Query(filters, exclude, selects)
+
+ def query_blocks(json: JSON.T): Option[Query_Blocks] =
+ for {
+ query <- JSON.value(json, "query", query)
+ cursor <- JSON.string(json, "cursor")
+ } yield Query_Blocks(query, cursor)
+
+ def query_block(json: JSON.T): Option[String] = for (id <- JSON.string(json, "id")) yield id
+ }
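+  // Illustrative JSON request bodies, as parsed by the Parse combinators above and served
+  // by the REST services in find_facts() below:
+  //   api/query:  {"filters": [{"field": "source", "either": [{"value": "induct*"}]}],
+  //                "exclude": [], "selects": [{"field": "session", "values": ["HOL"]}]}
+  //   api/blocks: {"query": <as in api/query>, "cursor": "..."}
+  //   api/block:  {"id": "..."}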
+
+
+ /* responses and encoding */
+
+ case class Result(blocks: Blocks, facets: Facets)
+
+ class Encode(options: Options) {
+ val library_base_url = Url(options.string("browser_info_url_library"))
+ val afp_base_url = Url(options.string("browser_info_url_afp"))
+
+ def url(block: Block): Url = {
+ val base_url = if (block.chapter == AFP.chapter) afp_base_url else library_base_url
+ base_url.resolve(block.url_path.implode)
+ }
+
+ def block(block: Block): JSON.T =
+ JSON.Object(
+ "id" -> block.id,
+ "chapter" -> block.chapter,
+ "session" -> block.session,
+ "theory" -> block.theory,
+ "file" -> block.file,
+ "file_name" -> block.file_name,
+ "url" -> url(block).toString,
+ "command" -> block.command,
+ "start_line" -> block.start_line,
+ "src_before" -> block.src_before,
+ "src_after" -> block.src_after,
+ "html" -> block.html,
+ "entity_kname" -> block.entity_kname.orNull)
+
+ def blocks(blocks: Blocks): JSON.T =
+ JSON.Object(
+ "num_found" -> blocks.num_found,
+ "blocks" -> blocks.blocks.map(block),
+ "cursor" -> blocks.cursor)
+
+ def facets(facet: Facets): JSON.T =
+ JSON.Object(
+ "chapter" -> facet.chapter,
+ "session" -> facet.session,
+ "file_type" -> facet.file_type,
+ "theory" -> facet.theory,
+ "command" -> facet.command,
+ "kinds" -> facet.kinds,
+ "consts" -> facet.consts,
+ "typs" -> facet.typs,
+ "thms" -> facet.thms)
+
+ def result(result: Result): JSON.T =
+ JSON.Object(
+ "blocks" -> blocks(result.blocks),
+ "facets" -> facets(result.facets))
+ }
+
+
+ /* find facts */
+
+ abstract class REST_Service(path: Path, progress: Progress, method: String = "POST")
+ extends HTTP.Service(path.implode, method = method) {
+ def handle(body: JSON.T): Option[JSON.T]
+
+ def apply(request: HTTP.Request): Option[HTTP.Response] =
+ try {
+ for {
+ json <-
+ Exn.capture(JSON.parse(request.input.text)) match {
+ case Exn.Res(json) => Some(json)
+ case _ =>
+ progress.echo("Could not parse: " + quote(request.input.text), verbose = true)
+ None
+ }
+ res <-
+ handle(json) match {
+ case Some(res) => Some(res)
+ case None =>
+ progress.echo("Invalid request: " + JSON.Format(json), verbose = true)
+ None
+ }
+ } yield HTTP.Response(Bytes(JSON.Format(res)), content_type = "application/json")
+ }
+ catch { case exn: Throwable =>
+ val uuid = UUID.random()
+ progress.echo_error_message("Server error <" + uuid + ">: " + exn)
+ Some(HTTP.Response.text("internal server error: " + uuid))
+ }
+ }
+
+ def find_facts(
+ options: Options,
+ port: Int,
+ devel: Boolean = false,
+ progress: Progress = new Progress
+ ): Unit = {
+ val database = options.string("find_facts_database_name")
+ val encode = new Encode(options)
+ val logo = Bytes.read(Path.explode("$FIND_FACTS_HOME/web/favicon.ico"))
+
+ val isabelle_style = HTML.fonts_css("/fonts/" + _) + "\n\n" + File.read(HTML.isabelle_css)
+
+ val project = Elm.Project("Find_Facts", Path.explode("$FIND_FACTS_HOME/web"), head = List(
+ HTML.style("html,body {width: 100%, height: 100%}"),
+ Web_App.More_HTML.icon("data:image/x-icon;base64," + logo.encode_base64.text),
+ HTML.style_file("isabelle.css"),
+ HTML.style_file("https://fonts.googleapis.com/css?family=Roboto:300,400,500|Material+Icons"),
+ HTML.style_file(
+ "https://unpkg.com/material-components-web-elm@9.1.0/dist/material-components-web-elm.min.css"),
+ HTML.script_file(
+ "https://unpkg.com/material-components-web-elm@9.1.0/dist/material-components-web-elm.min.js")))
+
+ val frontend = project.build_html(progress)
+
+ using(Solr.open_database(database)) { db =>
+ val stats = Find_Facts.query_stats(db, Query(Nil))
+ progress.echo("Started find facts with " + stats.results + " blocks, " +
+ stats.consts + " consts, " + stats.typs + " typs, " + stats.thms + " thms")
+
+ val server =
+ HTTP.server(port, name = "", services = List(
+ HTTP.Fonts_Service,
+ new HTTP.Service("isabelle.css") {
+ def apply(request: HTTP.Request): Option[HTTP.Response] =
+ Some(HTTP.Response(Bytes(isabelle_style), "text/css"))
+ },
+ new HTTP.Service("app") {
+ def apply(request: HTTP.Request): Option[HTTP.Response] =
+ Some(HTTP.Response.html(if (devel) project.build_html(progress) else frontend))
+ },
+ new REST_Service(Path.explode("api/block"), progress) {
+ def handle(body: JSON.T): Option[JSON.T] =
+ for {
+ request <- Parse.query_block(body)
+ block <- query_block(db, request)
+ } yield encode.block(block)
+ },
+ new REST_Service(Path.explode("api/blocks"), progress) {
+ def handle(body: JSON.T): Option[JSON.T] =
+ for (request <- Parse.query_blocks(body))
+ yield encode.blocks(query_blocks(db, request.query, Some(request.cursor)))
+ },
+ new REST_Service(Path.explode("api/query"), progress) {
+ def handle(body: JSON.T): Option[JSON.T] =
+ for (query <- Parse.query(body)) yield {
+ val facet = query_facet(db, query)
+ val blocks = query_blocks(db, query)
+ encode.result(Result(blocks, facet))
+ }
+ }))
+
+ server.start()
+ progress.echo("Server started on port " + server.http_server.getAddress.getPort)
+
+ @tailrec
+ def loop(): Unit = {
+ Thread.sleep(Long.MaxValue)
+ loop()
+ }
+
+ Isabelle_Thread.interrupt_handler(_ => server.stop()) { loop() }
+ }
+ }
+
+
+ /* Isabelle tool wrapper */
+
+ val isabelle_tool2 = Isabelle_Tool("find_facts", "run find_facts server", Scala_Project.here,
+ { args =>
+ var devel = false
+ var options = Options.init()
+ var port = 8080
+ var verbose = false
+
+ val getopts = Getopts("""
+Usage: isabelle find_facts [OPTIONS]
+
+ Options are:
+ -d devel mode
+ -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
+ -p PORT explicit web server port
+ -v verbose server
+
+ Run the find_facts web service.
+""",
+ "d" -> (_ => devel = true),
+ "o:" -> (arg => options = options + arg),
+ "p:" -> (arg => port = Value.Int.parse(arg)),
+ "v" -> (_ => verbose = true))
+
+ val more_args = getopts(args)
+ if (more_args.nonEmpty) getopts.usage()
+
+ val progress = new Console_Progress(verbose = verbose)
+
+ find_facts(options, port, devel = devel, progress = progress)
+ })
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/find_facts_tools.scala Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,8 @@
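+/* Author: Fabian Huch, TU Muenchen
+
+Isabelle/Scala tools for Find_Facts, registered as Isabelle system service.
+*/
+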
+package isabelle
+
+class Find_Facts_Tools extends Isabelle_Scala_Tools(
+ Component_Elm.isabelle_tool,
+ Component_Solr.isabelle_tool,
+ Find_Facts.isabelle_tool,
+ Find_Facts.isabelle_tool1,
+ Find_Facts.isabelle_tool2)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/solr.scala Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,453 @@
+/* Author: Fabian Huch, TU Muenchen
+
+Support for full-text search via Solr. See also: https://solr.apache.org/
+*/
+
+package isabelle
+
+
+import scala.jdk.CollectionConverters.*
+
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer
+import org.apache.solr.client.solrj.request.json.{JsonQueryRequest, TermsFacetMap, DomainMap}
+import org.apache.solr.client.solrj.response.json.{BucketJsonFacet, NestableJsonFacet}
+import org.apache.solr.client.solrj.response.QueryResponse
+import org.apache.solr.client.solrj.SolrQuery
+import org.apache.solr.common.params.CursorMarkParams
+import org.apache.solr.common.{SolrDocument, SolrInputDocument}
+
+
+object Solr {
+ def init(solr_home: Path): Path = {
+ File.write(Isabelle_System.make_directory(solr_home) + Path.basic("solr.xml"), "<solr/>")
+
+ for (entry <- space_explode(':', Isabelle_System.getenv("SOLR_COMPONENTS")) if entry.nonEmpty)
+ Isabelle_System.symlink(Path.explode(entry).absolute, solr_home, force = true)
+
+ java.util.logging.LogManager.getLogManager.reset()
+ solr_home
+ }
+
+ lazy val solr_home = init(Path.explode("$ISABELLE_HOME_USER/solr"))
+
+
+  /** query language **/
+
+ val wildcard_char = Set('*', '?')
+ val special =
+ Set("+", "-", "&&", "||", "!", "(", ")", "{", "}", "[", "]", "^", "\"", "~", "*", "?", ":", "/")
+
+ type Source = String
+
+ val all: Source = "*"
+
+ def enclose(s: Source): Source = "(" + s + ")"
+
+ def escape(s: String, seqs: Set[String]): Source =
+ seqs.foldLeft(s.replace("\\", "\\\\"))((s, seq) => s.replace(seq, "\\" + seq))
+
+ def term(b: Boolean): Source = b.toString
+ def term(i: Int): Source = i.toString
+ def term(s: String): Source =
+ "(\\s+)".r.replaceAllIn(escape(s, special), ws => "\\\\" + ws.matched)
+
+ def range(from: Int, to: Int): Source = "[" + from + " TO " + to + "]"
+ def phrase(s: String): Source = quote(escape(s, special))
+ def wildcard(s: String): Source =
+ if (!s.toList.exists(Symbol.is_ascii_blank)) escape(s, special -- wildcard_char.map(_.toString))
+ else error("Invalid whitespace character in wildcard: " + quote(s))
+
+ def filter(field: Field, x: Source): Source = field.name + ":" + x
+
+ def infix(op: Source, args: Iterable[Source]): Source = {
+ val body = args.iterator.filterNot(_.isBlank).mkString(" " + op + " ")
+ if_proper(body, enclose(body))
+ }
+
+ def AND(args: Iterable[Source]): Source = infix("AND", args)
+ def OR(args: Iterable[Source]): Source = infix("OR", args)
+
+ def and(args: Source*): Source = AND(args)
+ def or(args: Source*): Source = OR(args)
+
+ def exclude(arg: Source): Source = if_proper(arg, "-" + arg)
+
+ val query_all: Source = "*:" + all
+
+ def tag(name: String, arg: Source): Source = "{!tag=" + name + "}" + arg
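+  // Illustrative example, assuming a Field named "session":
+  //   Solr.filter(session, Solr.OR(List(Solr.phrase("HOL"), Solr.phrase("ZF"))))
+  //   yields the query source: session:("HOL" OR "ZF")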
+
+
+ /** solr schema **/
+
+ object Class {
+ def apply(
+ markup: String,
+ name: String,
+ props: Properties.T = Nil,
+ body: XML.Body = Nil
+ ): XML.Elem = XML.Elem(Markup(markup, ("class" -> ("solr." + name)) :: props), body)
+ }
+
+
+ /* properties */
+
+ val Indexed = new Properties.Boolean("indexed")
+ val Stored = new Properties.Boolean("stored")
+ val Column_Wise = new Properties.Boolean("docValues")
+ val Multi_Valued = new Properties.Boolean("multiValued")
+
+
+ /* types */
+
+ object Type {
+ val bool = Type("bool", "BoolField")
+ val int = Type("int", "IntPointField")
+ val long = Type("long", "LongPointField")
+ val double = Type("double", "DoublePointField")
+ val string = Type("string", "StrField")
+ val bytes = Type("bytes", "BinaryField")
+ val date = Type("date", "DatePointField")
+ }
+
+ case class Type(name: String, cls: String, props: Properties.T = Nil, body: XML.Body = Nil) {
+ def defn: XML.Elem = Class("fieldType", cls, Markup.Name(name) ::: props, body)
+ }
+
+
+ /* fields */
+
+ sealed case class Field(
+ name: String,
+ T: Type,
+ props: Properties.T = Nil,
+ unique_key: Boolean = false
+ ) {
+ def make_unique_key: Field = copy(unique_key = true)
+ def defn: XML.Elem = XML.elem(Markup("field", ("name" -> name) :: ("type" -> T.name) :: props))
+ }
+
+ object Fields {
+ def list(list: List[Field]): Fields = new Fields(list)
+ def apply(args: Field*): Fields = list(args.toList)
+ }
+
+ final class Fields private(val list: List[Field]) extends Iterable[Field] {
+ override def toString: String = list.mkString("Solr.Fields(", ", ", ")")
+ def iterator: Iterator[Field] = list.iterator
+ }
+
+
+ /* data */
+
+ abstract class Data(val name: String) {
+ def fields: Fields
+ def more_config: Map[Path, String] = Map.empty
+
+ def stored_fields: List[Field] =
+ fields.toList.filter(_.props match {
+ case Stored(false) => false
+ case _ => true
+ })
+
+ def unique_key: Field = Library.the_single(fields.filter(_.unique_key).toList)
+
+ def solr_config: XML.Body = List(XML.elem("config", List(
+ Class("schemaFactory", "ClassicIndexSchemaFactory"),
+ XML.elem("luceneMatchVersion", XML.string(Isabelle_System.getenv("SOLR_LUCENE_VERSION"))),
+ Class("updateHandler", "DirectUpdateHandler2", body = List(
+ XML.elem("autoCommit", List(
+ XML.elem("maxDocs", XML.string("-1")),
+ XML.elem("maxTime", XML.string("-1")),
+ XML.elem("maxSize", XML.string("-1")))))),
+ Class("requestHandler", "SearchHandler", Markup.Name("/select")))))
+
+ def schema: XML.Body =
+ List(XML.Elem(Markup("schema",
+ List(
+ "name" -> "isabelle",
+ "version" -> Isabelle_System.getenv("SOLR_SCHEMA_VERSION"))),
+ List(
+ XML.elem("uniqueKey", XML.string(unique_key.name)),
+ XML.elem("fields", fields.toList.map(_.defn)),
+ XML.elem("types", fields.map(_.T).toList.distinct.map(_.defn)))))
+ }
+
+
+  /** solr operations **/
+
+ /* documents */
+
+ object Document {
+ def empty: Document = new Document(new SolrInputDocument())
+ }
+
+ class Document private[Solr](val rep: SolrInputDocument) {
+ private def obj[A](a: A): Object = a.asInstanceOf[Object]
+ private def set[A](field: Field, x: A, f: A => Object = obj): Unit =
+ rep.addField(field.name, f(x))
+ private def set_option[A](field: Field, x: Option[A], f: A => Object = obj): Unit =
+ rep.addField(field.name, x.map(f).orNull)
+ private def set_list[A](field: Field, x: List[A], f: A => Object = obj): Unit =
+ rep.addField(field.name, x.map(f).toArray)
+
+ object bool {
+ def update(field: Field, x: Boolean): Unit = set(field, x)
+ def update(field: Field, x: Option[Boolean]): Unit = set_option(field, x)
+ def update(field: Field, x: List[Boolean]): Unit = set_list(field, x)
+ }
+ object int {
+ def update(field: Field, x: Int): Unit = set(field, x)
+ def update(field: Field, x: Option[Int]): Unit = set_option(field, x)
+ def update(field: Field, x: List[Int]): Unit = set_list(field, x)
+ }
+ object long {
+ def update(field: Field, x: Long): Unit = set(field, x)
+ def update(field: Field, x: Option[Long]): Unit = set_option(field, x)
+ def update(field: Field, x: List[Long]): Unit = set_list(field, x)
+ }
+ object double {
+ def update(field: Field, x: Double): Unit = set(field, x)
+ def update(field: Field, x: Option[Double]): Unit = set_option(field, x)
+ def update(field: Field, x: List[Double]): Unit = set_list(field, x)
+ }
+ object string {
+ def update(field: Field, x: String): Unit = set(field, x)
+ def update(field: Field, x: Option[String]): Unit = set_option(field, x)
+ def update(field: Field, x: List[String]): Unit = set_list(field, x)
+ }
+ object bytes {
+ private def value(bytes: Bytes): Array[Byte] =
+ if (bytes.size > Int.MaxValue) throw new IllegalArgumentException else bytes.make_array
+ def update(field: Field, x: Bytes): Unit = set(field, x, value)
+ def update(field: Field, x: Option[Bytes]): Unit = set_option(field, x, value)
+ def update(field: Field, x: List[Bytes]): Unit = set_list(field, x, value)
+ }
+ object date {
+ private def value(date: Date): java.util.Date = java.util.Date.from(date.rep.toInstant)
+ def update(field: Field, x: Date): Unit = set(field, x, value)
+ def update(field: Field, x: Option[Date]): Unit = set_option(field, x, value)
+ def update(field: Field, x: List[Date]): Unit = set_list(field, x, value)
+ }
+ }
+
+
+ /* results */
+
+ class Result private[Solr](rep: SolrDocument) {
+ private def single[A](field: Field, f: Object => A): A = {
+ val obj = rep.getFieldValue(field.name)
+ if (obj == null) error("No such field: " + field.name) else f(obj)
+ }
+ private def get[A](field: Field, f: Object => A): Option[A] = {
+ val obj = rep.getFieldValue(field.name)
+ if (obj == null) None else Some(f(obj))
+ }
+ private def list[A](field: Field, f: Object => A): List[A] = {
+ val objs = rep.getFieldValues(field.name)
+ if (objs == null) Nil else objs.iterator().asScala.toList.map(f)
+ }
+
+ def bool_value(obj: Object): Boolean = obj.asInstanceOf[Boolean]
+ def int_value(obj: Object): Int = obj.asInstanceOf[Int]
+ def long_value(obj: Object): Long = obj.asInstanceOf[Long]
+ def double_value(obj: Object): Double = obj.asInstanceOf[Double]
+ def string_value(obj: Object): String = obj.asInstanceOf[String]
+ def bytes_value(obj: Object): Bytes = Bytes(obj.asInstanceOf[Array[Byte]])
+ def date_value(obj: Object): Date = Date.instant(obj.asInstanceOf[java.util.Date].toInstant)
+
+ def bool(field: Field): Boolean = single(field, bool_value)
+ def int(field: Field): Int = single(field, int_value)
+ def long(field: Field): Long = single(field, long_value)
+ def double(field: Field): Double = single(field, double_value)
+ def string(field: Field): String = single(field, string_value)
+ def bytes(field: Field): Bytes = single(field, bytes_value)
+ def date(field: Field): Date = single(field, date_value)
+
+ def get_bool(field: Field): Option[Boolean] = get(field, bool_value)
+ def get_int(field: Field): Option[Int] = get(field, int_value)
+ def get_long(field: Field): Option[Long] = get(field, long_value)
+ def get_double(field: Field): Option[Double] = get(field, double_value)
+ def get_string(field: Field): Option[String] = get(field, string_value)
+ def get_bytes(field: Field): Option[Bytes] = get(field, bytes_value)
+ def get_date(field: Field): Option[Date] = get(field, date_value)
+
+ def list_bool(field: Field): List[Boolean] = list(field, bool_value)
+ def list_int(field: Field): List[Int] = list(field, int_value)
+ def list_long(field: Field): List[Long] = list(field, long_value)
+ def list_double(field: Field): List[Double] = list(field, double_value)
+ def list_string(field: Field): List[String] = list(field, string_value)
+ def list_bytes(field: Field): List[Bytes] = list(field, bytes_value)
+ def list_date(field: Field): List[Date] = list(field, date_value)
+ }
+
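+  // Results iterates documents chunk-wise via Solr cursor marks: once a chunk is exhausted
+  // and the cursor still advances, the next chunk is fetched, for at most more_chunks
+  // additional chunks (-1 means unbounded).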
+ class Results private[Solr](
+ solr: EmbeddedSolrServer,
+ query: SolrQuery,
+ private var cursor: String,
+ private var more_chunks: Int = -1
+ ) extends Iterator[Result] {
+ private def response: QueryResponse =
+ solr.query(query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursor))
+
+ private var _response = response
+ private var _iterator = _response.getResults.iterator
+
+ def num_found: Long = _response.getResults.getNumFound
+ def next_cursor: String = _response.getNextCursorMark
+
+ def hasNext: Boolean = _iterator.hasNext
+ def next(): Result = {
+ val res = new Result(_iterator.next())
+
+ if (!_iterator.hasNext && next_cursor != cursor && more_chunks != 0) {
+ cursor = next_cursor
+ more_chunks = more_chunks - 1
+ _response = response
+ _iterator = _response.getResults.iterator
+ }
+
+ res
+ }
+ }
+
+
+ /* facet results */
+
+ class Facet_Result private[Solr](rep: NestableJsonFacet) {
+ def count: Long = rep.getCount
+
+ private def get_bucket[A](bucket: BucketJsonFacet): (A, Long) =
+ bucket.getVal.asInstanceOf[A] -> bucket.getCount
+ private def get_facet[A](field: Field): Map[A, Long] =
+ rep.getBucketBasedFacets(field.name).getBuckets.asScala.map(get_bucket).toMap
+
+ def bool(field: Field): Map[Boolean, Long] = get_facet(field)
+ def int(field: Field): Map[Int, Long] = get_facet(field)
+ def long(field: Field): Map[Long, Long] = get_facet(field)
+ def string(field: Field): Map[String, Long] = get_facet(field)
+ }
+
+
+ /* stat results */
+
+ private def count_field(field: Field): String = field.name + "/count"
+
+ class Stat_Result private[Solr](rep: NestableJsonFacet) {
+ def count: Long = rep.getCount
+ def count(field: Field): Long = rep.getStatValue(count_field(field)).asInstanceOf[Long]
+ }
+
+
+ /* database */
+
+ def database_dir(database: String): Path = solr_home + Path.basic(database)
+
+ def init_database(database: String, data: Data, clean: Boolean = false): Database = {
+ val db_dir = database_dir(database)
+
+ if (clean) Isabelle_System.rm_tree(db_dir)
+
+ val conf_dir = db_dir + Path.basic("conf")
+ if (!conf_dir.is_dir) {
+ Isabelle_System.make_directory(conf_dir)
+ File.write(conf_dir + Path.basic("schema.xml"), XML.string_of_body(data.schema))
+ File.write(conf_dir + Path.basic("solrconfig.xml"), XML.string_of_body(data.solr_config))
+ data.more_config.foreach((path, content) => File.write(conf_dir + path, content))
+ }
+
+ open_database(database)
+ }
+
+ def open_database(database: String): Database = {
+ val server = new EmbeddedSolrServer(solr_home.java_path, database)
+
+ val cores = server.getCoreContainer.getAllCoreNames.asScala
+ if (cores.contains(database)) server.getCoreContainer.reload(database)
+ else server.getCoreContainer.create(database, Map.empty.asJava)
+
+ new Database(server)
+ }
+
+ class Database private[Solr](solr: EmbeddedSolrServer) extends AutoCloseable {
+ override def close(): Unit = solr.close()
+
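+    /* paginated query: results sorted by score and id, delivered in chunks via cursor marks */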
+ def execute_query[A](
+ id: Field,
+ fields: List[Field],
+ cursor: Option[String],
+ chunk_size: Int,
+ make_result: Results => A,
+ q: Source = query_all,
+ fq: List[Source] = Nil,
+ more_chunks: Int = -1
+ ): A = {
+ val query = new SolrQuery(proper_string(q).getOrElse(query_all))
+ .setFields(fields.map(_.name): _*)
+ .setFilterQueries(fq.filterNot(_.isBlank): _*)
+ .setRows(chunk_size)
+ .addSort("score", SolrQuery.ORDER.desc)
+ .addSort(id.name, SolrQuery.ORDER.asc)
+
+ val cursor1 = cursor.getOrElse(CursorMarkParams.CURSOR_MARK_START)
+ make_result(new Results(solr, query, cursor1, more_chunks))
+ }
+
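+    /* transactions: commit on success, rollback on exception; nesting is not allowed */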
+ private val in_transaction = Synchronized(false)
+ def transaction[A](body: => A): A = synchronized {
+ in_transaction.change(b => { require(!b, "transaction already active"); true })
+ try {
+ val result = body
+ solr.commit()
+ result
+ }
+ catch { case exn: Throwable => solr.rollback(); throw exn }
+ finally { in_transaction.change(_ => false) }
+ }
+
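+    /* JSON term facets per field, excluding the field's own filter tag from each facet domain */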
+ def execute_facet_query[A](
+ fields: List[Field],
+ make_result: Facet_Result => A,
+ q: Source = query_all,
+ fq: List[Source] = Nil,
+ max_terms: Int = -1
+ ): A = {
+ val query = new JsonQueryRequest().setQuery(proper_string(q).getOrElse(query_all)).setLimit(0)
+ fq.filterNot(_.isBlank).foreach(query.withFilter)
+
+ for (field <- fields) {
+ val facet =
+ new TermsFacetMap(field.name).setLimit(max_terms).withDomain(
+ new DomainMap().withTagsToExclude(field.name))
+ query.withFacet(field.name, facet)
+ }
+
+ make_result(new Facet_Result(query.process(solr).getJsonFacetingResponse))
+ }
+
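+    /* per-field distinct value counts via the "unique" stat facet */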
+ def execute_stats_query[A](
+ fields: List[Field],
+ make_result: Stat_Result => A,
+ q: Source = query_all,
+ fq: List[Source] = Nil
+ ): A = {
+ val query = new JsonQueryRequest().setQuery(proper_string(q).getOrElse(query_all)).setLimit(0)
+ fq.filterNot(_.isBlank).foreach(query.withFilter)
+
+ for (field <- fields) query.withStatFacet(count_field(field), "unique(" + field.name + ")")
+
+ make_result(new Stat_Result(query.process(solr).getJsonFacetingResponse))
+ }
+
+ def execute_batch_insert(batch: IterableOnce[Document => Unit]): Unit = {
+ val it =
+ batch.iterator.map { fill =>
+ val doc = Document.empty
+ fill(doc)
+ doc.rep
+ }
+ solr.add(it.asJava)
+ }
+
+ def execute_batch_delete(ids: List[String]): Unit = solr.deleteById(ids.asJava)
+ }
+}
\ No newline at end of file
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/thy_blocks.scala Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,141 @@
+/* Author: Fabian Huch, TU Muenchen
+
+Block structure for Isabelle theories, read from build database.
+*/
+
+package isabelle
+
+
+import scala.annotation.tailrec
+
+
+object Thy_Blocks {
+ /** spans **/
+
+ val keyword_elements =
+ Markup.Elements(Markup.KEYWORD, Markup.KEYWORD1, Markup.KEYWORD2, Markup.KEYWORD3)
+
+ object Span {
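+    /* read command spans from PIDE markup, marking spans that open a block with "begin" */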
+ def read_build(snapshot: Document.Snapshot): List[Span] = {
+ def is_begin(markup: Text.Markup): Boolean = markup.info match {
+ case XML.Elem(Markup(_, Markup.Kind(Markup.KEYWORD)), Nil) =>
+ XML.content(snapshot.xml_markup(markup.range)) == "begin"
+ case _ => false
+ }
+
+ def has_begin(range: Text.Range): Boolean =
+ snapshot
+ .select(range, keyword_elements, _ => markup => Some(is_begin(markup)))
+ .exists(_.info)
+
+ snapshot.select(Text.Range.full, Markup.Elements(Markup.COMMAND_SPAN), _ =>
+ {
+ case Text.Info(range, XML.Elem(Markup.Command_Span(arg), _)) =>
+ Some(Span(range, arg.name, arg.kind, has_begin(range)))
+ case _ => None
+ }).map(_.info)
+ }
+ }
+
+ case class Span(
+ override val range: Text.Range,
+ override val command: String,
+ kind: String,
+ has_begin: Boolean
+ ) extends Block {
+ def spans: List[Span] = List(this)
+
+ def is_of_kind(kinds: Set[String]): Boolean = kinds.contains(kind)
+ }
+
+
+ /** block structure **/
+
+ sealed trait Block {
+ def spans: List[Span]
+
+ def command: String = spans.head.command
+ def range: Text.Range = Text.Range(spans.head.range.start, spans.last.range.stop)
+ }
+
+ case class Single(span: Span) extends Block { def spans = List(span) }
+ case class Thy(inner: List[Block]) extends Block { def spans = inner.flatMap(_.spans) }
+ case class Prf(inner: List[Block]) extends Block { def spans = inner.flatMap(_.spans) }
+ case class Decl(inner: List[Block]) extends Block { def spans = inner.flatMap(_.spans) }
+
+
+ /** parser **/
+
+ object Parser {
+ object Blocks {
+ def empty: Blocks = new Blocks(Nil, Nil)
+ }
+
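+    /* parser state: stack of open blocks (innermost first) and finished blocks in reverse order */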
+ case class Blocks(private val stack: List[Block], private val out: List[Block]) {
+ def peek: Option[Block] = stack.headOption
+ def push(block: Block): Blocks = copy(stack = block :: stack)
+ def add(block: Block): Blocks =
+ stack match {
+ case Nil => copy(out = block :: out)
+ case head :: rest =>
+ val head1 =
+ head match {
+ case Thy(inner) => Thy(inner :+ block)
+ case Prf(inner) => Prf(inner :+ block)
+ case Decl(inner) => Decl(inner :+ block)
+ case _ => error("Cannot add to " + head)
+ }
+ copy(stack = head1 :: rest)
+ }
+
+ def pop: Blocks =
+ stack match {
+ case Nil => error("Nothing to pop")
+ case head :: rest => copy(stack = rest).add(head)
+ }
+
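+      /* close nested proof blocks, e.g. for a global qed */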
+ def pop_prfs: Blocks = {
+ val blocks1 = pop
+ blocks1.stack match {
+ case Prf(_) :: _ => blocks1.pop_prfs
+ case _ => blocks1
+ }
+ }
+
+ def output: List[Block] = if (stack.nonEmpty) error("Nonempty stack") else out.reverse
+ }
+
+ def parse(spans: List[Span]): List[Block] = {
+ import Keyword.*
+
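+      /* push, add, or pop blocks according to the keyword kind of the next span */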
+ def parse1(blocks: Blocks, span: Span): Blocks =
+ blocks.peek match {
+ case _ if span.is_of_kind(document) => blocks.add(span)
+ case None if span.is_of_kind(theory_begin) => blocks.push(Thy(List(span)))
+ case Some(_) if span.is_of_kind(diag) => blocks.add(span)
+ case Some(Thy(_)) if span.is_of_kind(theory_goal) => blocks.push(Prf(List(span)))
+ case Some(Thy(_)) if span.is_of_kind(theory_block) =>
+ (if (span.has_begin) blocks.push else blocks.add)(Decl(List(span)))
+ case Some(Thy(_)) if span.is_of_kind(theory_end) => blocks.add(span).pop
+ case Some(Thy(_)) if span.is_of_kind(theory_body) => blocks.add(span)
+ case Some(Prf(_)) if span.is_of_kind(proof_open) => blocks.push(Prf(List(span)))
+ case Some(Prf(_)) if span.is_of_kind(proof_close) => blocks.add(span).pop
+ case Some(Prf(_)) if span.is_of_kind(qed_global) => blocks.add(span).pop_prfs
+ case Some(Prf(_)) if span.is_of_kind(proof_body) => blocks.add(span)
+ case Some(Decl(_)) if span.is_of_kind(proof_open) => blocks.push(Prf(List(span)))
+ case Some(Decl(_)) if span.is_of_kind(proof_body) => blocks.add(span)
+ case Some(Decl(_)) if span.is_of_kind(theory_goal) => blocks.push(Prf(List(span)))
+ case Some(Decl(_)) if span.is_of_kind(theory_block) =>
+ (if (span.has_begin) blocks.push else blocks.add)(Decl(List(span)))
+ case Some(Decl(_)) if span.is_of_kind(theory_end) => blocks.add(span).pop
+ case Some(Decl(_)) if span.is_of_kind(theory_body) => blocks.add(span)
+ case e => error("Unexpected span " + span + " at " + e)
+ }
+
+ spans.foldLeft(Blocks.empty)(parse1).output
+ }
+ }
+
+ def read_blocks(snapshot: Document.Snapshot): List[Block] =
+ Parser.parse(Span.read_build(snapshot))
+}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/elm.json Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,43 @@
+{
+ "type": "application",
+ "source-directories": [
+ "src"
+ ],
+ "elm-version": "0.19.1",
+ "dependencies": {
+ "direct": {
+ "Dacit/material-components-web-elm": "1.0.0",
+ "elm/browser": "1.0.2",
+ "elm/core": "1.0.5",
+ "elm/html": "1.0.0",
+ "elm/http": "2.0.0",
+ "elm/json": "1.1.3",
+ "elm/time": "1.0.0",
+ "elm/url": "1.0.0",
+ "elm-community/array-extra": "2.6.0",
+ "elm-community/basics-extra": "4.1.0",
+ "elm-community/dict-extra": "2.4.0",
+ "elm-community/html-extra": "3.4.0",
+ "elm-community/json-extra": "4.3.0",
+ "elm-community/list-extra": "8.7.0",
+ "elm-community/maybe-extra": "5.3.0",
+ "elm-community/result-extra": "2.4.0",
+ "elm-community/string-extra": "4.0.1",
+ "hecrj/html-parser": "2.4.0"
+ },
+ "indirect": {
+ "elm/bytes": "1.0.8",
+ "elm/file": "1.0.5",
+ "elm/parser": "1.1.0",
+ "elm/regex": "1.0.0",
+ "elm/svg": "1.0.1",
+ "elm/virtual-dom": "1.0.3",
+ "rtfeldman/elm-hex": "1.0.0",
+ "rtfeldman/elm-iso8601-date-strings": "1.1.4"
+ }
+ },
+ "test-dependencies": {
+ "direct": {},
+ "indirect": {}
+ }
+}
Binary file src/Tools/Find_Facts/web/favicon.ico has changed
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/About.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,36 @@
+{- Author: Fabian Huch, TU München
+
+Find Facts about page.
+-}
+module About exposing (view)
+
+
+import Html exposing (..)
+import Html.Attributes exposing (href)
+import Material.Typography as Typography
+
+
+{- about -}
+
+view_def name def = [dt [] [text name], dd [] [text def]]
+defs =
+ view_def "Terms" "are words and Isabelle symbols separated by space, with wildcards and phrases." ++
+ view_def "Isabelle symbols" "can be entered as ascii, abbreviation (if unique) or UTF-8 symbol." ++
+ view_def "Wildcards" "consist of ?/* inside a term and match a single/arbitrary many characters." ++
+ view_def "Phrases" "are delimited by \" quotes and match a term sequence (no wildcards) exactly." ++
+ view_def "Main search bar" "will look for any of your terms in source code and names." ++
+ view_def "Filters" "allow you to exclude/include (click on the checkmark) specific properties." ++
+ view_def "Drill-down" "allows you to multi-select once your result set is small enough."
+
+view: Html Never
+view =
+ div [] [
+ h2 [Typography.headline4] [text "About"],
+ p [Typography.body1] [
+ text "This is a search engine for ",
+ a [href "https://isabelle.in.tum.de"] [text "Isabelle"],
+ text "."],
+ p [Typography.body1] [
+ text "Enter terms in the main search bar, add/remove filters, or drill down."],
+ p [Typography.body1] [text "You can also click on results to find out more about them!"],
+ p [Typography.body1] [dl [] defs]]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Delay.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,49 @@
+{- Author: Fabian Huch, TU München
+
+Simple delayed events.
+-}
+module Delay exposing (Model, empty, Delay, later, now, cancel, update)
+
+
+import Dict exposing (Dict)
+import Process
+import Task
+
+
+{- model to count invocations -}
+
+type Model = Model (Dict String Int)
+
+empty: Model
+empty = Model Dict.empty
+
+
+{- invoking delays -}
+
+type alias Delay msg = {name: String, delay: Float, event: Cmd msg}
+
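+-- schedule the event after the given delay, counting pending invocations per name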
+later: (Delay msg -> msg) -> Model -> Delay msg -> (Model, Cmd msg)
+later msg (Model model) delay =
+ let
+ num = model |> Dict.get delay.name |> Maybe.withDefault 0
+ model1 = model |> Dict.insert delay.name (num + 1)
+ cmd = Task.perform (always (msg delay)) (Process.sleep delay.delay)
+ in (Model model1, cmd)
+
+now: Model -> Delay msg -> (Model, Cmd msg)
+now (Model model) delay = (model |> Dict.remove delay.name |> Model, delay.event)
+
+cancel: Model -> Delay msg -> Model
+cancel (Model model) delay = model |> Dict.remove delay.name |> Model
+
+
+{- update -}
+
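+-- fire the delayed event only if no newer invocation with the same name is pending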
+update: Model -> Delay msg -> (Model, Cmd msg)
+update (Model model) delay =
+ case Dict.get delay.name model of
+ Nothing -> (Model model, Cmd.none)
+ Just num ->
+ if num > 1
+ then (model |> Dict.insert delay.name (num - 1) |> Model, Cmd.none)
+ else (model |> Dict.remove delay.name |> Model, delay.event)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Details.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,58 @@
+{- Author: Fabian Huch, TU München
+
+Find Facts details view.
+-}
+module Details exposing (Model, init, get_id, set_loaded, view)
+
+
+import Html exposing (..)
+import Html.Attributes exposing (href, style)
+import Html.Extra as Html exposing (viewIf)
+import Http
+import Library exposing (..)
+import Material.Theme as Theme
+import Material.Typography as Typography
+import Query exposing (Block)
+import Utils exposing (Query_Param)
+
+
+{- model -}
+
+type State = Loading | Error String | Loaded Block
+type Model = Model {id: String, state: State}
+
+init id = Model {id = id, state = Loading}
+
+get_id: Model -> String
+get_id (Model model) = model.id
+
+
+{- updates -}
+
+set_loaded: Result Http.Error Block -> Model -> Model
+set_loaded res (Model model) =
+ case res of
+ Result.Ok block -> Model {model | state = Loaded block}
+ Result.Err err -> Model {model | state = Error (get_msg err)}
+
+
+{- view -}
+
+view: Model -> Html Never
+view (Model model) =
+ case model.state of
+ Loading -> text "Loading ..."
+ Error msg -> text msg
+ Loaded block ->
+ let
+ theory = block.chapter ++ "/" ++ block.theory
+ start_before =
+ block.start_line - count_lines (block.src_before ++ block.html) + count_lines block.html
+ around s = "<span style=\"color: gray\">" ++ (Utils.escape_html s) ++ "</span>"
+ code = around block.src_before ++ block.html ++ around block.src_after
+ url = block.url ++ maybe_proper block.entity_kname ((++) "#")
+ in div [] [
+ h2 [Typography.headline4] [text "Details"],
+ h3 [Typography.headline6]
+ [text "In ", a [href url] [text theory], text (" (" ++ block.file ++ ")")],
+ Utils.view_code code start_before]
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Library.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,53 @@
+{- Author: Fabian Huch, TU München
+
+Basic library functions.
+-}
+module Library exposing (quote, try_unquote, perhaps_unquote, if_proper, maybe_proper, split_lines,
+ count_lines, list_if, get_msg)
+
+
+import Http
+
+
+{- strings -}
+
+quote: String -> String
+quote s = "\"" ++ s ++ "\""
+
+try_unquote: String -> Maybe String
+try_unquote s =
+ if String.startsWith "\"" s && String.endsWith "\"" s then Just (String.slice 1 -1 s) else Nothing
+
+perhaps_unquote: String -> String
+perhaps_unquote s = try_unquote s |> Maybe.withDefault s
+
+if_proper: Bool -> String -> String
+if_proper cond s = if cond then s else ""
+
+maybe_proper: Maybe a -> (a -> String) -> String
+maybe_proper a f = a |> Maybe.map f |> Maybe.withDefault ""
+
+split_lines: String -> List String
+split_lines s = s |> String.replace "\r" "" |> String.split "\n"
+
+count_lines: String -> Int
+count_lines s = s |> split_lines |> List.length
+
+
+{- lists -}
+
+list_if: Bool -> a -> List a
+list_if cond x = if cond then [x] else []
+
+
+{- errors -}
+
+get_msg: Http.Error -> String
+get_msg error =
+ let print_error = (++) "*** "
+ in case error of
+ Http.BadUrl s -> print_error "Internal Error: Malformed Url " ++ quote s
+ Http.Timeout -> print_error "Timeout"
+ Http.NetworkError -> print_error "Network Error"
+ Http.BadStatus i -> print_error ("Status Code " ++ String.fromInt i)
+      Http.BadBody s -> print_error "Internal Error: Bad Body " ++ quote s
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Main.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,305 @@
+{- Author: Fabian Huch, TU München
+
+Find Facts application.
+-}
+module Main exposing (init, main, subscriptions, update, view)
+
+
+import About
+import Delay exposing (Delay)
+import Details
+import Html.Attributes exposing (href, style)
+import Html.Events as Events
+import Html.Lazy as Lazy
+import Http
+import Browser
+import Browser.Navigation as Navigation
+import Html exposing (..)
+import Json.Decode as Decode
+import Material.Theme as Theme
+import Material.TopAppBar as TopAppBar
+import Material.Typography as Typography
+import Query exposing (Query, Query_Blocks)
+import Results
+import Searcher
+import Url exposing (Url)
+import Url.Builder
+import Utils
+
+
+{- main -}
+
+main: Program () Model Msg
+main =
+ Browser.application {init = init, view = view, update = update, subscriptions = subscriptions,
+ onUrlChange = Url_Changed, onUrlRequest = Link_Clicked}
+
+
+{- model -}
+
+type Page = Not_Found | About | Detail Details.Model | Search Searcher.Model Query Results.Model
+
+home: Page
+home = Search Searcher.empty Query.empty Results.empty
+
+type alias Model = {nav_key: Navigation.Key, url: Url, page: Page, delay: Delay.Model}
+
+should_query : Maybe Query -> Query -> Bool
+should_query query query1 = query /= Just query1 && not (Query.empty_query query1)
+
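+-- merge a searcher decoded from the URL, keeping previous results if the query is unchanged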
+populate: Page -> Searcher.Model -> Page
+populate page0 searcher =
+ case page0 of
+ Search searcher0 query0 results0 ->
+ let
+ query = Searcher.get_query searcher
+ results =
+ if should_query (Just query0) query then Results.loading
+ else if Query.empty_query query then Results.empty
+ else results0
+ in Search (Searcher.populate searcher0 searcher) query results
+ _ ->
+ let
+ query = Searcher.get_query searcher
+ results0 = Results.empty
+ results = if should_query Nothing query then Results.loading else results0
+ in Search searcher query results
+
+init: () -> Url -> Navigation.Key -> (Model, Cmd Msg)
+init _ url key =
+ case url.fragment of
+ Nothing -> (Model key url home Delay.empty, home |> url_encode url |> push_url False key)
+ Just fragment ->
+ let
+ page = fragment_decode (populate Not_Found) fragment
+ cmd =
+ case page of
+ Search _ query _ ->
+ if should_query Nothing query then get_result query else Cmd.none
+ Detail details -> get_block (Details.get_id details)
+ _ -> Cmd.none
+ in (Model key url page Delay.empty, cmd)
+
+
+{- url encoding/decoding -}
+
+aboutN = "about"
+searchN = "search"
+
+url_encode: Url -> Page -> Url
+url_encode url page =
+ case page of
+ Not_Found -> {url | fragment = Nothing}
+ About -> {url | fragment = Just aboutN}
+ Detail details ->
+ {url | fragment = Just (Details.get_id details)}
+ Search searcher _ _ ->
+ let params = Searcher.params searcher
+ in {url | fragment = Just (searchN ++ (Url.Builder.toQuery params))}
+
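+-- fragments are "about", "search?<params>", or a plain block id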
+fragment_decode : (Searcher.Model -> Page) -> String -> Page
+fragment_decode make_page fragment =
+ case String.split "?" fragment of
+ [] -> Not_Found
+ page :: qs ->
+ let
+ parse parser f =
+ Utils.parse_query (String.join "?" qs) parser
+ |> Maybe.map f
+ |> Maybe.withDefault Not_Found
+ in
+ if page == aboutN && List.isEmpty qs then About
+ else if page == searchN then parse Searcher.parser make_page
+ else if List.isEmpty qs then Detail (Details.init page)
+ else Not_Found
+
+url_decode: Url -> Page -> Page
+url_decode url page0 =
+ case url.fragment of
+ Nothing -> home
+ Just fragment -> fragment_decode (populate page0) fragment
+
+
+{- commands -}
+
+push_url: Bool -> Navigation.Key -> Url -> Cmd msg
+push_url save key url =
+ (if save then Navigation.pushUrl else Navigation.replaceUrl) key (Url.toString url)
+
+get_result: Query -> Cmd Msg
+get_result query =
+ Http.post {url="/api/query", expect = Http.expectJson (Query_Result query) Query.decode_result,
+ body = query |> Query.encode_query |> Http.jsonBody}
+
+query_delay: Query -> Delay Msg
+query_delay query = {name = "query", delay = 500, event = get_result query}
+
+get_blocks: Query -> String -> Cmd Msg
+get_blocks query cursor =
+ Http.post {url = "/api/blocks", expect = Http.expectJson (Query_Blocks query) Query.decode_blocks,
+ body = {query = query, cursor = cursor} |> Query.encode_query_blocks |> Http.jsonBody}
+
+get_block: String -> Cmd Msg
+get_block id =
+ Http.post {url = "/api/block", expect = Http.expectJson (Query_Block id) Query.decode_block,
+ body = id |> Query.encode_query_block |> Http.jsonBody}
+
+
+{- update -}
+
+type alias Scroll_Info = {pos: Float, top: Float, height: Float}
+type Msg =
+ Link_Clicked Browser.UrlRequest |
+ Url_Changed Url |
+ Searcher_Msg Searcher.Msg |
+ Delay_Msg (Delay Msg) |
+ Results_Msg (Results.Msg) |
+ Query_Result Query (Result Http.Error Query.Result) |
+ Query_Blocks Query (Result Http.Error Query.Blocks) |
+ Query_Block String (Result Http.Error Query.Block) |
+ Scroll_Event Scroll_Info
+
+update : Msg -> Model -> (Model, Cmd Msg)
+update msg model =
+ case msg of
+ Link_Clicked urlRequest ->
+ case urlRequest of
+ Browser.Internal url -> (model, push_url True model.nav_key url)
+ Browser.External href -> (model, Navigation.load href)
+
+ Url_Changed url ->
+ let
+ query0 =
+ case model.page of
+ Search _ query _ -> Just query
+ _ -> Nothing
+ page = url_decode url model.page
+ (delay, cmd) =
+ case page of
+ Search _ query _ ->
+ if should_query query0 query then Delay.now model.delay (query_delay query)
+ else (model.delay, Cmd.none)
+ Detail details -> (model.delay, get_block (Details.get_id details))
+ _ -> (model.delay, Cmd.none)
+ in ({model | url = url, delay = delay, page = page}, cmd)
+
+ Delay_Msg msg1 ->
+ let (delay, cmd) = Delay.update model.delay msg1
+ in ({model | delay = delay}, cmd)
+
+ Searcher_Msg msg1 ->
+ case model.page of
+ Search searcher _ results ->
+ let
+ (searcher1, update_search) = Searcher.update msg1 searcher
+ ((delay, search_cmd), query1, results1) =
+ case update_search of
+ Searcher.None query -> ((model.delay, Cmd.none), query, results)
+ Searcher.Empty query ->
+ ((Delay.cancel model.delay (query_delay query), Cmd.none), query, Results.empty)
+ Searcher.Later query ->
+ (Delay.later Delay_Msg model.delay (query_delay query), query, Results.loading)
+ Searcher.Now query ->
+ (Delay.now model.delay (query_delay query), query, Results.loading)
+ page = Search searcher1 query1 results1
+ nav_cmd = url_encode model.url page |> push_url False model.nav_key
+ in
+ ({model | page = page, delay = delay}, Cmd.batch [nav_cmd, search_cmd])
+ _ -> (model, Cmd.none)
+
+ Results_Msg (Results.Selected id) ->
+ (model, url_encode model.url (id |> Details.init |> Detail) |> push_url True model.nav_key)
+
+ Query_Result query res ->
+ case model.page of
+ Search searcher query1 _ ->
+ case res of
+ Result.Ok result ->
+ if query /= query1 then (model, Cmd.none)
+ else
+ let
+ searcher1 = Searcher.set_result result searcher
+ results = Results.result result
+ in ({model | page = Search searcher1 query1 results}, Cmd.none)
+ Result.Err err ->
+ ({model | page = Search searcher query (Results.error err)}, Cmd.none)
+ _ -> (model, Cmd.none)
+
+ Query_Blocks query res ->
+ case model.page of
+ Search searcher query1 results ->
+ if query /= query1 then (model, Cmd.none)
+ else ({model | page = Search searcher query (Results.set_loaded res results)}, Cmd.none)
+ _ -> (model, Cmd.none)
+
+ Query_Block id res ->
+ case model.page of
+ Detail details ->
+ if id /= (Details.get_id details) then (model, Cmd.none)
+ else ({model | page = Detail (Details.set_loaded res details)}, Cmd.none)
+ _ -> (model, Cmd.none)
+
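+    -- infinite scrolling: load more blocks once the user has scrolled close to the bottom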
+ Scroll_Event scroll ->
+ if (scroll.pos - scroll.top) > scroll.height then (model, Cmd.none)
+ else
+ case model.page of
+ Search searcher query results ->
+ case Results.get_maybe_cursor results of
+ Nothing -> (model, Cmd.none)
+ Just cursor ->
+ let results1 = Results.set_load_more results
+ in ({model | page = Search searcher query results1}, get_blocks query cursor)
+ _ -> (model, Cmd.none)
+
+
+{- subscriptions -}
+
+subscriptions: Model -> Sub Msg
+subscriptions model =
+ case model.page of
+ Search searcher _ _ -> Searcher.subscriptions searcher |> Sub.map Searcher_Msg
+ _ -> Sub.none
+
+
+{- view -}
+
+decode_scroll : Decode.Decoder Msg
+decode_scroll =
+ Decode.map Scroll_Event (
+ Decode.map3 Scroll_Info
+ (Decode.at ["target", "scrollHeight"] Decode.float)
+ (Decode.at ["target", "scrollTop"] Decode.float)
+ (Decode.at ["target", "offsetHeight"] Decode.float))
+
+view: Model -> Browser.Document Msg
+view model =
+ let
+ view_navlink attrs page name =
+ a (attrs ++ [href ("#" ++ page), style "margin" "0 16px", Theme.onPrimary])
+ [text name]
+ (title, content) = case model.page of
+ Not_Found -> ("404 Not Found", [h2 [Typography.headline4] [text "404 Not Found"]])
+ About -> ("Find Facts | About", [About.view |> Html.map never])
+ Detail details -> ("Find Facts | Details", [Details.view details |> Html.map never])
+ Search searcher _ results -> ("Find Facts | Search", [
+ searcher |> Lazy.lazy Searcher.view |> Html.map Searcher_Msg,
+ results |> Lazy.lazy Results.view |> Html.map Results_Msg])
+ in {
+ title = title,
+ body = [
+ div [style "height" "100vh", Events.on "scroll" decode_scroll, style "overflow" "scroll"] [
+ TopAppBar.regular
+ (TopAppBar.config
+ |> TopAppBar.setDense True
+ |> TopAppBar.setAttributes [style "position" "relative"])
+ [TopAppBar.row [style "max-width" "1200px", style "margin" "0 auto"]
+ [TopAppBar.section [TopAppBar.alignStart]
+ [view_navlink [TopAppBar.title] searchN "Find Facts"],
+ TopAppBar.section [TopAppBar.alignEnd]
+ [view_navlink [Typography.button] searchN "Search",
+ view_navlink [Typography.button] aboutN "About"]]],
+ div
+ [style "padding-left" "16px", style "padding-right" "16px", style "max-width" "1200px",
+ style "margin" "0 auto"]
+ content]]}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Parser.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,70 @@
+{- Author: Fabian Huch, TU Muenchen
+
+Generic parser combinators.
+-}
+module Parser exposing (Parser, succeed, fail, elem, rep, map, map2, map3, flat_map, or, parse)
+
+
+type Parser a b = Parser (List a -> Maybe (List a, b))
+
+succeed: b -> Parser a b
+succeed y = Parser (\xs -> Just (xs, y))
+
+fail: Parser a b
+fail = Parser (always Nothing)
+
+elem: (a -> Bool) -> Parser a a
+elem cond =
+ Parser (\xs ->
+ case xs of
+ [] -> Nothing
+ x :: xs1 -> (if cond x then Just (xs1, x) else Nothing))
+
+rep: Parser a b -> Parser a (List b)
+rep (Parser parser) =
+ let
+ rep_rec xs =
+ case parser xs of
+ Nothing -> (xs, [])
+ Just (xs1, y) -> Tuple.mapSecond ((::) y) (rep_rec xs1)
+ in Parser (rep_rec >> Just)
+
+map: (b -> c) -> Parser a b -> Parser a c
+map f (Parser parser) =
+ Parser (parser >> Maybe.map (Tuple.mapSecond f))
+
+map2: (b -> c -> d) -> Parser a b -> Parser a c -> Parser a d
+map2 f (Parser parser1) (Parser parser2) =
+ Parser (\xs ->
+ case parser1 xs of
+ Nothing -> Nothing
+ Just (xs1, y1) ->
+ case (parser2 xs1) of
+ Nothing -> Nothing
+ Just (xs2, y2) -> Just (xs2, f y1 y2))
+
+map3: (b -> c -> d -> e) -> Parser a b -> Parser a c -> Parser a d -> Parser a e
+map3 f parser1 parser2 parser3 =
+ map2 (\(b, c) d -> f b c d) (map2 (\b c -> (b, c)) parser1 parser2) parser3
+
+flat_map: (b -> Parser a c) -> Parser a b -> Parser a c
+flat_map f (Parser parser) =
+ Parser (\xs ->
+ case parser xs of
+ Nothing -> Nothing
+ Just (xs1, y) ->
+ case (f y) of
+ Parser parser1 -> parser1 xs1)
+
+or: Parser a b -> Parser a b -> Parser a b
+or (Parser parser1) (Parser parser2) =
+ Parser (\xs ->
+ case parser1 xs of
+ Nothing -> parser2 xs
+ res -> res)
+
+parse : Parser a b -> List a -> Maybe b
+parse (Parser parser) elems =
+ case parser elems of
+ Just ([], y) -> Just y
+ _ -> Nothing
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Query.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,137 @@
+{- Author: Fabian Huch, TU Muenchen
+
+Queries against the find facts server.
+-}
+module Query exposing (Atom(..), Filter(..), Select, Query, Query_Blocks, empty, empty_query,
+ encode_query, encode_query_blocks, encode_query_block, Block, Blocks, Facets, Result,
+ decode_block, decode_blocks, decode_result)
+
+
+import Dict exposing (Dict)
+import Json.Decode as Decode
+import Json.Decode.Extra as Decode
+import Json.Encode as Encode
+
+
+{- queries -}
+
+type Atom = Value String | Exact String
+type Filter = Any_Filter (List Atom) | Field_Filter String (List Atom)
+type alias Select = {field: String, values: List String}
+type alias Query = {filters: List Filter, exclude: List Filter, selects: List Select}
+type alias Query_Blocks = {query: Query, cursor: String}
+
+empty: Query
+empty = Query [] [] []
+
+empty_atom: Atom -> Bool
+empty_atom atom =
+ case atom of
+ Value s -> String.words s |> List.isEmpty
+ Exact s -> String.words s |> List.isEmpty
+
+empty_filter: Filter -> Bool
+empty_filter filter =
+ case filter of
+ Any_Filter atoms -> List.all empty_atom atoms
+ Field_Filter _ atoms -> List.all empty_atom atoms
+
+empty_select: Select -> Bool
+empty_select select = List.isEmpty select.values
+
+empty_query: Query -> Bool
+empty_query query =
+ List.all empty_filter (query.filters ++ query.exclude) && List.all empty_select query.selects
+
+
+{- json encoding -}
+
+encode_atom: Atom -> Encode.Value
+encode_atom atom =
+ case atom of
+ Exact phrase -> Encode.object [("exact", Encode.string phrase)]
+ Value wildcard -> Encode.object [("value", Encode.string wildcard)]
+
+encode_filter: Filter -> Encode.Value
+encode_filter filter =
+ case filter of
+ Any_Filter atoms -> Encode.object [("either", Encode.list encode_atom atoms)]
+ Field_Filter field atoms ->
+ Encode.object [("either", Encode.list encode_atom atoms), ("field", Encode.string field)]
+
+encode_select: Select -> Encode.Value
+encode_select select =
+ Encode.object [
+ ("field", Encode.string select.field),
+ ("values", Encode.list Encode.string select.values)]
+
+encode_query: Query -> Encode.Value
+encode_query query =
+ Encode.object [
+ ("filters", Encode.list encode_filter query.filters),
+ ("exclude", Encode.list encode_filter query.exclude),
+ ("selects", Encode.list encode_select query.selects)]
+
+encode_query_blocks: Query_Blocks -> Encode.Value
+encode_query_blocks query_blocks =
+ Encode.object
+ [("query", encode_query query_blocks.query), ("cursor", Encode.string query_blocks.cursor)]
+
+encode_query_block: String -> Encode.Value
+encode_query_block id = Encode.object [("id", Encode.string id)]
+
+
+{- results -}
+
+type alias Block = {
+ id: String,
+ chapter: String,
+ session: String,
+ theory: String,
+ file: String,
+ file_name: String,
+ url: String,
+ command: String,
+ start_line: Int,
+ src_before: String,
+ src_after: String,
+ html: String,
+ entity_kname: Maybe String}
+
+type alias Blocks = {num_found: Int, blocks: List Block, cursor: String}
+type alias Facets = Dict String (Dict String Int)
+type alias Result = {blocks: Blocks, facets: Facets}
+
+
+{- json decoding -}
+
+decode_block: Decode.Decoder Block
+decode_block =
+ Decode.succeed Block
+ |> Decode.andMap (Decode.field "id" Decode.string)
+ |> Decode.andMap (Decode.field "chapter" Decode.string)
+ |> Decode.andMap (Decode.field "session" Decode.string)
+ |> Decode.andMap (Decode.field "theory" Decode.string)
+ |> Decode.andMap (Decode.field "file" Decode.string)
+ |> Decode.andMap (Decode.field "file_name" Decode.string)
+ |> Decode.andMap (Decode.field "url" Decode.string)
+ |> Decode.andMap (Decode.field "command" Decode.string)
+ |> Decode.andMap (Decode.field "start_line" Decode.int)
+ |> Decode.andMap (Decode.field "src_before" Decode.string)
+ |> Decode.andMap (Decode.field "src_after" Decode.string)
+ |> Decode.andMap (Decode.field "html" Decode.string)
+ |> Decode.andMap (Decode.field "entity_kname" (Decode.maybe Decode.string))
+
+decode_blocks: Decode.Decoder Blocks
+decode_blocks =
+ Decode.map3 Blocks
+ (Decode.field "num_found" Decode.int)
+ (Decode.field "blocks" (Decode.list decode_block))
+ (Decode.field "cursor" Decode.string)
+
+decode_facets: Decode.Decoder Facets
+decode_facets = Decode.dict (Decode.dict Decode.int)
+
+decode_result: Decode.Decoder Result
+decode_result =
+ Decode.map2 Result (Decode.field "blocks" decode_blocks) (Decode.field "facets" decode_facets)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Results.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,101 @@
+{- Author: Fabian Huch, TU München
+
+Find Facts results component.
+-}
+module Results exposing (Model, empty, Msg(..), loading, error, result, set_load_more, set_loaded,
+ get_maybe_cursor, view)
+
+
+import Html exposing (..)
+import Html.Attributes exposing (style)
+import Html.Extra as Html
+import Html.Lazy as Lazy
+import Http
+import Material.Card as Card
+import Material.Elevation as Elevation
+import Material.LayoutGrid as LayoutGrid
+import Material.Typography as Typography
+import Material.LinearProgress as LinearProgress
+import Query exposing (Block)
+import Library exposing (..)
+import Utils
+
+
+{- model -}
+
+type Model = Empty | Loading | Error String | Results Query.Blocks Bool
+
+empty: Model
+empty = Empty
+
+
+{- updates -}
+
+type Msg = Selected String
+
+loading: Model
+loading = Loading
+
+error: Http.Error -> Model
+error err = Error (get_msg err)
+
+result: Query.Result -> Model
+result res = Results res.blocks False
+
+set_load_more: Model -> Model
+set_load_more model =
+ case model of
+ Results blocks _ -> Results blocks True
+ _ -> model
+
+set_loaded: Result Http.Error Query.Blocks -> Model -> Model
+set_loaded res model =
+ case (res, model) of
+ (Result.Ok blocks1, Results blocks _) ->
+ let blocks2 = {blocks1 | blocks = blocks.blocks ++ blocks1.blocks}
+ in Results blocks2 False
+ (Result.Err err, _) -> Error (get_msg err)
+ _ -> model
+
+get_maybe_cursor: Model -> Maybe String
+get_maybe_cursor model =
+ case model of
+ Results blocks is_loading ->
+ if blocks.num_found <= List.length blocks.blocks || is_loading then Nothing
+ else Just blocks.cursor
+ _ -> Nothing
+
+
+{- view -}
+
+view_block block =
+ Card.card
+ (Card.setAttributes [Elevation.z3, style "margin-bottom" "16px"]
+ Card.config |> Card.setOnClick (Selected block.id))
+ {actions = Nothing,
+ blocks = (
+ Card.block
+ (LayoutGrid.layoutGrid [LayoutGrid.alignLeft, style "width" "100%"] [
+ div [Typography.caption, style "margin-bottom" "8px"]
+ [text (block.session ++ "/" ++ block.file_name)],
+ Utils.view_code block.html block.start_line]),
+ [])}
+
+view_results blocks is_loading =
+ let
+ num = List.length blocks.blocks
+ loaded = span [Typography.subtitle1] [text ("Loaded " ++ (String.fromInt num) ++ "/" ++
+ (String.fromInt blocks.num_found) ++
+ if_proper (blocks.num_found > num) ". Scroll for more ...")]
+ in div [] (
+ h2 [Typography.headline4] [text ("Found " ++ String.fromInt blocks.num_found ++ " results")] ::
+ (blocks.blocks |> List.map (Lazy.lazy view_block)) ++
+ [if is_loading then LinearProgress.indeterminate LinearProgress.config else loaded])
+
+view: Model -> Html Msg
+view model =
+ case model of
+ Empty -> Html.nothing
+ Loading -> LinearProgress.indeterminate LinearProgress.config
+ Error msg -> span [Typography.body1] [text msg]
+ Results blocks is_loading -> Lazy.lazy2 view_results blocks is_loading
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Searcher.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,390 @@
+{- Author: Fabian Huch, TU München
+
+Find Facts searcher component: Url-encoded 'dry' query state,
+enriched by facet information from query.
+-}
+module Searcher exposing (Model, empty, params, parser, Msg, Update(..), update, populate,
+ set_result, subscriptions, view, get_query)
+
+
+import Array exposing (Array)
+import Array.Extra as Array
+import Browser.Events as Events
+import Dict exposing (Dict)
+import Html exposing (..)
+import Html.Attributes exposing (name, style)
+import Html.Events.Extra exposing (onEnter)
+import Html.Extra as Html
+import Json.Decode as Decode
+import Library exposing (..)
+import Material.Chip.Filter as FilterChip
+import Material.ChipSet.Filter as FilterChipSet
+import Material.Elevation as Elevation
+import Material.IconButton as IconButton
+import Material.LayoutGrid as LayoutGrid
+import Material.List as ItemList
+import Material.List.Item as ListItem
+import Material.Select as Select
+import Material.Select.Item as SelectItem
+import Material.TextField as TextField
+import Material.TextField.Icon as TextFieldIcon
+import Material.Theme as Theme
+import Material.Typography as Typography
+import Parser exposing (Parser)
+import Query exposing (Query)
+import Set exposing (Set)
+import Html.Lazy as Lazy
+import Url.Builder exposing (QueryParameter)
+import Maybe exposing (Maybe)
+import Maybe.Extra as Maybe
+import Utils exposing (Query_Param, parse_key)
+
+
+{- config -}
+
+max_facet_terms = 5
+
+
+{- fields -}
+
+chapterN = "chapter"
+sessionN = "session"
+file_typeN = "file_type"
+theoryN = "theory"
+commandN = "command"
+sourceN = "source"
+namesN = "names"
+constsN = "consts"
+typsN = "typs"
+thmsN = "thms"
+kindsN = "kinds"
+
+search_fields = [sessionN, theoryN, commandN, sourceN, namesN, constsN, typsN, thmsN]
+facet_fields = [chapterN, sessionN, file_typeN, theoryN, commandN, constsN, typsN, thmsN, kindsN]
+
+
+{- search components -}
+
+type alias Facet = {field: String, terms: Set String}
+type alias Filter = {field: String, value: String, exclude: Bool}
+type alias Search = {any_filter: String, filters: Array Filter, facets: Dict String Facet}
+type Model =
+ Model {search: Search, facets: Maybe Query.Facets, filter_search: String, focus: Maybe Int}
+
+init search = Model {search = search, facets = Nothing, filter_search = "", focus = Nothing}
+
+empty: Model
+empty = init (Search "" Array.empty Dict.empty)
+
+
+{- URL encoding -}
+
+filter_params : Filter -> List QueryParameter
+filter_params filter =
+ [Url.Builder.string ((if filter.exclude then "-" else "!") ++ filter.field) filter.value ]
+
+facet_params : Facet -> List QueryParameter
+facet_params facet =
+ facet.terms |> Set.toList |> List.sort |> List.map (Url.Builder.string facet.field)
+
+search_params: Search -> List QueryParameter
+search_params search =
+ list_if (search.any_filter /= "") (Url.Builder.string "q" search.any_filter) ++
+ (search.filters |> Array.toList |> List.concatMap filter_params) ++
+ (search.facets |> Dict.values |> List.sortBy .field |> List.concatMap facet_params)
+
+params: Model -> List QueryParameter
+params (Model model) = search_params model.search
+
+
+{- URL parsing -}
+
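+-- filters are encoded as "!field=value" (include) or "-field=value" (exclude)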
+filter_parser: Parser Query_Param Filter
+filter_parser =
+ let
+ make_filter exclude (key, value) = Filter (String.dropLeft 1 key) value exclude
+ include_parser = parse_key (String.startsWith "!") |> Parser.map (make_filter False)
+ exclude_parser = parse_key (String.startsWith "-") |> Parser.map (make_filter True)
+ in Parser.or include_parser exclude_parser
+
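+-- all further parameters with the same key contribute terms to one facet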
+facet_parser: Parser Query_Param Facet
+facet_parser =
+ let
+ rest_parser (key, value) =
+ Parser.rep (parse_key ((==) key))
+ |> Parser.map (List.map Tuple.second >> ((::) value) >> Set.fromList >> Facet key)
+ in Parser.elem (always True) |> Parser.flat_map rest_parser
+
+search_parser: Parser Query_Param Search
+search_parser =
+ Parser.map3 Search
+ (Parser.or (parse_key ((==) "q") |> Parser.map Tuple.second) (Parser.succeed ""))
+ (Parser.rep filter_parser |> Parser.map Array.fromList)
+ (Parser.rep facet_parser |> Parser.map
+ (List.map (\facet -> (facet.field, facet)) >> Dict.fromList))
+
+parser: Parser Query_Param Model
+parser = Parser.map init search_parser
+
+
+{- update -}
+
+type Msg =
+ Input_Any String |
+ Add_Filter String |
+ Input_Filter Bool String |
+ Change_Filter Int |
+ Remove_Filter Int |
+ Change_Facet String String |
+ Set_Focus (Maybe Int)
+
+type Update = Empty Query | None Query | Later Query | Now Query
+
+empty_filter field = Filter field "" False
+update_filter value filter = {filter | value = value}
+change_filter filter = {filter | exclude = (not filter.exclude)}
+change_terms value terms = (if Set.member value terms then Set.remove else Set.insert) value terms
+
+change_facet : String -> String -> Maybe Facet -> Maybe Facet
+change_facet field value facet0 =
+ let
+ facet1 = Maybe.withDefault (Facet field Set.empty) facet0
+ facet2 = {facet1 | terms = facet1.terms |> change_terms value}
+ in if Set.isEmpty facet2.terms then Nothing else Just facet2
+
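+-- commit pending input of the focused filter, then apply the given update to the search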
+update_search: Model -> (Search -> (Search, Query -> Update)) -> (Model, Update)
+update_search (Model model) update_search_f =
+ let
+ search0 = model.search
+ search1 =
+ case model.focus of
+ Nothing -> search0
+ Just i ->
+ let filters = search0.filters |> Array.update i (update_filter model.filter_search)
+ in {search0 | filters = filters}
+ (search2, update_f) = update_search_f search1
+ query1 = search_query search2
+ model1 = {model | search = search2}
+ in
+ if Query.empty_query query1 then (Model {model1 | facets = Nothing}, Empty query1)
+ else (Model model1, update_f query1)
+
+update: Msg -> Model -> (Model, Update)
+update msg (Model model) =
+ let maybe_same h m = if Maybe.withDefault True m then None else h
+ in case msg of
+ Input_Any value ->
+ update_search (Model model) (\search ->
+ ({search | any_filter = value}, (if value == search.any_filter then None else Later)))
+
+ Add_Filter field ->
+ update_search (Model model) (\search ->
+ ({search | filters = search.filters |> Array.push (empty_filter field)}, None))
+
+ Input_Filter commit value ->
+ let model1 = {model | filter_search = value}
+ in
+ if not commit then (Model model1, None (search_query model.search))
+ else update_search (Model model1) (\search -> (search, Later))
+
+ Change_Filter i ->
+ update_search (Model model) (\search ->
+ ({search | filters = search.filters |> Array.update i change_filter},
+ maybe_same Now (Array.get i search.filters |> Maybe.map (.value >> String.isEmpty))))
+
+ Remove_Filter i ->
+ update_search (Model model) (\search ->
+ ({search | filters = search.filters |> Array.removeAt i},
+ maybe_same Now (Array.get i search.filters |> Maybe.map (.value >> String.isEmpty))))
+
+ Change_Facet field value ->
+ update_search (Model model) (\search ->
+ ({search | facets = search.facets |> Dict.update field (change_facet field value)}, Now))
+
+ Set_Focus focus ->
+ let
+ update_f = if Maybe.isNothing focus || Maybe.isJust model.focus then Now else None
+ value =
+ case focus of
+ Nothing -> Nothing
+ Just i -> Array.get i model.search.filters |> Maybe.map .value
+ ((Model model1), upd) = update_search (Model model) (\search -> (search, update_f))
+ in (Model {model1 | filter_search = value |> Maybe.withDefault "", focus = focus}, upd)
+
+populate: Model -> Model -> Model
+populate (Model model0) (Model model) = Model {model0 | search = model.search}
+
+set_result: Query.Result -> Model -> Model
+set_result res (Model model) = Model {model | facets = Just res.facets}
+
+
+{- subscriptions -}
+
+subscriptions: Model -> Sub Msg
+subscriptions (Model model) =
+ let
+ decode_outside outside =
+ if outside then Decode.succeed (Set_Focus Nothing) else Decode.fail "inside"
+ decoder =
+ Decode.field "target" (Utils.outside_elem "filter_menu") |> Decode.andThen decode_outside
+ in if Maybe.isNothing model.focus then Sub.none else Events.onClick decoder
+
+
+{- view -}
+
+view_field: String -> String
+view_field field =
+ Dict.fromList [
+ (chapterN, "Chapter"),
+ (sessionN, "Session"),
+ (file_typeN, "File Type"),
+ (theoryN, "Theory"),
+ (commandN, "Command"),
+ (sourceN, "Source Code"),
+ (namesN, "Name"),
+ (constsN, "Constant Name"),
+ (typsN, "Type Name"),
+ (thmsN, "Theorem Name"),
+ (kindsN, "Kind")]
+ |> Dict.get field
+ |> Maybe.withDefault field
+
+view_menu: Dict String Int -> String -> Html Msg
+view_menu counts value =
+ let
+ suggestions =
+ counts
+ |> Dict.toList
+ |> List.filter (Tuple.first >> String.isEmpty >> not)
+ |> List.filter (Tuple.first >> String.toLower >> String.contains (String.toLower value))
+ |> List.sortBy Tuple.second
+ |> List.reverse
+ view_option (term, count) =
+ ListItem.listItem (ListItem.config |> ListItem.setOnClick (Input_Filter True term))
+ [text (term ++ " (" ++ String.fromInt count ++ ")")]
+ menu_config =
+ [name "filter_menu", style "position" "sticky", style "z-index" "999999", style "overflow-y" "scroll",
+ style "max-height" "384px", Elevation.z3, Theme.surface]
+ in case suggestions |> List.map view_option of
+ x::xs -> ItemList.list (ItemList.config |> ItemList.setAttributes menu_config) x xs
+ _ -> Html.nothing
+
+view_filter: Maybe String -> Dict String Int -> (Int, Filter) -> Html Msg
+view_filter search0 counts (i, filter) =
+ let
+ search = search0 |> Maybe.filter (always (Dict.size counts > 1))
+ value = search |> Maybe.withDefault filter.value
+ in
+ LayoutGrid.inner [name "filter_menu", Typography.body1, style "margin" "16px 0"] [
+ LayoutGrid.cell
+ [LayoutGrid.span3Phone, LayoutGrid.span7Tablet, LayoutGrid.span11Desktop,
+ LayoutGrid.alignMiddle]
+ [TextField.outlined
+ (TextField.config
+ |> TextField.setLeadingIcon (Just (
+ TextFieldIcon.icon (if filter.exclude then "block" else "done")
+ |> TextFieldIcon.setOnInteraction (Change_Filter i)))
+ |> TextField.setValue (Just value)
+ |> TextField.setOnInput (Input_Filter (Maybe.isNothing search))
+ |> TextField.setOnFocus (Set_Focus (Just i))
+ |> TextField.setAttributes [style "width" "100%", onEnter (Input_Filter True value)]
+ |> TextField.setLabel (Just (view_field filter.field))),
+ search |> Maybe.map (view_menu counts) |> Maybe.withDefault Html.nothing],
+ LayoutGrid.cell [LayoutGrid.span1, LayoutGrid.alignLeft] [
+ IconButton.iconButton
+ (IconButton.config |> IconButton.setOnClick (Remove_Filter i))
+ (IconButton.icon "close")]]
+
+view_facet: String -> String -> List String -> Dict String Int -> Set String -> Html Msg
+view_facet field t ts counts selected =
+ let
+ chip term =
+ FilterChip.chip
+ (FilterChip.config
+ |> FilterChip.setSelected (Set.member term selected)
+ |> FilterChip.setOnChange (Change_Facet field term))
+ (term ++ maybe_proper (Dict.get term counts) (\i -> " (" ++ String.fromInt i ++ ")"))
+ in LayoutGrid.inner [Typography.body1] [
+ LayoutGrid.cell [LayoutGrid.span2, LayoutGrid.alignMiddle] [text (view_field field)],
+ LayoutGrid.cell [LayoutGrid.span10]
+ [FilterChipSet.chipSet [] (chip t) (ts |> List.map chip)]]
+
+view_add_filter : Html Msg
+view_add_filter =
+ let option field = SelectItem.selectItem (SelectItem.config {value=field}) (view_field field)
+ in case search_fields of
+ [] -> Html.nothing
+ x :: xs ->
+ Select.outlined
+ (Select.config
+ |> Select.setLabel (Just "Add Filter")
+ |> Select.setAttributes [style "margin" "16px 0"]
+ |> Select.setOnChange Add_Filter)
+ (option x)
+ (xs |> List.map option)
+
+view: Model -> Html Msg
+view (Model model) =
+ let
+ field_facet field =
+ model.facets |> Maybe.map (Dict.get field) |> Maybe.join |> Maybe.withDefault Dict.empty
+
+ search i = if model.focus == Just i then Just model.filter_search else Nothing
+ view_filter1 (i, filter) = view_filter (search i) (field_facet filter.field) (i, filter)
+
+ maybe_view_facet field =
+ let
+ counts = field_facet field
+ selected =
+ model.search.facets |> Dict.get field |> Maybe.map .terms |> Maybe.withDefault Set.empty
+ counts1 =
+ if Dict.size counts > max_facet_terms || Dict.size counts < 2 then Dict.empty else counts
+ terms = Dict.keys counts1 |> Set.fromList |> Set.union selected |> Set.toList |> List.sort
+ in case terms of
+ [] -> Nothing
+ t::ts -> Just (Lazy.lazy5 view_facet field t ts counts1 selected)
+ facets = facet_fields |> List.filterMap maybe_view_facet
+ in
+ LayoutGrid.layoutGrid [Elevation.z2, style "margin" "16px 0"] ([
+ TextField.outlined (TextField.config
+ |> TextField.setPlaceholder (Just "Enter search terms...")
+ |> TextField.setAttributes
+ [style "width" "100%", style "background-color" "white", style "margin-bottom" "16px"]
+ |> TextField.setValue (Just model.search.any_filter)
+ |> TextField.setOnInput Input_Any),
+ h3 [Typography.headline6] [text "Filters"]] ++ (
+ Array.toIndexedList model.search.filters
+ |> List.map view_filter1) ++
+ [view_add_filter] ++ (
+ list_if (not (List.isEmpty facets)) (h3 [Typography.headline6] [text "Drill-down"])) ++
+ facets)
+
+
+{- queries -}
+
+explode_query: String -> Query.Atom
+explode_query s =
+ case try_unquote s of
+ Just s1 -> Query.Exact s1
+ Nothing -> Query.Value s
+
+atoms_query: String -> List Query.Atom
+atoms_query s = explode_query s |> List.singleton
+
+filter_query: Filter -> Query.Filter
+filter_query filter =
+ Query.Field_Filter filter.field (filter.value |> atoms_query)
+
+facet_query: Facet -> Query.Select
+facet_query facet = Query.Select facet.field (Set.toList facet.terms)
+
+search_query: Search -> Query.Query
+search_query search =
+ let (exclude, filters) = search.filters |> Array.toList |> List.partition .exclude
+ in Query.Query (
+ (list_if (search.any_filter /= "") (search.any_filter |> atoms_query |> Query.Any_Filter)) ++
+ (filters |> List.filter (.value >> String.isEmpty >> not) |> List.map filter_query))
+ (exclude |> List.filter (.value >> String.isEmpty >> not) |> List.map filter_query)
+ (Dict.toList search.facets |> List.map Tuple.second |> List.map facet_query)
+
+get_query: Model -> Query.Query
+get_query (Model model) = search_query model.search
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/src/Tools/Find_Facts/web/src/Utils.elm Sat Jan 11 21:31:13 2025 +0100
@@ -0,0 +1,70 @@
+{- Author: Fabian Huch, TU München
+
+Various utilities.
+-}
+module Utils exposing (..)
+
+
+import Html exposing (..)
+import Html.Attributes exposing (class, style)
+import Html.Parser
+import Html.Parser.Util
+import Json.Decode as Decode
+import Maybe.Extra as Maybe
+import Parser exposing (Parser)
+import String.Extra as String
+import Library exposing (..)
+import Url
+
+
+{- query params -}
+
+type alias Query_Param = (String, String)
+
+parse_query: String -> Parser Query_Param a -> Maybe a
+parse_query query parser =
+ let
+ pair s =
+ case String.split "=" s |> List.map Url.percentDecode of
+ [Just k, Just v] -> Just (k, v)
+ _ -> Nothing
+ params = if query == "" then [] else String.split "&" query
+ pairs = params |> List.map pair |> Maybe.combine
+ in pairs |> Maybe.map (Parser.parse parser) |> Maybe.join
+
+parse_key: (a -> Bool) -> Parser (a, b) (a, b)
+parse_key cond = Parser.elem (Tuple.first >> cond)
+
+
+{- code blocks -}
+
+escape_html: String -> String
+escape_html s = s
+ |> String.replace "&" "&"
+ |> String.replace "<" "<"
+ |> String.replace ">" ">"
+ |> String.replace "\"" """
+
+view_html: String -> Html msg
+view_html html =
+ case Html.Parser.run html of
+ Ok nodes -> span [] (Html.Parser.Util.toVirtualDom nodes)
+ _ -> text (String.stripTags html)
+
+view_code: String -> Int -> Html msg
+view_code src start =
+ let
+ view_line i line =
+ "<div style=\"width:5ch; color:gray; text-align:right; display:inline-block\">" ++ (String.fromInt (start + i)) ++ "</div> " ++ line
+ src1 = split_lines src |> List.indexedMap view_line |> String.join "\n"
+ in Html.pre [class "source"] [view_html src1]
+
+
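+{- DOM events -}
+
+-- True if the event target lies outside the element with the given name (walks up parent nodes)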
+outside_elem: String -> Decode.Decoder Bool
+outside_elem name =
+ let decode_name name1 = if name == name1 then Decode.succeed False else Decode.fail "continue"
+ in
+ Decode.oneOf [
+ Decode.field "name" Decode.string |> Decode.andThen decode_name,
+ Decode.lazy (\_ -> outside_elem name |> Decode.field "parentNode"),
+ Decode.succeed True]
\ No newline at end of file