removed somewhat pointless support for Jenkins log files: it has stopped working long ago;
author      wenzelm
date        Sat, 28 Jan 2023 22:29:24 +0100
changeset 77133 536c033fb6eb
parent 77132 53ce5a39c987
child 77134 523839d6d8ff
etc/build.props
src/Pure/Admin/build_log.scala
src/Pure/Admin/build_status.scala
src/Pure/Admin/isabelle_cronjob.scala
src/Pure/Admin/jenkins.scala
--- a/etc/build.props	Sat Jan 28 21:40:06 2023 +0100
+++ b/etc/build.props	Sat Jan 28 22:29:24 2023 +0100
@@ -44,7 +44,6 @@
   src/Pure/Admin/ci_build.scala \
   src/Pure/Admin/isabelle_cronjob.scala \
   src/Pure/Admin/isabelle_devel.scala \
-  src/Pure/Admin/jenkins.scala \
   src/Pure/Admin/other_isabelle.scala \
   src/Pure/Concurrent/consumer_thread.scala \
   src/Pure/Concurrent/counter.scala \
--- a/src/Pure/Admin/build_log.scala	Sat Jan 28 21:40:06 2023 +0100
+++ b/src/Pure/Admin/build_log.scala	Sat Jan 28 22:29:24 2023 +0100
@@ -122,7 +122,7 @@
     def is_log(file: JFile,
       prefixes: List[String] =
         List(Build_History.log_prefix, Identify.log_prefix, Identify.log_prefix2,
-          Isatest.log_prefix, AFP_Test.log_prefix, Jenkins.log_prefix),
+          Isatest.log_prefix, AFP_Test.log_prefix),
       suffixes: List[String] = List(".log", ".log.gz", ".log.xz")
     ): Boolean = {
       val name = file.getName
@@ -279,9 +279,7 @@
     val log_prefix2 = "plain_identify_"
 
     def engine(log_file: Log_File): String =
-      if (log_file.name.startsWith(Jenkins.log_prefix)) "jenkins_identify"
-      else if (log_file.name.startsWith(log_prefix2)) "plain_identify"
-      else "identify"
+      if (log_file.name.startsWith(log_prefix2)) "plain_identify" else "identify"
 
     def content(date: Date, isabelle_version: Option[String], afp_version: Option[String]): String =
       terminate_lines(
@@ -381,18 +379,6 @@
         parse(AFP_Test.engine, "", start, AFP_Test.End,
           AFP_Test.Isabelle_Version, AFP_Test.AFP_Version)
 
-      case Jenkins.Start() :: _ =>
-        log_file.lines.dropWhile(_ != Jenkins.BUILD) match {
-          case Jenkins.BUILD :: _ :: Jenkins.Start_Date(log_file.Strict_Date(start)) :: _ =>
-            val host =
-              log_file.lines.takeWhile(_ != Jenkins.CONFIGURATION).collectFirst({
-                case Jenkins.Host(a, b) => a + "." + b
-              }).getOrElse("")
-            parse(Jenkins.engine, host, start.to(Date.timezone_berlin), Jenkins.No_End,
-              Jenkins.Isabelle_Version, Jenkins.AFP_Version)
-          case _ => Meta_Info.empty
-        }
-
       case line :: _ if line.startsWith("\u0000") => Meta_Info.empty
       case List(Isatest.End(_)) => Meta_Info.empty
       case _ :: AFP_Test.Bad_Init() :: _ => Meta_Info.empty
--- a/src/Pure/Admin/build_status.scala	Sat Jan 28 21:40:06 2023 +0100
+++ b/src/Pure/Admin/build_status.scala	Sat Jan 28 22:29:24 2023 +0100
@@ -14,8 +14,7 @@
   val default_image_size = (800, 600)
   val default_history = 30
 
-  def default_profiles: List[Profile] =
-    Jenkins.build_status_profiles ::: Isabelle_Cronjob.build_status_profiles
+  def default_profiles: List[Profile] = Isabelle_Cronjob.build_status_profiles
 
 
   /* data profiles */
--- a/src/Pure/Admin/isabelle_cronjob.scala	Sat Jan 28 21:40:06 2023 +0100
+++ b/src/Pure/Admin/isabelle_cronjob.scala	Sat Jan 28 22:29:24 2023 +0100
@@ -594,8 +594,6 @@
                       (r, i) <- (if (seq.length <= 1) seq.map((_, -1)) else seq.zipWithIndex)
                       (rev, afp_rev) <- r.pick(logger.options, hg.id(), history_base_filter(r))
                     } yield remote_build_history(rev, afp_rev, i, r)))),
-                Logger_Task("jenkins_logs", _ =>
-                  Jenkins.download_logs(logger.options, Jenkins.build_log_jobs, main_dir)),
                 Logger_Task("build_log_database",
                   logger => Isabelle_Devel.build_log_database(logger.options, build_log_dirs)),
                 Logger_Task("build_status",
--- a/src/Pure/Admin/jenkins.scala	Sat Jan 28 21:40:06 2023 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,145 +0,0 @@
-/*  Title:      Pure/Admin/jenkins.scala
-    Author:     Makarius
-
-Support for Jenkins continuous integration service.
-*/
-
-package isabelle
-
-
-import java.net.URL
-
-import scala.util.matching.Regex
-
-
-object Jenkins {
-  /* server API */
-
-  def root(): String =
-    Isabelle_System.getenv_strict("ISABELLE_JENKINS_ROOT")
-
-  def invoke(url: String, args: String*): Any = {
-    val req = url + "/api/json?" + args.mkString("&")
-    val result = Url.read(req)
-    try { JSON.parse(result) }
-    catch { case ERROR(_) => error("Malformed JSON from " + quote(req)) }
-  }
-
-
-  /* build jobs */
-
-  def build_job_names(): List[String] =
-    for {
-      job <- JSON.array(invoke(root()), "jobs").getOrElse(Nil)
-      _class <- JSON.string(job, "_class")
-      if _class == "hudson.model.FreeStyleProject"
-      name <- JSON.string(job, "name")
-    } yield name
-
-
-  def download_logs(
-    options: Options,
-    job_names: List[String],
-    dir: Path,
-    progress: Progress = new Progress
-  ): Unit = {
-    val store = Sessions.store(options)
-    val infos = job_names.flatMap(build_job_infos)
-    Par_List.map((info: Job_Info) => info.download_log(store, dir, progress), infos)
-  }
-
-
-  /* build log status */
-
-  val build_log_jobs = List("isabelle-nightly-benchmark", "isabelle-nightly-slow")
-
-  val build_status_profiles: List[Build_Status.Profile] =
-    build_log_jobs.map(job_name =>
-      Build_Status.Profile("jenkins " + job_name,
-        sql =
-          Build_Log.Prop.build_engine.toString + " = " + SQL.string(Build_Log.Jenkins.engine) +
-          " AND " + Build_Log.Data.session_name + " <> " + SQL.string("Pure") + " AND " +
-          Build_Log.Data.status + " = " + SQL.string(Build_Log.Session_Status.finished.toString) +
-          " AND " + Build_Log.Data.log_name + " LIKE " + SQL.string("%" + job_name)))
-
-
-  /* job info */
-
-  sealed case class Job_Info(
-    job_name: String,
-    timestamp: Long,
-    main_log: URL,
-    session_logs: List[(String, String, URL)]
-  ) {
-    val date: Date = Date(Time.ms(timestamp), Date.timezone_berlin)
-
-    def log_filename: Path =
-      Build_Log.log_filename(Build_Log.Jenkins.engine, date, List(job_name))
-
-    def read_ml_statistics(store: Sessions.Store, session_name: String): List[Properties.T] = {
-      def get_log(ext: String): Option[URL] =
-        session_logs.collectFirst({ case (a, b, url) if a == session_name && b == ext => url })
-
-      get_log("db") match {
-        case Some(url) =>
-          Isabelle_System.with_tmp_file(session_name, "db") { database =>
-            Bytes.write(database, Bytes.read(url))
-            using(SQLite.open_database(database))(db => store.read_ml_statistics(db, session_name))
-          }
-        case None =>
-          get_log("gz") match {
-            case Some(url) =>
-              val log_file = Build_Log.Log_File(session_name, Url.read_gzip(url))
-              log_file.parse_session_info(ml_statistics = true).ml_statistics
-            case None => Nil
-          }
-      }
-    }
-
-    def download_log(store: Sessions.Store, dir: Path, progress: Progress = new Progress): Unit = {
-      val log_dir = dir + Build_Log.log_subdir(date)
-      val log_path = log_dir + log_filename.xz
-
-      if (!log_path.is_file) {
-        progress.echo(File.standard_path(log_path))
-        Isabelle_System.make_directory(log_dir)
-
-        val ml_statistics =
-          session_logs.map(_._1).distinct.sorted.flatMap(session_name =>
-            read_ml_statistics(store, session_name).
-              map(props => (Build_Log.SESSION_NAME -> session_name) :: props))
-
-        File.write_xz(log_path,
-          terminate_lines(Url.read(main_log) ::
-            ml_statistics.map(Protocol.ML_Statistics_Marker.apply)),
-          Compress.Options_XZ(6))
-      }
-    }
-  }
-
-  def build_job_infos(job_name: String): List[Job_Info] = {
-    val Session_Log = new Regex("""^.*/log/([^/]+)\.(db|gz)$""")
-
-    val infos =
-      for {
-        build <-
-          JSON.array(
-            invoke(root() + "/job/" + job_name, "tree=allBuilds[number,timestamp,artifacts[*]]"),
-            "allBuilds").getOrElse(Nil)
-        number <- JSON.int(build, "number")
-        timestamp <- JSON.long(build, "timestamp")
-      } yield {
-        val job_prefix = root() + "/job/" + job_name + "/" + number
-        val main_log = Url(job_prefix + "/consoleText")
-        val session_logs =
-          for {
-            artifact <- JSON.array(build, "artifacts").getOrElse(Nil)
-            log_path <- JSON.string(artifact, "relativePath")
-            (name, ext) <- (log_path match { case Session_Log(a, b) => Some((a, b)) case _ => None })
-          } yield (name, ext, Url(job_prefix + "/artifact/" + log_path))
-        Job_Info(job_name, timestamp, main_log, session_logs)
-      }
-
-    infos.sortBy(info => - info.timestamp)
-  }
-}