32 } |
32 } |
33 |
33 |
34 |
34 |
35 /* sync */ |
35 /* sync */ |
36 |
36 |
|
37 val DIRS: Path = Path.basic("dirs") |
|
38 val DIRS_ROOTS: Path = DIRS + Sessions.ROOTS |
|
39 |
|
/**
 * A directory to synchronize, backed by a Mercurial repository.
 *
 * @param name logical name; also the target subdirectory under DIRS
 *             (an empty name maps to the parent directory in roots_entry)
 * @param root local root of the Mercurial repository
 * @param path subdirectory within the repository (default: current directory)
 * @param rev  revision to synchronize (default "" — presumably the current
 *             repository state; confirm against the repository sync operation)
 */
sealed case class Dir(name: String, root: Path, path: Path = Path.current, rev: String = "") {
  // Created lazily: forcing this value is what validates root as a repository.
  lazy val hg: Mercurial.Repository = Mercurial.repository(root)

  // Force the lazy repository handle — fails here if root is not usable.
  def check(): Unit = hg

  // Local source directory: repository root plus inner path.
  def source: Path = root + path

  // Remote target directory under DIRS.
  def target: Path = DIRS + Path.basic(name)

  // Entry for the generated ROOTS file, relative to the DIRS directory.
  def roots_entry: String = ((if (name.isEmpty) Path.parent else Path.basic(name)) + path).implode
}
|
48 |
|
/** The AFP as a sync Dir: repository at base_dir, main directory as inner path. */
def afp_dir(base_dir: Path = AFP.BASE, rev: String = ""): Dir =
  Dir("AFP", base_dir, path = AFP.main_dir(base_dir = Path.current), rev = rev)
|
51 |
|
/** Optional AFP root as a (possibly empty) list of sync directories. */
def afp_dirs(root: Option[Path] = None, rev: String = ""): List[Dir] =
  root.toList.map(base_dir => afp_dir(base_dir = base_dir, rev = rev))
|
54 |
/**
 * Synchronize the Isabelle repository, additional component directories, and
 * optional session images to a remote target via rsync over ssh.
 *
 * @param options        system options, used for session image lookup
 * @param context        rsync context (progress, ssh connection, stats)
 * @param target         remote target directory
 * @param thorough       passed through to the rsync operations
 * @param purge_heaps    if false, the remote /heaps directory is protected
 * @param session_images names of session images to copy into the target heaps
 * @param preserve_jars  include and protect existing *.jar files on the target
 * @param dry_run        passed through to the rsync operations
 * @param rev            Isabelle repository revision ("" presumably means the
 *                       current state — confirm against Mercurial semantics)
 * @param dirs           additional repository directories; non-empty names must
 *                       refer to a unique repository root each
 */
def sync(options: Options, context: Rsync.Context, target: Path,
  thorough: Boolean = false,
  purge_heaps: Boolean = false,
  session_images: List[String] = Nil,
  preserve_jars: Boolean = false,
  dry_run: Boolean = false,
  rev: String = "",
  dirs: List[Dir] = Nil
): Unit = {
  val progress = context.progress

  val self = Mercurial.self_repository()
  // Fail early if any extra directory is not a usable Mercurial repository.
  dirs.foreach(_.check())

  // Reject the same name pointing at different repository roots, then keep
  // one Dir per distinct name.
  val sync_dirs: List[Dir] = {
    val m =
      Multi_Map.from(
        for (dir <- dirs.iterator if dir.name.nonEmpty) yield dir.name -> dir.root.canonical)
    for ((name, roots) <- m.iterator_list if roots.length > 1) {
      error("Incoherent sync directory " + quote(name) + ":\n" +
        cat_lines(roots.reverse.map(p => " " + p.toString)))
    }
    Library.distinct(dirs, (d1: Dir, d2: Dir) => d1.name == d2.name)
  }

  val more_filter = if (preserve_jars) List("include *.jar", "protect *.jar") else Nil

  // Common repository sync with the shared thorough/dry_run/filter settings.
  def synchronize(src: Mercurial.Repository, dest: Path, r: String,
    contents: List[File.Content] = Nil, filter: List[String] = Nil
  ): Unit = {
    // NOTE(review): the two lines forming the head of this body were missing
    // from the garbled source (old lines 56-57 / new 84-85); reconstructed
    // from the visible argument tail — confirm against repository history.
    src.sync(context, dest, rev = r, thorough = thorough, dry_run = dry_run,
      contents = contents, filter = filter ::: more_filter)
  }

  progress.echo("\n* Isabelle:", verbose = true)
  val filter_heaps = if (purge_heaps) Nil else List("protect /heaps", "protect /heaps/**")
  // Protect every extra sync directory from deletion during the main sync.
  val filter_dirs = sync_dirs.map(dir => "protect /" + dir.target.implode)
  synchronize(self, target, rev,
    contents = List(File.content(Path.explode("etc/ISABELLE_ID"), self.id(rev = rev))),
    filter = filter_heaps ::: filter_dirs)

  // Record the extra directories in a generated ROOTS file on the target.
  context.ssh.make_directory(target + DIRS)
  context.ssh.write(target + DIRS_ROOTS, Library.terminate_lines(dirs.map(_.roots_entry)))

  for (dir <- sync_dirs) {
    progress.echo("\n* " + dir.name + ":", verbose = true)
    synchronize(dir.hg, target + dir.target, dir.rev)
  }

  val images = find_images(options, session_images, dirs = dirs.map(_.source))
  if (images.nonEmpty) {
    progress.echo("\n* Session images:", verbose = true)
    val heaps = context.target(target + Path.explode("heaps")) + "/"
    Rsync.exec(context, thorough = thorough, dry_run = dry_run,
      args = List("--relative", "--") ::: images ::: List(heaps)).check
  }
}
141 |
171 |
// Open the ssh connection for the duration of the sync; Using closes it afterwards.
using(SSH.open_system(options, host = ssh_host, port = ssh_port, user = ssh_user)) { ssh =>
  val context = Rsync.Context(progress = progress, ssh = ssh, stats = verbose)
  sync(options, context, target, thorough = thorough, purge_heaps = purge_heaps,
    session_images = session_images, preserve_jars = preserve_jars, dry_run = dry_run,
    rev = rev, dirs = afp_dirs(afp_root, afp_rev))
}
148 } |
178 } |
149 ) |
179 ) |
150 } |
180 } |