1"""General purpose package removal code for ftpmaster
3@contact: Debian FTP Master <ftpmaster@debian.org>
4@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
5@copyright: 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
6@copyright: 2015 Niels Thykier <niels@thykier.net>
7@license: GNU General Public License version 2 or later
8"""
9# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
10# Copyright (C) 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
12# This program is free software; you can redistribute it and/or modify
13# it under the terms of the GNU General Public License as published by
14# the Free Software Foundation; either version 2 of the License, or
15# (at your option) any later version.
17# This program is distributed in the hope that it will be useful,
18# but WITHOUT ANY WARRANTY; without even the implied warranty of
19# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20# GNU General Public License for more details.
22# You should have received a copy of the GNU General Public License
23# along with this program; if not, write to the Free Software
24# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26################################################################################
28# From: Andrew Morton <akpm@osdl.org>
29# Subject: 2.6.6-mm5
30# To: linux-kernel@vger.kernel.org
31# Date: Sat, 22 May 2004 01:36:36 -0700
32# X-Mailer: Sylpheed version 0.9.7 (GTK+ 1.2.10; i386-redhat-linux-gnu)
33#
34# [...]
35#
36# Although this feature has been around for a while it is new code, and the
37# usual cautions apply. If it munches all your files please tell Jens and
38# he'll type them in again for you.
40################################################################################

import apt_pkg
import fcntl
import functools
import sqlalchemy.sql as sql
import email.utils
from re import sub
from collections import defaultdict
from .regexes import re_build_dep_arch
from typing import Optional, Union

from daklib.dbconn import *
from daklib import utils
from daklib.regexes import re_bin_only_nmu
import debianbts as bts

################################################################################


class ReverseDependencyChecker:
    """A bulk tester for reverse dependency checks

    This class is similar to the check_reverse_depends method from "utils".  However,
    it is primarily focused on facilitating bulk testing of reverse dependencies.
    It caches the state of the suite and then uses that as basis for answering queries.
    This saves a significant amount of time if multiple reverse dependency checks are
    required.
    """
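
    # Illustrative usage sketch (added for clarity; the suite and package names
    # below are made-up examples, not taken from this module):
    #
    #     rdc = ReverseDependencyChecker(session, "unstable")
    #     broken = rdc.check_reverse_depends({"eclipse": None})
    #     for (removed_pkg, arch), affected in broken.items():
    #         print("Removing %s/%s would break: %s" % (removed_pkg, arch, sorted(affected)))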

    def __init__(self, session, suite: str):
        """Creates a new ReverseDependencyChecker instance

        This will spend a significant amount of time caching data.

        :param session: The database session in use
        :param suite: The name of the suite that is used as basis for removal tests.
        """
        self._session = session
        dbsuite = get_suite(suite, session)
        suite_archs2id = dict((x.arch_string, x.arch_id) for x in get_suite_architectures(suite))
        package_dependencies, arch_providers_of, arch_provided_by = self._load_package_information(session,
                                                                                                   dbsuite.suite_id,
                                                                                                   suite_archs2id)
        self._package_dependencies = package_dependencies
        self._arch_providers_of = arch_providers_of
        self._arch_provided_by = arch_provided_by
        self._archs_in_suite = set(suite_archs2id)

    @staticmethod
    def _load_package_information(session, suite_id, suite_archs2id):
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providers_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
        params = {
            'suite_id': suite_id,
            'arch_all_id': suite_archs2id['all'],
            'metakey_d_id': metakey_d.key_id,
            'metakey_p_id': metakey_p.key_id,
        }
        all_arches = set(suite_archs2id)
        all_arches.discard('source')

        package_dependencies['source'] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providers_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params['arch_id'] = suite_archs2id[architecture]

            statement = sql.text('''
                    SELECT b.package,
                           (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                           (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                      FROM binaries b
                      JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                     WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id''')
            query = session.query(sql.column('package'), sql.column('depends'), sql.column('provides')). \
                from_statement(statement).params(params)
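            # Explanatory note (added): each Depends clause below is stored as a
            # frozenset of the clause's alternative package names, so
            # "Depends: a, b | c" becomes {frozenset({'a'}), frozenset({'b', 'c'})}.
            # A clause only counts as broken once every alternative in it is removed.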
            for package, depends, provides in query:

                if depends is not None:
                    try:
                        parsed_dep = []
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        print("Error for package %s: %s" % (package, e))
                # Track the Provides relation in both directions so that
                # check_reverse_depends can tell when all remaining providers
                # of a virtual package are scheduled for removal.  A package
                # providing its own name is ignored; removing the package
                # removes that provider anyway.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        if virtual_pkg == package:
                            continue
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
        params = {
            'suite_id': suite_id,
            'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
        }
        statement = sql.text('''
                SELECT s.source, string_agg(sm.value, ', ') as build_dep
                  FROM source s
                  JOIN source_metadata sm ON s.id = sm.src_id
                 WHERE s.id in
                       (SELECT src FROM newest_src_association
                         WHERE suite = :suite_id)
                   AND sm.key_id in :metakey_ids
                 GROUP BY s.id, s.source''')
        query = session.query(sql.column('source'), sql.column('build_dep')) \
            .from_statement(statement).params(params)
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                try:
                    parsed_dep = []
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    print("Error for package %s: %s" % (source, e))
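
        # Shape of the returned structures, for reference:
        #   package_dependencies[arch][pkg]   -> set of frozensets, one per Depends clause
        #                                        ('source' maps sources to Build-Depends clauses)
        #   arch_providers_of[arch][pkg]      -> set of virtual packages provided by pkg
        #   arch_provided_by[arch][virtual]   -> set of packages providing virtual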

        return package_dependencies, arch_providers_of, arch_provided_by

    def check_reverse_depends(self, removal_requests: Union[dict, list[tuple]]) -> dict:
        """Bulk check reverse dependencies

        Example:
          removal_request = {
            "eclipse-rcp": None, # means ALL architectures (incl. source)
            "eclipse": None, # means ALL architectures (incl. source)
            "lintian": ["source", "all"], # Only these two "architectures".
          }
          obj.check_reverse_depends(removal_request)

        :param removal_requests: A dictionary mapping a package name to a list of architectures.  The list of
          architectures decides from which the package will be removed - if the list is empty the package will
          be removed on ALL architectures in the suite (including "source").

        :return: A mapping of each "removed package" (as a "(pkg, arch)"-tuple) to a set of broken
          packages (also as "(pkg, arch)"-tuples).  Note that the architecture values
          in these tuples /can/ be "source" to reflect a breakage in build-dependencies.
        """
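        # Illustrative result (hypothetical package names):
        #     {("eclipse", "amd64"): {("eclipse-rcp", "amd64"), ("azureus", "source")}}
        # i.e. removing eclipse on amd64 would break the binary eclipse-rcp on amd64
        # and the build-dependencies of the azureus source package.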

        archs_in_suite = self._archs_in_suite
        removals_by_arch = defaultdict(set)
        affected_virtual_by_arch = defaultdict(set)
        package_dependencies = self._package_dependencies
        arch_providers_of = self._arch_providers_of
        arch_provided_by = self._arch_provided_by
        arch_provides2removal = defaultdict(lambda: defaultdict(set))
        dep_problems = defaultdict(set)
        src_deps = package_dependencies['source']
        src_removals = set()
        arch_all_removals = set()

        if isinstance(removal_requests, dict):
            removal_requests = removal_requests.items()

        for pkg, arch_list in removal_requests:
            if not arch_list:
                arch_list = archs_in_suite
            for arch in arch_list:
                if arch == 'source':
                    src_removals.add(pkg)
                    continue
                if arch == 'all':
                    arch_all_removals.add(pkg)
                    continue
                removals_by_arch[arch].add(pkg)
                if pkg in arch_providers_of[arch]:
                    affected_virtual_by_arch[arch].add(pkg)

        if arch_all_removals:
            for arch in archs_in_suite:
                if arch in ('all', 'source'):
                    continue
                removals_by_arch[arch].update(arch_all_removals)
                for pkg in arch_all_removals:
                    if pkg in arch_providers_of[arch]:
                        affected_virtual_by_arch[arch].add(pkg)

        if not removals_by_arch:
            # Nothing to remove => no problems
            return dep_problems

        for arch, removed_providers in affected_virtual_by_arch.items():
            provides2removal = arch_provides2removal[arch]
            removals = removals_by_arch[arch]
            for virtual_pkg, virtual_providers in arch_provided_by[arch].items():
                v = virtual_providers & removed_providers
                if len(v) == len(virtual_providers):
                    # We removed all the providers of virtual_pkg
                    removals.add(virtual_pkg)
                    # Pick one to take the blame for the removal
                    # - we sort for determinism, optimally we would prefer to blame the same package
                    #   to minimise the number of blamed packages.
                    provides2removal[virtual_pkg] = sorted(v)[0]

        for arch, removals in removals_by_arch.items():
            deps = package_dependencies[arch]
            provides2removal = arch_provides2removal[arch]

            # Check binary dependencies (Depends)
            for package, dependencies in deps.items():
                if package in removals:
                    continue
                for clause in dependencies:
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                        continue
                    # Whoops, we seem to have removed all packages that could possibly
                    # satisfy this relation.  Let's blame something for it.
                    for dep_package in clause:
                        removal = dep_package
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((package, arch))

            # Check source dependencies (Build-Depends and Build-Depends-Indep)
            for source, build_dependencies in src_deps.items():
                if source in src_removals:
                    continue
                for clause in build_dependencies:
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                        continue
                    # Whoops, we seem to have removed all packages that could possibly
                    # satisfy this relation.  Let's blame something for it.
                    for dep_package in clause:
                        removal = dep_package
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((source, 'source'))

        return dep_problems


def remove(session, reason: str, suites: list, removals: list,
           whoami: Optional[str] = None, partial: bool = False,
           components: Optional[list] = None, done_bugs: Optional[list] = None,
           date: Optional[str] = None, carbon_copy: Optional[list[str]] = None,
           close_related_bugs: bool = False) -> None:
    """Batch remove a number of packages

    :param session: The database session in use
    :param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")
    :param suites: A list of the suite names in which the removal should occur
    :param removals: A list of the removals.  Each element should be a tuple (or list) of at least the
      following 4 items from the database (in order): package, version, architecture, (database) id.
      For source packages, the "architecture" should be set to "source".
    :param whoami: The person (or entity) doing the removal.  Defaults to utils.whoami()
    :param partial: Whether the removal is "partial" (e.g. architecture specific).
    :param components: List of components involved in a partial removal.  Can be an empty list to not restrict the
      removal to any components.
    :param done_bugs: A list of bugs to be closed when doing this removal.
    :param date: The date of the removal.  Defaults to `date -R`
    :param carbon_copy: A list of mail addresses to CC when doing removals.  NB: all items are taken "as-is" unlike
      "dak rm".
    :param close_related_bugs: Whether bugs related to the package being removed should be closed as well.
      NB: Not implemented for more than one suite.
    """
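    # Illustrative call (added for clarity; every value below is a made-up example):
    #     remove(session, "RoQA; orphaned; unmaintained", ["unstable"],
    #            [("hello", "2.10-2", "source", 12345)], whoami="Jane Doe")
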
    # Generate the summary of what's to be removed
    d = {}
    summary = ""
    affected_sources = set()
    sources = []
    binaries = []
    whitelists = []
    versions = []
    newest_source = ''
    suite_ids_list = []
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    con_components = ''

    #######################################################################################################

    if not reason:
        raise ValueError("Empty removal reason not permitted")
    reason = reason.strip()

    if not removals:
        raise ValueError("Nothing to remove!?")

    if not suites:
        raise ValueError("Removals without a suite!?")

    if whoami is None:
        whoami = utils.whoami()

    if date is None:
        date = email.utils.formatdate()

    if partial and components:
        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND component IN (%s)" % ", ".join([str(i) for i in component_ids_list])
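            # e.g. con_components == "AND component IN (1, 3)"  (the ids here are illustrative)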

    for i in removals:
        package = i[0]
        version = i[1]
        architecture = i[2]
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    for package in sorted(d):
        versions = sorted(d[package], key=functools.cmp_to_key(apt_pkg.version_compare))
        for version in versions:
            d[package][version].sort(key=utils.ArchKey)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
            if apt_pkg.version_compare(version, newest_source) > 0:
                newest_source = version
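
    # At this point each line of "summary" looks like (the package name is hypothetical):
    #      hello |     2.10-2 | source, amd64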

    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    element[2] = sub(r"source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    for suite in suites:
        s = get_suite(suite, session=session)
        if s is not None:
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    #######################################################################################################
    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    with open(log_filename, "a") as logfile, open(log822_filename, "a") as logfile822:
        fcntl.lockf(logfile, fcntl.LOCK_EX)
        fcntl.lockf(logfile822, fcntl.LOCK_EX)

        logfile.write("=========================================================================\n")
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
        if done_bugs:
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

        if sources:
            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

        if binaries:
            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        logfile822.write("Reason: %s\n" % reason.replace('\n', '\n '))
        if done_bugs:
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

        for i in removals:
            package = i[0]
            architecture = i[2]
            package_id = i[3]
            for suite_id in suite_ids_list:
                if architecture == "source":
                    q = session.execute("DELETE FROM src_associations sa USING source s WHERE sa.source = s.id AND sa.source = :packageid AND sa.suite = :suiteid RETURNING s.source",
                                        {'packageid': package_id, 'suiteid': suite_id})
                    affected_sources.add(q.scalar())
                else:
                    q = session.execute("DELETE FROM bin_associations ba USING binaries b, source s WHERE ba.bin = b.id AND b.source = s.id AND ba.bin = :packageid AND ba.suite = :suiteid RETURNING s.source",
                                        {'packageid': package_id, 'suiteid': suite_id})
                    affected_sources.add(q.scalar())
                # Delete from the override file
                if not partial:
                    if architecture == "source":
                        type_id = dsc_type_id
                    else:
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (con_components), {'package': package, 'typeid': type_id, 'suiteid': suite_id})

        session.commit()
        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn("Cannot send mail to BugServer as Dinstall::BugServer is not configured")

            logfile.write("=========================================================================\n")
            logfile822.write("\n")
            return

        # read common subst variables for all bug closure mails
        Subst_common = {}
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
        if carbon_copy:
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SOURCES__"] = ", ".join(sorted(affected_sources))
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUITES__"] = ", ".join(sorted(suites))
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
        if done_bugs:
            Subst_close_rm = Subst_common
            bcc = []
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
            if bcc:
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, reason)
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close-with-related")
                else:
                    mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close")
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            Subst_close_other = Subst_common
            bcc = []
            wnpp = utils.parse_wnpp_bug_file()
            newest_source = re_bin_only_nmu.sub('', newest_source)
            if len(set(s.split("_", 1)[0] for s in sources)) == 1:
                source_pkg = source.split("_", 1)[0]
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs for multiple source packages is not supported. Please do it yourself.")
            if newest_source != '':
                Subst_close_other["__VERSION__"] = newest_source
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("No versions can be found. Close bugs yourself.")
            if bcc:
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # at this point, we just assume that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0]
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg
            merged_bugs = set()
            other_bugs = bts.get_bugs(src=source_pkg, status=('open', 'forwarded'))
            if other_bugs:
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bug in bts.get_status(bugno):
                            for merged in bug.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bug in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs; filter those out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(Subst_close_other, cnf["Dir::Templates"] + "/rm.bug-close-related")
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write("=========================================================================\n")
        logfile822.write("\n")