1"""General purpose package removal code for ftpmaster
3@contact: Debian FTP Master <ftpmaster@debian.org>
4@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
5@copyright: 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
6@copyright: 2015 Niels Thykier <niels@thykier.net>
7@license: GNU General Public License version 2 or later
8"""

# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
# Copyright (C) 2010 Alexander Reichle-Schmehl <tolimar@debian.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

################################################################################

# From: Andrew Morton <akpm@osdl.org>
# Subject: 2.6.6-mm5
# To: linux-kernel@vger.kernel.org
# Date: Sat, 22 May 2004 01:36:36 -0700
# X-Mailer: Sylpheed version 0.9.7 (GTK+ 1.2.10; i386-redhat-linux-gnu)
#
# [...]
#
# Although this feature has been around for a while it is new code, and the
# usual cautions apply. If it munches all your files please tell Jens and
# he'll type them in again for you.

################################################################################

import email.utils
import fcntl
import functools
from collections import defaultdict
from re import sub
from typing import Optional, Union

import apt_pkg
import debianbts as bts
import sqlalchemy.sql as sql

from daklib import utils
from daklib.dbconn import (
    get_component,
    get_or_set_metadatakey,
    get_override_type,
    get_suite,
    get_suite_architectures,
)
from daklib.regexes import re_bin_only_nmu

from .regexes import re_build_dep_arch

################################################################################


class ReverseDependencyChecker:
    """A bulk tester for reverse dependency checks

    This class is similar to the check_reverse_depends method from "utils". However,
    it is primarily focused on facilitating bulk testing of reverse dependencies.
    It caches the state of the suite and then uses that as basis for answering queries.
    This saves a significant amount of time if multiple reverse dependency checks are
    required.
    """

    def __init__(self, session, suite: str):
        """Creates a new ReverseDependencyChecker instance

        This will spend a significant amount of time caching data.

        :param session: The database session in use
        :param suite: The name of the suite that is used as basis for removal tests.
        """
        self._session = session
        dbsuite = get_suite(suite, session)
        suite_archs2id = dict(
            (x.arch_string, x.arch_id) for x in get_suite_architectures(suite)
        )
        package_dependencies, arch_providers_of, arch_provided_by = (
            self._load_package_information(session, dbsuite.suite_id, suite_archs2id)
        )
        self._package_dependencies = package_dependencies
        self._arch_providers_of = arch_providers_of
        self._arch_provided_by = arch_provided_by
        self._archs_in_suite = set(suite_archs2id)

    @staticmethod
    def _load_package_information(session, suite_id, suite_archs2id):
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providers_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
        params = {
            "suite_id": suite_id,
            "arch_all_id": suite_archs2id["all"],
            "metakey_d_id": metakey_d.key_id,
            "metakey_p_id": metakey_p.key_id,
        }
        all_arches = set(suite_archs2id)
        all_arches.discard("source")

        package_dependencies["source"] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providers_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params["arch_id"] = suite_archs2id[architecture]

            statement = sql.text(
                """
                SELECT b.package,
                       (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                       (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                  FROM binaries b
                  JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                 WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id"""
            )
            query = (
                session.query(
                    sql.column("package"), sql.column("depends"), sql.column("provides")
                )
                .from_statement(statement)
                .params(params)
            )
            for package, depends, provides in query:
                if depends is not None:
                    try:
                        parsed_dep = []
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        print("Error for package %s: %s" % (package, e))
                # Track virtual packages: record, for every name listed in
                # Provides, which concrete packages provide it (provided_by)
                # and which virtual names each package provides (providers_of).
                # Later, when a removal takes out *all* providers of a virtual
                # package, we know that the virtual package disappears together
                # with the packages being removed.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        if virtual_pkg == package:
                            continue
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
        params = {
            "suite_id": suite_id,
            "metakey_ids": (metakey_bd.key_id, metakey_bdi.key_id),
        }
        statement = sql.text(
            """
            SELECT s.source, string_agg(sm.value, ', ') as build_dep
              FROM source s
              JOIN source_metadata sm ON s.id = sm.src_id
             WHERE s.id in
                   (SELECT src FROM newest_src_association
                     WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
             GROUP BY s.id, s.source"""
        )
        query = (
            session.query(sql.column("source"), sql.column("build_dep"))
            .from_statement(statement)
            .params(params)
        )
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                try:
                    parsed_dep = []
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    print("Error for package %s: %s" % (source, e))

        return package_dependencies, arch_providers_of, arch_provided_by

    def check_reverse_depends(self, removal_requests: Union[dict, list[tuple]]) -> dict:
        """Bulk check reverse dependencies

        Example:
            removal_request = {
                "eclipse-rcp": None,  # means ALL architectures (incl. source)
                "eclipse": None,  # means ALL architectures (incl. source)
                "lintian": ["source", "all"],  # Only these two "architectures".
            }
            obj.check_reverse_depends(removal_request)

        :param removal_requests: A dictionary mapping a package name to a list of architectures. The list
            decides from which architectures the package will be removed - if the list is empty the package
            will be removed from ALL architectures in the suite (including "source").

        :return: A mapping of "removed package" (as a "(pkg, arch)"-tuple) to a set of broken
            packages (also as "(pkg, arch)"-tuple). Note that the architecture values
            in these tuples /can/ be "source" to reflect a breakage in build-dependencies.
        """

        archs_in_suite = self._archs_in_suite
        removals_by_arch = defaultdict(set)
        affected_virtual_by_arch = defaultdict(set)
        package_dependencies = self._package_dependencies
        arch_providers_of = self._arch_providers_of
        arch_provided_by = self._arch_provided_by
        arch_provides2removal = defaultdict(lambda: defaultdict(set))
        dep_problems = defaultdict(set)
        src_deps = package_dependencies["source"]
        src_removals = set()
        arch_all_removals = set()

        if isinstance(removal_requests, dict):
            removal_requests = removal_requests.items()

        for pkg, arch_list in removal_requests:
            if not arch_list:
                arch_list = archs_in_suite
            for arch in arch_list:
                if arch == "source":
                    src_removals.add(pkg)
                    continue
                if arch == "all":
                    arch_all_removals.add(pkg)
                    continue
                removals_by_arch[arch].add(pkg)
                if pkg in arch_providers_of[arch]:
                    affected_virtual_by_arch[arch].add(pkg)

        if arch_all_removals:
            for arch in archs_in_suite:
                if arch in ("all", "source"):
                    continue
                removals_by_arch[arch].update(arch_all_removals)
                for pkg in arch_all_removals:
                    if pkg in arch_providers_of[arch]:
                        affected_virtual_by_arch[arch].add(pkg)

        if not removals_by_arch:
            # Nothing to remove => no problems
            return dep_problems

        for arch, removed_providers in affected_virtual_by_arch.items():
            provides2removal = arch_provides2removal[arch]
            removals = removals_by_arch[arch]
            for virtual_pkg, virtual_providers in arch_provided_by[arch].items():
                v = virtual_providers & removed_providers
                if len(v) == len(virtual_providers):
                    # We removed all the providers of virtual_pkg
                    removals.add(virtual_pkg)
                    # Pick one to take the blame for the removal
                    # - we sort for determinism; ideally we would prefer to blame the same package
                    #   to minimise the number of blamed packages.
                    provides2removal[virtual_pkg] = sorted(v)[0]

        for arch, removals in removals_by_arch.items():
            deps = package_dependencies[arch]
            provides2removal = arch_provides2removal[arch]

            # Check binary dependencies (Depends)
            for package, dependencies in deps.items():
                if package in removals:
                    continue
                for clause in dependencies:
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                        continue
                    # whoops, we seem to have removed all packages that could possibly satisfy
                    # this relation. Let's blame something for it
                    for dep_package in clause:
                        removal = dep_package
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((package, arch))

            for source, build_dependencies in src_deps.items():
                if source in src_removals:
                    continue
                for clause in build_dependencies:
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                        continue
                    # whoops, we seem to have removed all packages that could possibly satisfy
                    # this relation. Let's blame something for it
                    for dep_package in clause:
                        removal = dep_package
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((source, "source"))

        return dep_problems


def remove(
    session,
    reason: str,
    suites: list,
    removals: list,
    whoami: Optional[str] = None,
    partial: bool = False,
    components: Optional[list] = None,
    done_bugs: Optional[list] = None,
    date: Optional[str] = None,
    carbon_copy: Optional[list[str]] = None,
    close_related_bugs: bool = False,
) -> None:
332 """Batch remove a number of packages
333 Verify that the files listed in the Files field of the .dsc are
334 those expected given the announced Format.
336 :param session: The database session in use
337 :param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")
338 :param suites: A list of the suite names in which the removal should occur
339 :param removals: A list of the removals. Each element should be a tuple (or list) of at least the following
340 for 4 items from the database (in order): package, version, architecture, (database) id.
341 For source packages, the "architecture" should be set to "source".
342 :param whoami: The person (or entity) doing the removal. Defaults to utils.whoami()
343 :param partial: Whether the removal is "partial" (e.g. architecture specific).
344 :param components: List of components involved in a partial removal. Can be an empty list to not restrict the
345 removal to any components.
346 :param done_bugs: A list of bugs to be closed when doing this removal.
347 :param date: The date of the removal. Defaults to `date -R`
348 :param carbon_copy: A list of mail addresses to CC when doing removals. NB: all items are taken "as-is" unlike
349 "dak rm".
350 :param close_related_bugs: Whether bugs related to the package being removed should be closed as well. NB: Not implemented
351 for more than one suite.
352 """
    # Generate the summary of what's to be removed
    d = {}
    summary = ""
    affected_sources = set()
    sources = []
    binaries = []
    whitelists = []
    versions = []
    newest_source = ""
    suite_ids_list = []
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    con_components = ""

    #######################################################################################################

    if not reason:
        raise ValueError("Empty removal reason not permitted")
    reason = reason.strip()

    if not removals:
        raise ValueError("Nothing to remove!?")

    if not suites:
        raise ValueError("Removals without a suite!?")

    if whoami is None:
        whoami = utils.whoami()

    if date is None:
        date = email.utils.formatdate()

    if partial and components:
        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND component IN (%s)" % ", ".join(
                [str(i) for i in component_ids_list]
            )

    for i in removals:
        package = i[0]
        version = i[1]
        architecture = i[2]
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    for package in sorted(d):
        versions = sorted(d[package], key=functools.cmp_to_key(apt_pkg.version_compare))
        for version in versions:
            d[package][version].sort(key=utils.ArchKey)
            summary += "%10s | %10s | %s\n" % (
                package,
                version,
                ", ".join(d[package][version]),
            )
            if apt_pkg.version_compare(version, newest_source) > 0:
                newest_source = version

    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append(
                        "%s_%s" % tuple(elem.strip(" ") for elem in element[:2])
                    )
                    element[2] = sub(r"source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append(
                        "%s_%s [%s]" % tuple(elem.strip(" ") for elem in element)
                    )

    dsc_type_id = get_override_type("dsc", session).overridetype_id
    deb_type_id = get_override_type("deb", session).overridetype_id

    for suite in suites:
        s = get_suite(suite, session=session)
        if s is not None:
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    #######################################################################################################
    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    with open(log_filename, "a") as logfile, open(log822_filename, "a") as logfile822:
        fcntl.lockf(logfile, fcntl.LOCK_EX)
        fcntl.lockf(logfile822, fcntl.LOCK_EX)

        logfile.write(
            "=========================================================================\n"
        )
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write(
            "Removed the following packages from %s:\n\n%s" % (suites_list, summary)
        )
        if done_bugs:
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

        if sources:
            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

        if binaries:
            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        logfile822.write("Reason: %s\n" % reason.replace("\n", "\n "))
        if done_bugs:
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

        for i in removals:
            package = i[0]
            architecture = i[2]
            package_id = i[3]
            for suite_id in suite_ids_list:
                if architecture == "source":
                    q = session.execute(
                        "DELETE FROM src_associations sa USING source s WHERE sa.source = s.id AND sa.source = :packageid AND sa.suite = :suiteid RETURNING s.source",
                        {"packageid": package_id, "suiteid": suite_id},
                    )
                    affected_sources.add(q.scalar())
                else:
                    q = session.execute(
                        "DELETE FROM bin_associations ba USING binaries b, source s WHERE ba.bin = b.id AND b.source = s.id AND ba.bin = :packageid AND ba.suite = :suiteid RETURNING s.source",
                        {"packageid": package_id, "suiteid": suite_id},
                    )
                    affected_sources.add(q.scalar())
                # Delete from the override file
                if not partial:
                    if architecture == "source":
                        type_id = dsc_type_id
                    else:
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute(
                        "DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s"
                        % (con_components),
                        {"package": package, "typeid": type_id, "suiteid": suite_id},
                    )

        session.commit()
        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn(
                    "Cannot send mail to BugServer as Dinstall::BugServer is not configured"
                )

            logfile.write(
                "=========================================================================\n"
            )
            logfile822.write("\n")
            return

        # read common subst variables for all bug closure mails
        Subst_common = {}
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
        if carbon_copy:
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SOURCES__"] = ", ".join(sorted(affected_sources))
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUITES__"] = ", ".join(sorted(suites))
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
        if done_bugs:
            Subst_close_rm = Subst_common
            bcc = []
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
            if bcc:
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (
                summary,
                reason,
            )
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(
                        Subst_close_rm,
                        cnf["Dir::Templates"] + "/rm.bug-close-with-related",
                    )
                else:
                    mail_message = utils.TemplateSubst(
                        Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close"
                    )
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            Subst_close_other = Subst_common
            bcc = []
            wnpp = utils.parse_wnpp_bug_file()
            newest_source = re_bin_only_nmu.sub("", newest_source)
            if len(set(s.split("_", 1)[0] for s in sources)) == 1:
                source_pkg = source.split("_", 1)[0]
            else:
                logfile.write(
                    "=========================================================================\n"
                )
                logfile822.write("\n")
                raise ValueError(
                    "Closing bugs for multiple source packages is not supported. Please do it yourself."
                )
            if newest_source != "":
                Subst_close_other["__VERSION__"] = newest_source
            else:
                logfile.write(
                    "=========================================================================\n"
                )
                logfile822.write("\n")
                raise ValueError("No versions can be found. Close bugs yourself.")
            if bcc:
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # At this point we just assume that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0]
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg
            merged_bugs = set()
            other_bugs = bts.get_bugs(src=source_pkg, status=("open", "forwarded"))
            if other_bugs:
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bug in bts.get_status(bugno):
                            for merged in bug.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bug in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += (
                        str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    )
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs, so filter those out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += (
                            str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        )
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(
                Subst_close_other, cnf["Dir::Templates"] + "/rm.bug-close-related"
            )
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write(
            "=========================================================================\n"
        )
        logfile822.write("\n")