# (coverage-report residue: daklib/rm.py — 41% of 351 statements, coverage.py v7.6.0, 2026-01-04)

"""General purpose package removal code for ftpmaster

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
@copyright: 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
@copyright: 2015 Niels Thykier <niels@thykier.net>
@license: GNU General Public License version 2 or later
"""

9 

# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
# Copyright (C) 2010 Alexander Reichle-Schmehl <tolimar@debian.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

################################################################################

# From: Andrew Morton <akpm@osdl.org>
# Subject: 2.6.6-mm5
# To: linux-kernel@vger.kernel.org
# Date: Sat, 22 May 2004 01:36:36 -0700
# X-Mailer: Sylpheed version 0.9.7 (GTK+ 1.2.10; i386-redhat-linux-gnu)
#
# [...]
#
# Although this feature has been around for a while it is new code, and the
# usual cautions apply. If it munches all your files please tell Jens and
# he'll type them in again for you.

42 

import email.utils
import fcntl
import functools
from collections import defaultdict
from collections.abc import Collection
from re import sub
from typing import TYPE_CHECKING, Any, Optional

import apt_pkg
import debianbts as bts
import sqlalchemy.sql as sql

from daklib import utils
from daklib.dbconn import (
    get_component,
    get_or_set_metadatakey,
    get_override_type,
    get_suite,
    get_suite_architectures,
)
from daklib.regexes import re_bin_only_nmu

from .regexes import re_build_dep_arch

if TYPE_CHECKING:
    from sqlalchemy.orm import Session

################################################################################


73class ReverseDependencyChecker: 

74 """A bulk tester for reverse dependency checks 

75 

76 This class is similar to the check_reverse_depends method from "utils". However, 

77 it is primarily focused on facilitating bulk testing of reverse dependencies. 

78 It caches the state of the suite and then uses that as basis for answering queries. 

79 This saves a significant amount of time if multiple reverse dependency checks are 

80 required. 

81 """ 

82 

83 def __init__(self, session: "Session", suite: str): 

84 """Creates a new ReverseDependencyChecker instance 

85 

86 This will spend a significant amount of time caching data. 

87 

88 :param session: The database session in use 

89 :param suite: The name of the suite that is used as basis for removal tests. 

90 """ 

91 self._session = session 

92 dbsuite = get_suite(suite, session) 

93 assert dbsuite is not None 

94 suite_archs2id = dict( 

95 (x.arch_string, x.arch_id) for x in get_suite_architectures(suite) 

96 ) 

97 package_dependencies, arch_providers_of, arch_provided_by = ( 

98 self._load_package_information(session, dbsuite.suite_id, suite_archs2id) 

99 ) 

100 self._package_dependencies = package_dependencies 

101 self._arch_providers_of = arch_providers_of 

102 self._arch_provided_by = arch_provided_by 

103 self._archs_in_suite = set(suite_archs2id) 

104 

105 @staticmethod 

106 def _load_package_information( 

107 session: "Session", suite_id: int, suite_archs2id: dict[str, int] 

108 ) -> tuple[ 

109 dict[str, dict[str, set[frozenset[str]]]], 

110 dict[str, dict[str, set[str]]], 

111 dict[str, dict[str, set[str]]], 

112 ]: 

113 package_dependencies: dict[str, dict[str, set[frozenset[str]]]] = defaultdict( 

114 lambda: defaultdict(set) 

115 ) 

116 arch_providers_of: dict[str, dict[str, set[str]]] = defaultdict( 

117 lambda: defaultdict(set) 

118 ) 

119 arch_provided_by: dict[str, dict[str, set[str]]] = defaultdict( 

120 lambda: defaultdict(set) 

121 ) 

122 source_deps: dict[str, set[frozenset[str]]] = defaultdict(set) 

123 metakey_d = get_or_set_metadatakey("Depends", session) 

124 metakey_p = get_or_set_metadatakey("Provides", session) 

125 params: dict[str, Any] = { 

126 "suite_id": suite_id, 

127 "arch_all_id": suite_archs2id["all"], 

128 "metakey_d_id": metakey_d.key_id, 

129 "metakey_p_id": metakey_p.key_id, 

130 } 

131 all_arches = set(suite_archs2id) 

132 all_arches.discard("source") 

133 

134 package_dependencies["source"] = source_deps 

135 

136 for architecture in all_arches: 

137 deps: dict[str, set[frozenset[str]]] = defaultdict(set) 

138 providers_of: dict[str, set[str]] = defaultdict(set) 

139 provided_by: dict[str, set[str]] = defaultdict(set) 

140 arch_providers_of[architecture] = providers_of 

141 arch_provided_by[architecture] = provided_by 

142 package_dependencies[architecture] = deps 

143 

144 params["arch_id"] = suite_archs2id[architecture] 

145 

146 statement = sql.text( 

147 """ 

148 SELECT b.package, 

149 (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends, 

150 (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides 

151 FROM binaries b 

152 JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id 

153 WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id""" 

154 ) 

155 query = session.execute(statement, params) 

156 for package, depends, provides in query: 

157 

158 if depends is not None: 

159 try: 

160 parsed_dep = [] 

161 for dep in apt_pkg.parse_depends(depends): 

162 parsed_dep.append(frozenset(d[0] for d in dep)) 

163 deps[package].update(parsed_dep) 

164 except ValueError as e: 

165 print("Error for package %s: %s" % (package, e)) 

166 # Maintain a counter for each virtual package. If a 

167 # Provides: exists, set the counter to 0 and count all 

168 # provides by a package not in the list for removal. 

169 # If the counter stays 0 at the end, we know that only 

170 # the to-be-removed packages provided this virtual 

171 # package. 

172 if provides is not None: 

173 for virtual_pkg in provides.split(","): 

174 virtual_pkg = virtual_pkg.strip() 

175 if virtual_pkg == package: 

176 continue 

177 provided_by[virtual_pkg].add(package) 

178 providers_of[package].add(virtual_pkg) 

179 

180 # Check source dependencies (Build-Depends and Build-Depends-Indep) 

181 metakey_bd = get_or_set_metadatakey("Build-Depends", session) 

182 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session) 

183 params = { 

184 "suite_id": suite_id, 

185 "metakey_ids": (metakey_bd.key_id, metakey_bdi.key_id), 

186 } 

187 statement = sql.text( 

188 """ 

189 SELECT s.source, string_agg(sm.value, ', ') as build_dep 

190 FROM source s 

191 JOIN source_metadata sm ON s.id = sm.src_id 

192 WHERE s.id in 

193 (SELECT src FROM newest_src_association 

194 WHERE suite = :suite_id) 

195 AND sm.key_id in :metakey_ids 

196 GROUP BY s.id, s.source""" 

197 ) 

198 query = session.execute(statement, params) 

199 for source, build_dep in query: 

200 if build_dep is not None: 

201 # Remove [arch] information since we want to see breakage on all arches 

202 build_dep = re_build_dep_arch.sub("", build_dep) 

203 try: 

204 parsed_dep = [] 

205 for dep in apt_pkg.parse_src_depends(build_dep): 

206 parsed_dep.append(frozenset(d[0] for d in dep)) 

207 source_deps[source].update(parsed_dep) 

208 except ValueError as e: 

209 print("Error for package %s: %s" % (source, e)) 

210 

211 return package_dependencies, arch_providers_of, arch_provided_by 

212 

213 def check_reverse_depends( 

214 self, removal_requests: Collection[tuple[str, Collection[str] | None]] 

215 ) -> dict[tuple[str, str], set[tuple[str, str]]]: 

216 """Bulk check reverse dependencies 

217 

218 Example: 

219 removal_request = { 

220 "eclipse-rcp": None, # means ALL architectures (incl. source) 

221 "eclipse": None, # means ALL architectures (incl. source) 

222 "lintian": ["source", "all"], # Only these two "architectures". 

223 } 

224 obj.check_reverse_depends(removal_request) 

225 

226 :param removal_requests: A dictionary mapping a package name to a list of architectures. The list of 

227 architectures decides from which the package will be removed - if the list is empty the package will 

228 be removed on ALL architectures in the suite (including "source"). 

229 

230 :return: A mapping of "removed package" (as a "(pkg, arch)"-tuple) to a set of broken 

231 broken packages (also as "(pkg, arch)"-tuple). Note that the architecture values 

232 in these tuples /can/ be "source" to reflect a breakage in build-dependencies. 

233 """ 

234 

235 archs_in_suite = self._archs_in_suite 

236 removals_by_arch: dict[str, set[str]] = defaultdict(set) 

237 affected_virtual_by_arch = defaultdict(set) 

238 package_dependencies = self._package_dependencies 

239 arch_providers_of = self._arch_providers_of 

240 arch_provided_by = self._arch_provided_by 

241 arch_provides2removal: dict[str, dict[str, str]] = defaultdict(dict) 

242 dep_problems: defaultdict[tuple[str, str], set[tuple[str, str]]] = defaultdict( 

243 set 

244 ) 

245 src_deps = package_dependencies["source"] 

246 src_removals = set() 

247 arch_all_removals = set() 

248 

249 for pkg, arch_list in removal_requests: 

250 if not arch_list: 

251 arch_list = archs_in_suite 

252 for arch in arch_list: 

253 if arch == "source": 

254 src_removals.add(pkg) 

255 continue 

256 if arch == "all": 

257 arch_all_removals.add(pkg) 

258 continue 

259 removals_by_arch[arch].add(pkg) 

260 if pkg in arch_providers_of[arch]: 

261 affected_virtual_by_arch[arch].add(pkg) 

262 

263 if arch_all_removals: 

264 for arch in archs_in_suite: 

265 if arch in ("all", "source"): 

266 continue 

267 removals_by_arch[arch].update(arch_all_removals) 

268 for pkg in arch_all_removals: 

269 if pkg in arch_providers_of[arch]: 

270 affected_virtual_by_arch[arch].add(pkg) 

271 

272 if not removals_by_arch: 

273 # Nothing to remove => no problems 

274 return dep_problems 

275 

276 for arch, removed_providers in affected_virtual_by_arch.items(): 

277 provides2removal = arch_provides2removal[arch] 

278 removals = removals_by_arch[arch] 

279 for virtual_pkg, virtual_providers in arch_provided_by[arch].items(): 

280 v = virtual_providers & removed_providers 

281 if len(v) == len(virtual_providers): 

282 # We removed all the providers of virtual_pkg 

283 removals.add(virtual_pkg) 

284 # Pick one to take the blame for the removal 

285 # - we sort for determinism, optimally we would prefer to blame the same package 

286 # to minimise the number of blamed packages. 

287 provides2removal[virtual_pkg] = sorted(v)[0] 

288 

289 for arch, removals in removals_by_arch.items(): 

290 deps = package_dependencies[arch] 

291 provides2removal = arch_provides2removal[arch] 

292 

293 # Check binary dependencies (Depends) 

294 for package, dependencies in deps.items(): 

295 if package in removals: 

296 continue 

297 for clause in dependencies: 

298 if not (clause <= removals): 

299 # Something probably still satisfies this relation 

300 continue 

301 # whoops, we seemed to have removed all packages that could possibly satisfy 

302 # this relation. Lets blame something for it 

303 for dep_package in clause: 

304 removal = dep_package 

305 if dep_package in provides2removal: 

306 removal = provides2removal[dep_package] 

307 dep_problems[(removal, arch)].add((package, arch)) 

308 

309 for source, build_dependencies in src_deps.items(): 

310 if source in src_removals: 

311 continue 

312 for clause in build_dependencies: 

313 if not (clause <= removals): 

314 # Something probably still satisfies this relation 

315 continue 

316 # whoops, we seemed to have removed all packages that could possibly satisfy 

317 # this relation. Lets blame something for it 

318 for dep_package in clause: 

319 removal = dep_package 

320 if dep_package in provides2removal: 

321 removal = provides2removal[dep_package] 

322 dep_problems[(removal, arch)].add((source, "source")) 

323 

324 return dep_problems 


def remove(
    session: "Session",
    reason: str,
    suites: list[str],
    removals: list[tuple[str, str, str, int]],
    whoami: Optional[str] = None,
    partial: bool = False,
    components: Optional[list[str]] = None,
    done_bugs: Optional[list[str]] = None,
    date: Optional[str] = None,
    carbon_copy: Optional[list[str]] = None,
    close_related_bugs: bool = False,
) -> None:
    """Batch remove a number of packages

    Deletes the given package/suite associations, logs the removal to the
    plain-text and RFC822-style removal log files, and (when a bug server is
    configured) sends the bug-closing mails.

    :param session: The database session in use
    :param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")
    :param suites: A list of the suite names in which the removal should occur
    :param removals: A list of the removals. Each element should be a tuple (or list) of at least the following
        4 items from the database (in order): package, version, architecture, (database) id.
        For source packages, the "architecture" should be set to "source".
    :param whoami: The person (or entity) doing the removal. Defaults to utils.whoami()
    :param partial: Whether the removal is "partial" (e.g. architecture specific).
    :param components: List of components involved in a partial removal. Can be an empty list to not restrict the
        removal to any components.
    :param done_bugs: A list of bugs to be closed when doing this removal.
    :param date: The date of the removal. Defaults to `date -R`
    :param carbon_copy: A list of mail addresses to CC when doing removals. NB: all items are taken "as-is" unlike
        "dak rm".
    :param close_related_bugs: Whether bugs related to the package being removed should be closed as well. NB: Not
        implemented for more than one suite.
    """
    # Generate the summary of what's to be removed
    d: dict[str, dict[str, list[str]]] = {}  # package -> version -> [architectures]
    summary = ""
    affected_sources: set[str] = set()
    sources = []
    binaries = []
    whitelists = []
    versions = []
    newest_source = ""
    suite_ids_list = []
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    con_components = ""

    #######################################################################################################

    # Validate the arguments before touching the database or log files.
    if not reason:
        raise ValueError("Empty removal reason not permitted")
    reason = reason.strip()

    if not removals:
        raise ValueError("Nothing to remove!?")

    if not suites:
        raise ValueError("Removals without a suite!?")

    if whoami is None:
        whoami = utils.whoami()

    if date is None:
        date = email.utils.formatdate()

    # For a component-restricted partial removal, build the SQL fragment that
    # limits the override deletion to the given component ids.
    if partial and components:

        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND component IN (%s)" % ", ".join(
                [str(i) for i in component_ids_list]
            )

    # Group the removals: package -> version -> list of architectures.
    for i in removals:
        package = i[0]
        version = i[1]
        architecture = i[2]
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    # Render the "package | version | arches" summary table and remember the
    # newest version seen (used in the bug-closing mails further down).
    for package in sorted(d):
        versions = sorted(d[package], key=functools.cmp_to_key(apt_pkg.version_compare))
        for version in versions:
            d[package][version].sort(key=utils.ArchKey)
            summary += "%10s | %10s | %s\n" % (
                package,
                version,
                ", ".join(d[package][version]),
            )
            if apt_pkg.version_compare(version, newest_source) > 0:
                newest_source = version

    # Re-parse the rendered summary to split the removals into source and
    # binary lists for the RFC822 log.
    # NOTE(review): the inner split("\n") is a no-op — the outer split already
    # yields newline-free rows.
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                # The arch column starts with a space, hence find() > 0 holds
                # whenever "source" occurs in it.
                if element[2].find("source") > 0:
                    sources.append(
                        "%s_%s" % tuple(elem.strip(" ") for elem in element[:2])
                    )
                    element[2] = sub(r"source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append(
                        "%s_%s [%s]" % tuple(elem.strip(" ") for elem in element)
                    )

    # Override-type ids used when purging override entries below.
    dsc_type = get_override_type("dsc", session)
    assert dsc_type is not None
    dsc_type_id = dsc_type.overridetype_id
    deb_type = get_override_type("deb", session)
    assert deb_type is not None
    deb_type_id = deb_type.overridetype_id

    # Resolve suite names to ids and collect their mail whitelists.
    for suite in suites:
        s = get_suite(suite, session=session)
        if s is not None:
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    #######################################################################################################
    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    with open(log_filename, "a") as logfile, open(log822_filename, "a") as logfile822:
        # Exclusive locks so concurrent removals do not interleave log output.
        fcntl.lockf(logfile, fcntl.LOCK_EX)
        fcntl.lockf(logfile822, fcntl.LOCK_EX)

        logfile.write(
            "=========================================================================\n"
        )
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write(
            "Removed the following packages from %s:\n\n%s" % (suites_list, summary)
        )
        if done_bugs:
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

        if sources:
            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

        if binaries:
            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        logfile822.write("Reason: %s\n" % reason.replace("\n", "\n "))
        if done_bugs:
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

        # Perform the actual removals, remembering each affected source
        # package for the summary mail.
        for i in removals:
            package = i[0]
            architecture = i[2]
            package_id = i[3]
            for suite_id in suite_ids_list:
                if architecture == "source":
                    q = session.execute(
                        sql.text(
                            "DELETE FROM src_associations sa USING source s WHERE sa.source = s.id AND sa.source = :packageid AND sa.suite = :suiteid RETURNING s.source"
                        ),
                        {"packageid": package_id, "suiteid": suite_id},
                    )
                    affected_sources.add(q.scalar_one())
                else:
                    q = session.execute(
                        sql.text(
                            "DELETE FROM bin_associations ba USING binaries b, source s WHERE ba.bin = b.id AND b.source = s.id AND ba.bin = :packageid AND ba.suite = :suiteid RETURNING s.source"
                        ),
                        {"packageid": package_id, "suiteid": suite_id},
                    )
                    affected_sources.add(q.scalar_one())
                # Delete from the override file
                if not partial:
                    if architecture == "source":
                        type_id = dsc_type_id
                    else:
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute(
                        sql.text(
                            "DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s"
                            % (con_components)
                        ),
                        {"package": package, "typeid": type_id, "suiteid": suite_id},
                    )

        session.commit()
        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn(
                    "Cannot send mail to BugServer as Dinstall::BugServer is not configured"
                )

            logfile.write(
                "=========================================================================\n"
            )
            logfile822.write("\n")
            return

        # read common subst variables for all bug closure mails
        Subst_common = {}
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
        if carbon_copy:
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SOURCES__"] = ", ".join(sorted(affected_sources))
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUITES__"] = ", ".join(sorted(suites))
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
        if done_bugs:
            # NOTE(review): this aliases Subst_common rather than copying it,
            # so the assignments below also mutate Subst_common — confirm
            # that is intended.
            Subst_close_rm = Subst_common
            bcc = []
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
            if bcc:
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (
                summary,
                reason,
            )
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(
                        Subst_close_rm,
                        cnf["Dir::Templates"] + "/rm.bug-close-with-related",
                    )
                else:
                    mail_message = utils.TemplateSubst(
                        Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close"
                    )
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            Subst_close_other = Subst_common
            bcc = []
            wnpp = utils.parse_wnpp_bug_file()
            newest_source = re_bin_only_nmu.sub("", newest_source)
            if len(set(s.split("_", 1)[0] for s in sources)) == 1:
                # NOTE(review): "source" is the leftover loop variable from the
                # "Sources:" logging loop above; it is only bound when sources
                # is non-empty — confirm this is intended.
                source_pkg = source.split("_", 1)[0]
            else:
                logfile.write(
                    "=========================================================================\n"
                )
                logfile822.write("\n")
                raise ValueError(
                    "Closing bugs for multiple source packages is not supported. Please do it yourself."
                )
            if newest_source != "":
                Subst_close_other["__VERSION__"] = newest_source
            else:
                logfile.write(
                    "=========================================================================\n"
                )
                logfile822.write("\n")
                raise ValueError("No versions can be found. Close bugs yourself.")
            if bcc:
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # at this point, I just assume, that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0] if done_bugs else ""
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg
            merged_bugs = set()
            other_bugs = bts.get_bugs(src=source_pkg, status=("open", "forwarded"))  # type: ignore[arg-type]
            if other_bugs:
                # Collapse merged bugs so each group is only closed once.
                # NOTE(review): other_bugs is mutated (remove) while being
                # iterated, which can skip entries — verify this is safe here.
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bugreport in bts.get_status(bugno):
                            for merged in bugreport.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bugno in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += (
                        str(bugno) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    )
                    logfile.write(" " + str(bugno))
                    logfile822.write(" " + str(bugno))
                logfile.write("\n")
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs, filtering that out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += (
                            str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        )
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(
                Subst_close_other, cnf["Dir::Templates"] + "/rm.bug-close-related"
            )
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write(
            "=========================================================================\n"
        )
        logfile822.write("\n")