
Source Code for Module daklib.rm

"""General purpose package removal code for ftpmaster

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2006  James Troup <james@nocrew.org>
@copyright: 2010 Alexander Reichle-Schmehl <tolimar@debian.org>
@copyright: 2015 Niels Thykier <niels@thykier.net>
@license: GNU General Public License version 2 or later
"""
# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006  James Troup <james@nocrew.org>
# Copyright (C) 2010 Alexander Reichle-Schmehl <tolimar@debian.org>

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

################################################################################

# From: Andrew Morton <akpm@osdl.org>
# Subject: 2.6.6-mm5
# To: linux-kernel@vger.kernel.org
# Date: Sat, 22 May 2004 01:36:36 -0700
# X-Mailer: Sylpheed version 0.9.7 (GTK+ 1.2.10; i386-redhat-linux-gnu)
#
# [...]
#
# Although this feature has been around for a while it is new code, and the
# usual cautions apply.  If it munches all your files please tell Jens and
# he'll type them in again for you.

################################################################################

import apt_pkg
import fcntl
import functools
import sqlalchemy.sql as sql
import email.utils
from re import sub
from collections import defaultdict
from .regexes import re_build_dep_arch

from daklib.dbconn import *
from daklib import utils
from daklib.regexes import re_bin_only_nmu
import debianbts as bts

################################################################################


class ReverseDependencyChecker:
    """A bulk tester for reverse dependency checks

    This class is similar to the check_reverse_depends method from "utils". However,
    it is primarily focused on facilitating bulk testing of reverse dependencies.
    It caches the state of the suite and then uses that as the basis for answering
    queries.  This saves a significant amount of time if multiple reverse dependency
    checks are required.
    """

    def __init__(self, session, suite):
        """Creates a new ReverseDependencyChecker instance

        This will spend a significant amount of time caching data.

        @type session: SQLA Session
        @param session: The database session in use

        @type suite: str
        @param suite: The name of the suite that is used as the basis for removal tests.
        """
        self._session = session
        dbsuite = get_suite(suite, session)
        suite_archs2id = dict((x.arch_string, x.arch_id) for x in get_suite_architectures(suite))
        package_dependencies, arch_providers_of, arch_provided_by = self._load_package_information(session,
                                                                                                   dbsuite.suite_id,
                                                                                                   suite_archs2id)
        self._package_dependencies = package_dependencies
        self._arch_providers_of = arch_providers_of
        self._arch_provided_by = arch_provided_by
        self._archs_in_suite = set(suite_archs2id)

    @staticmethod
    def _load_package_information(session, suite_id, suite_archs2id):
        package_dependencies = defaultdict(lambda: defaultdict(set))
        arch_providers_of = defaultdict(lambda: defaultdict(set))
        arch_provided_by = defaultdict(lambda: defaultdict(set))
        source_deps = defaultdict(set)
        metakey_d = get_or_set_metadatakey("Depends", session)
        metakey_p = get_or_set_metadatakey("Provides", session)
        params = {
            'suite_id': suite_id,
            'arch_all_id': suite_archs2id['all'],
            'metakey_d_id': metakey_d.key_id,
            'metakey_p_id': metakey_p.key_id,
        }
        all_arches = set(suite_archs2id)
        all_arches.discard('source')

        package_dependencies['source'] = source_deps

        for architecture in all_arches:
            deps = defaultdict(set)
            providers_of = defaultdict(set)
            provided_by = defaultdict(set)
            arch_providers_of[architecture] = providers_of
            arch_provided_by[architecture] = provided_by
            package_dependencies[architecture] = deps

            params['arch_id'] = suite_archs2id[architecture]

            statement = sql.text('''
                SELECT b.package,
                    (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                    (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                    FROM binaries b
                        JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                    WHERE b.architecture = :arch_id OR b.architecture = :arch_all_id''')
            query = session.query(sql.column('package'), sql.column('depends'), sql.column('provides')). \
                from_statement(statement).params(params)
            for package, depends, provides in query:

                if depends is not None:
                    try:
                        parsed_dep = []
                        for dep in apt_pkg.parse_depends(depends):
                            parsed_dep.append(frozenset(d[0] for d in dep))
                        deps[package].update(parsed_dep)
                    except ValueError as e:
                        print("Error for package %s: %s" % (package, e))
                # Maintain a counter for each virtual package.  If a
                # Provides: exists, set the counter to 0 and count all
                # provides by a package not in the list for removal.
                # If the counter stays 0 at the end, we know that only
                # the to-be-removed packages provided this virtual
                # package.
                if provides is not None:
                    for virtual_pkg in provides.split(","):
                        virtual_pkg = virtual_pkg.strip()
                        if virtual_pkg == package:
                            continue
                        provided_by[virtual_pkg].add(package)
                        providers_of[package].add(virtual_pkg)

        # Check source dependencies (Build-Depends and Build-Depends-Indep)
        metakey_bd = get_or_set_metadatakey("Build-Depends", session)
        metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
        params = {
            'suite_id': suite_id,
            'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
        }
        statement = sql.text('''
            SELECT s.source, string_agg(sm.value, ', ') as build_dep
                FROM source s
                JOIN source_metadata sm ON s.id = sm.src_id
                WHERE s.id in
                    (SELECT src FROM newest_src_association
                        WHERE suite = :suite_id)
                    AND sm.key_id in :metakey_ids
                GROUP BY s.id, s.source''')
        query = session.query(sql.column('source'), sql.column('build_dep')) \
            .from_statement(statement).params(params)
        for source, build_dep in query:
            if build_dep is not None:
                # Remove [arch] information since we want to see breakage on all arches
                build_dep = re_build_dep_arch.sub("", build_dep)
                try:
                    parsed_dep = []
                    for dep in apt_pkg.parse_src_depends(build_dep):
                        parsed_dep.append(frozenset(d[0] for d in dep))
                    source_deps[source].update(parsed_dep)
                except ValueError as e:
                    print("Error for package %s: %s" % (source, e))

        return package_dependencies, arch_providers_of, arch_provided_by

    def check_reverse_depends(self, removal_requests):
        """Bulk check reverse dependencies

        Example:
          removal_request = {
            "eclipse-rcp": None,             # means ALL architectures (incl. source)
            "eclipse": None,                 # means ALL architectures (incl. source)
            "lintian": ["source", "all"],    # Only these two "architectures".
          }
          obj.check_reverse_depends(removal_request)

        @type removal_requests: dict (or a list of tuples)
        @param removal_requests: A dictionary mapping a package name to a list of architectures.  The list of
          architectures decides which architectures the package will be removed from - if the list is empty
          the package will be removed on ALL architectures in the suite (including "source").

        @rtype: dict
        @return: A mapping of "removed package" (as a "(pkg, arch)"-tuple) to a set of broken
          packages (also as "(pkg, arch)"-tuples).  Note that the architecture values
          in these tuples /can/ be "source" to reflect a breakage in build-dependencies.
        """

        archs_in_suite = self._archs_in_suite
        removals_by_arch = defaultdict(set)
        affected_virtual_by_arch = defaultdict(set)
        package_dependencies = self._package_dependencies
        arch_providers_of = self._arch_providers_of
        arch_provided_by = self._arch_provided_by
        arch_provides2removal = defaultdict(lambda: defaultdict(set))
        dep_problems = defaultdict(set)
        src_deps = package_dependencies['source']
        src_removals = set()
        arch_all_removals = set()

        if isinstance(removal_requests, dict):
            removal_requests = removal_requests.items()

        for pkg, arch_list in removal_requests:
            if not arch_list:
                arch_list = archs_in_suite
            for arch in arch_list:
                if arch == 'source':
                    src_removals.add(pkg)
                    continue
                if arch == 'all':
                    arch_all_removals.add(pkg)
                    continue
                removals_by_arch[arch].add(pkg)
                if pkg in arch_providers_of[arch]:
                    affected_virtual_by_arch[arch].add(pkg)

        if arch_all_removals:
            for arch in archs_in_suite:
                if arch in ('all', 'source'):
                    continue
                removals_by_arch[arch].update(arch_all_removals)
                for pkg in arch_all_removals:
                    if pkg in arch_providers_of[arch]:
                        affected_virtual_by_arch[arch].add(pkg)

        if not removals_by_arch:
            # Nothing to remove => no problems
            return dep_problems

        for arch, removed_providers in affected_virtual_by_arch.items():
            provides2removal = arch_provides2removal[arch]
            removals = removals_by_arch[arch]
            for virtual_pkg, virtual_providers in arch_provided_by[arch].items():
                v = virtual_providers & removed_providers
                if len(v) == len(virtual_providers):
                    # We removed all the providers of virtual_pkg
                    removals.add(virtual_pkg)
                    # Pick one to take the blame for the removal
                    # - we sort for determinism, optimally we would prefer to blame the same package
                    #   to minimise the number of blamed packages.
                    provides2removal[virtual_pkg] = sorted(v)[0]

        for arch, removals in removals_by_arch.items():
            deps = package_dependencies[arch]
            provides2removal = arch_provides2removal[arch]

            # Check binary dependencies (Depends)
            for package, dependencies in deps.items():
                if package in removals:
                    continue
                for clause in dependencies:
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                        continue
                    # whoops, we seem to have removed all packages that could possibly satisfy
                    # this relation.  Let's blame something for it
                    for dep_package in clause:
                        removal = dep_package
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((package, arch))

            for source, build_dependencies in src_deps.items():
                if source in src_removals:
                    continue
                for clause in build_dependencies:
                    if not (clause <= removals):
                        # Something probably still satisfies this relation
                        continue
                    # whoops, we seem to have removed all packages that could possibly satisfy
                    # this relation.  Let's blame something for it
                    for dep_package in clause:
                        removal = dep_package
                        if dep_package in provides2removal:
                            removal = provides2removal[dep_package]
                        dep_problems[(removal, arch)].add((source, 'source'))

        return dep_problems

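The class above is normally driven by ftpmaster tooling such as "dak rm". The following is a minimal usage sketch (not part of the module): it assumes a configured dak installation in which DBConn().session() from daklib.dbconn yields a usable SQLAlchemy session, and the suite name "unstable" and the package "lintian" are purely illustrative placeholders:

    from daklib.dbconn import DBConn
    from daklib.rm import ReverseDependencyChecker

    session = DBConn().session()
    # Constructing the checker caches the Depends/Provides/Build-Depends state of
    # the suite once, so several removal candidates can be tested cheaply.
    checker = ReverseDependencyChecker(session, "unstable")

    # None would mean "all architectures (including source)"; a list restricts the check.
    removal_request = {
        "lintian": ["source", "all"],
    }
    problems = checker.check_reverse_depends(removal_request)
    for (removed, arch), broken in problems.items():
        # Keys are the removed (package, architecture); values are the packages whose
        # Depends or Build-Depends would no longer be satisfiable ("source" marks
        # build-dependency breakage).
        print("%s/%s would break: %s" % (removed, arch,
                                         ", ".join(sorted(p for p, a in broken))))

Since the constructor does all the expensive caching up front, the class pays off mainly when many removal candidates are tested against the same suite; for a single ad-hoc check, the check_reverse_depends helper in "utils" mentioned in the docstring may be just as convenient.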

def remove(session, reason, suites, removals,
           whoami=None, partial=False, components=None, done_bugs=None, date=None,
           carbon_copy=None, close_related_bugs=False):
    """Batch remove a number of packages

    @type session: SQLA Session
    @param session: The database session in use

    @type reason: string
    @param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")

    @type suites: list
    @param suites: A list of the suite names in which the removal should occur

    @type removals: list
    @param removals: A list of the removals.  Each element should be a tuple (or list) of at least the following
        4 items from the database (in order): package, version, architecture, (database) id.
        For source packages, the "architecture" should be set to "source".

    @type partial: bool
    @param partial: Whether the removal is "partial" (e.g. architecture specific).

    @type components: list
    @param components: List of components involved in a partial removal.  Can be an empty list to not restrict the
        removal to any components.

    @type whoami: string
    @param whoami: The person (or entity) doing the removal.  Defaults to utils.whoami()

    @type date: string
    @param date: The date of the removal.  Defaults to the current date in `date -R` format.

    @type done_bugs: list
    @param done_bugs: A list of bugs to be closed when doing this removal.

    @type close_related_bugs: bool
    @param close_related_bugs: Whether bugs related to the package being removed should be closed as well.
        NB: Not implemented for more than one suite.

    @type carbon_copy: list
    @param carbon_copy: A list of mail addresses to CC when doing removals.  NB: all items are taken "as-is" unlike
        "dak rm".

    @rtype: None
    @return: Nothing
    """
    # Generate the summary of what's to be removed
    d = {}
    summary = ""
    sources = []
    binaries = []
    whitelists = []
    versions = []
    newest_source = ''
    suite_ids_list = []
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    con_components = ''

    #######################################################################################################

    if not reason:
        raise ValueError("Empty removal reason not permitted")
    reason = reason.strip()

    if not removals:
        raise ValueError("Nothing to remove!?")

    if not suites:
        raise ValueError("Removals without a suite!?")

    if whoami is None:
        whoami = utils.whoami()

    if date is None:
        date = email.utils.formatdate()

    if partial and components:

        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND component IN (%s)" % ", ".join([str(i) for i in component_ids_list])

    for i in removals:
        package = i[0]
        version = i[1]
        architecture = i[2]
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    for package in sorted(d):
        versions = sorted(d[package], key=functools.cmp_to_key(apt_pkg.version_compare))
        for version in versions:
            d[package][version].sort(key=utils.ArchKey)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
            if apt_pkg.version_compare(version, newest_source) > 0:
                newest_source = version

    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    element[2] = sub(r"source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    for suite in suites:
        s = get_suite(suite, session=session)
        if s is not None:
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    #######################################################################################################
    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    with open(log_filename, "a") as logfile, open(log822_filename, "a") as logfile822:
        fcntl.lockf(logfile, fcntl.LOCK_EX)
        fcntl.lockf(logfile822, fcntl.LOCK_EX)

        logfile.write("=========================================================================\n")
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
        if done_bugs:
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

        if sources:
            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

        if binaries:
            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        logfile822.write("Reason: %s\n" % reason.replace('\n', '\n '))
        if done_bugs:
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

        for i in removals:
            package = i[0]
            architecture = i[2]
            package_id = i[3]
            for suite_id in suite_ids_list:
                if architecture == "source":
                    session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                else:
                    session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                # Delete from the override file
                if not partial:
                    if architecture == "source":
                        type_id = dsc_type_id
                    else:
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (con_components), {'package': package, 'typeid': type_id, 'suiteid': suite_id})

        session.commit()
        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn("Cannot send mail to BugServer as Dinstall::BugServer is not configured")

            logfile.write("=========================================================================\n")
            logfile822.write("\n")
            return

        # read common subst variables for all bug closure mails
        Subst_common = {}
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
        if carbon_copy:
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
        if done_bugs:
            Subst_close_rm = Subst_common
            bcc = []
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
            if bcc:
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, reason)
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close-with-related")
                else:
                    mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close")
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            Subst_close_other = Subst_common
            bcc = []
            wnpp = utils.parse_wnpp_bug_file()
            newest_source = re_bin_only_nmu.sub('', newest_source)
            if len(set(s.split("_", 1)[0] for s in sources)) == 1:
                source_pkg = source.split("_", 1)[0]
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs for multiple source packages is not supported.  Please do it yourself.")
            if newest_source != '':
                Subst_close_other["__VERSION__"] = newest_source
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("No versions can be found.  Close bugs yourself.")
            if bcc:
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # at this point, I just assume that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0]
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg
            merged_bugs = set()
            other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open', 'status', 'forwarded')
            if other_bugs:
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bug in bts.get_status(bugno):
                            for merged in bug.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bug in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs, filtering that out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(Subst_close_other, cnf["Dir::Templates"] + "/rm.bug-close-related")
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write("=========================================================================\n")
        logfile822.write("\n")
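
To illustrate how remove() is meant to be driven, here is a hedged sketch (not part of the module). It assumes a fully configured dak installation (Rm::LogFile, Rm::LogFile822 and the Dinstall settings must exist in the configuration); the package/version/architecture/id tuples and the bug number are placeholders that would normally be looked up from the database first:

    from daklib.dbconn import DBConn
    from daklib import rm

    session = DBConn().session()

    # Each entry: (package, version, architecture, database id); use "source" as the
    # architecture to drop the source package itself.
    removals = [
        ("hello", "2.10-2", "source", 11111),   # placeholder database ids
        ("hello", "2.10-2", "amd64", 22222),
    ]

    # Removes the listed packages from unstable, appends to the Rm log files,
    # commits the session and, if Dinstall::BugServer is configured, mails
    # closures for the listed bugs.
    rm.remove(session, "RoQA; orphaned; dead upstream", ["unstable"], removals,
              done_bugs=["123456"])

Note that remove() commits the passed session and appends to the global removal logs, so it should only be run against a real database when the removal is actually intended.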