Package daklib :: Module checks
[hide private]
[frames] | [no frames]

Source Code for Module daklib.checks

   1  # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org> 
   2  # 
   3  # Parts based on code that is 
   4  # Copyright (C) 2001-2006, James Troup <james@nocrew.org> 
   5  # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org> 
   6  # 
   7  # This program is free software; you can redistribute it and/or modify 
   8  # it under the terms of the GNU General Public License as published by 
   9  # the Free Software Foundation; either version 2 of the License, or 
  10  # (at your option) any later version. 
  11  # 
  12  # This program is distributed in the hope that it will be useful, 
  13  # but WITHOUT ANY WARRANTY; without even the implied warranty of 
  14  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the 
  15  # GNU General Public License for more details. 
  16  # 
  17  # You should have received a copy of the GNU General Public License along 
  18  # with this program; if not, write to the Free Software Foundation, Inc., 
  19  # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
  20   
  21  """module provided pre-acceptance tests 
  22   
  23  Please read the documentation for the L{Check} class for the interface. 
  24  """ 
  25   
  26  from daklib.config import Config 
  27  from daklib.dbconn import * 
  28  import daklib.dbconn as dbconn 
  29  from daklib.regexes import * 
  30  from daklib.textutils import fix_maintainer, ParseMaintError 
  31  import daklib.lintian as lintian 
  32  import daklib.utils as utils 
  33  import daklib.upload 
  34   
  35  import apt_inst 
  36  import apt_pkg 
  37  from apt_pkg import version_compare 
  38  import datetime 
  39  import os 
  40  import subprocess 
  41  import tempfile 
  42  import textwrap 
  43  import time 
  44  import yaml 
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8"""
    for name in control.keys():
        try:
            # Reading the value forces `TagSection` to decode it; a broken
            # encoding surfaces as UnicodeDecodeError.
            # We should also do the same for the field name, but this requires
            # https://bugs.debian.org/995118 to be fixed.
            # TODO: make sure the field name is valid UTF-8 too
            control[name]
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, name))
58
class Reject(Exception):
    """exception raised by failing checks"""
63
class RejectExternalFilesMismatch(Reject):
    """exception raised by failing the external hashes check"""

    def __str__(self):
        # args are (filename, attribute-name, current value, external value)
        filename, attribute, current, external = self.args[:4]
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % (filename, attribute, current, external)
70
class RejectACL(Reject):
    """exception raised by failing ACL checks"""

    def __init__(self, acl, reason):
        # keep the failing ACL and the human-readable reason for reporting
        self.reason = reason
        self.acl = acl

    def __str__(self):
        acl_name = self.acl.name
        return "ACL {0}: {1}".format(acl_name, self.reason)
81
class Check:
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """

    def check(self, upload):
        """do checks

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError

    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError

    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False
122
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    # NOTE(review): the class description and the parameter docs for
    # _check_hashes used to be bare strings in the class body (after the
    # first method), so they were not attached to anything; they are now
    # proper docstrings. No runtime behavior changes.

    def check_replay(self, upload):
        """reject the upload if this exact .changes signature was seen before

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: the signature was already processed
        """
        # Use private session as we want to remember having seen the .changes
        # in all cases.
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        if r is not None:
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
        return True

    def check(self, upload):
        """check signatures of the .changes and (if present) the .dsc

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: a signature is missing, invalid, or not
            made by a known uploader
        """
        allow_source_untrusted_sig_keys = Config().value_list('Dinstall::AllowSourceUntrustedSigKeys')

        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.values())

        source = None
        try:
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            # Keys listed in AllowSourceUntrustedSigKeys may upload source
            # without a trusted .dsc signature.
            if changes.primary_fingerprint not in allow_source_untrusted_sig_keys:
                if not source.valid_signature:
                    raise Reject("Signature for .dsc not valid.")
                if source.primary_fingerprint != changes.primary_fingerprint:
                    raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.values())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type filename: str
        @param filename: name of the file the expected hash values are taken from

        @type files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for
        """
        try:
            for f in files:
                f.check(upload.directory)
        except daklib.upload.FileDoesNotExist as e:
            raise Reject('{0}: {1}\n'
                         'Perhaps you need to include the file in your upload?\n\n'
                         'If the orig tarball is missing, the -sa flag for dpkg-buildpackage will be your friend.'
                         .format(filename, str(e)))
        except daklib.upload.UploadException as e:
            raise Reject('{0}: {1}'.format(filename, str(e)))
188
class WeakSignatureCheck(Check):
    """Check that .changes and .dsc are not signed using a weak algorithm"""

    def check(self, upload):
        """reject uploads signed with a weak digest algorithm"""
        changes = upload.changes
        if changes.weak_signature:
            raise Reject("The .changes was signed using a weak algorithm (such as SHA-1)")

        source = changes.source
        if source is not None and source.weak_signature:
            raise Reject("The source package was signed using a weak algorithm (such as SHA-1)")

        return True
204
class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""

    def check(self, upload):
        """reject signatures that are too old or too far in the future"""
        changes = upload.changes

        # Tolerated window around "now": a signature may be up to one year
        # old, or up to seven days in the future (to allow for clock skew).
        age_max = datetime.timedelta(days=365)
        age_min = datetime.timedelta(days=-7)

        timestamp = changes.signature_timestamp
        age = datetime.datetime.utcnow() - timestamp

        if age > age_max:
            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
        if age < age_min:
            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))

        return True
225
class ChangesCheck(Check):
    """Check changes file for syntax errors."""

    def check(self, upload):
        """validate the syntax of the .changes file

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: the .changes fails a syntax check
        """
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        # Fields that every upload must carry.
        for field in ('Distribution', 'Source', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        # Extra fields required once binary packages are included.
        if len(changes.binaries) > 0:
            for field in ('Binary', 'Description'):
                if field not in control:
                    raise Reject('{0}: binary upload requires {1} field'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # Source and Version must parse; the version without the epoch is
        # needed below to compare against the filename.
        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # The .changes filename must agree with the Source and Version fields.
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # "source" in the Architecture field and an actual source package
        # must agree in both directions.
        if changes.sourceful and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and not changes.sourceful:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        # Accessing byhand_files can raise if the file list is inconsistent.
        try:
            changes.byhand_files
        except daklib.upload.InvalidChangesException as e:
            raise Reject('{0}'.format(e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
296
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""

    def check_single(self, session, f):
        """compare one file's size and digests against the external_files table"""
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern", {'pattern': '%/{}'.format(f.filename)})
        row = q.fetchone()
        ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum = row if row else (None, None, None, None)

        # No (or empty) external record: nothing to compare against.
        if not ext_size:
            return

        # Compare in the same order the attributes were fetched; the first
        # mismatch raises.
        comparisons = (
            ('size', f.size, ext_size),
            ('md5sum', f.md5sum, ext_md5sum),
            ('sha1sum', f.sha1sum, ext_sha1sum),
            ('sha256sum', f.sha256sum, ext_sha256sum),
        )
        for attribute, ours, external in comparisons:
            if external != ours:
                raise RejectExternalFilesMismatch(f.filename, attribute, ours, external)

    def check(self, upload):
        """run check_single over every file of the .changes and the .dsc"""
        cnf = Config()

        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.values():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.values():
                self.check_single(session, f)
335
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""

    def check(self, upload):
        """validate all binary packages of the upload

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: a binary package fails validation
        """
        debug_deb_name_postfix = "-dbgsym"
        # XXX: Handle dynamic debug section name here

        self._architectures = set()

        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every architecture announced in the .changes (except "source")
        # must be backed by at least one binary package.
        for arch in upload.changes.architectures:
            if arch == 'source':
                continue
            if arch not in self._architectures:
                raise Reject('{}: Architecture field includes {}, but no binary packages for {} are included in the upload'.format(upload.changes.filename, arch, arch))

        binaries = {binary.control['Package']: binary
                    for binary in upload.changes.binaries}

        for name, binary in list(binaries.items()):
            if name in upload.changes.binary_names:
                # Package is listed in Binary field. Everything is good.
                pass
            elif daklib.utils.is_in_debug_section(binary.control):
                # If we have a binary package in the debug section, we
                # can allow it to not be present in the Binary field
                # in the .changes file, so long as its name (without
                # -dbgsym) is present in the Binary list.
                if not name.endswith(debug_deb_name_postfix):
                    raise Reject('Package {0} is in the debug section, but '
                                 'does not end in {1}.'.format(name, debug_deb_name_postfix))

                # Right, so, it's named properly, let's check that
                # the corresponding package is in the Binary list
                origin_package_name = name[:-len(debug_deb_name_postfix)]
                if origin_package_name not in upload.changes.binary_names:
                    raise Reject(
                        "Debug package {debug}'s corresponding binary package "
                        "{origin} is not present in the Binary field.".format(
                            debug=name, origin=origin_package_name))
            else:
                # Someone was a nasty little hacker and put a package
                # into the .changes that isn't in debian/control. Bad,
                # bad person.
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))

        return True

    def check_binary(self, upload, binary):
        """validate a single binary package

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload being processed

        @type binary: L{daklib.upload.Binary}
        @param binary: binary package to check

        @raise daklib.checks.Reject: the binary package fails validation
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
        self._architectures.add(architecture)

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        def check_dependency_field(
                field, control,
                dependency_parser=apt_pkg.parse_depends,
                allow_alternatives=True,
                allow_relations=('', '<', '<=', '=', '>=', '>')):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    depends = dependency_parser(value)
                # Narrowed from a bare `except:` which also swallowed
                # SystemExit/KeyboardInterrupt.
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
                for group in depends:
                    if not allow_alternatives and len(group) != 1:
                        raise Reject('{0}: {1}: alternatives are not allowed'.format(fn, field))
                    for dep_pkg, dep_ver, dep_rel in group:
                        if dep_rel not in allow_relations:
                            # str.join takes a single iterable; the previous
                            # " ".join(dep_pkg, dep_rel, dep_ver) raised a
                            # TypeError instead of the intended Reject.
                            raise Reject('{}: {}: depends on {}, but only relations {} are allowed for this field'.format(fn, field, " ".join([dep_pkg, dep_rel, dep_ver]), allow_relations))

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Recommends', 'Replaces', 'Suggests'):
            check_dependency_field(field, control)

        check_dependency_field("Provides", control,
                               allow_alternatives=False,
                               allow_relations=('', '='))
        check_dependency_field("Built-Using", control,
                               dependency_parser=apt_pkg.parse_src_depends,
                               allow_alternatives=False,
                               allow_relations=('=',))
463
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """

    def check(self, upload):
        """reject binaries containing files with implausible timestamps

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: a member's mtime lies outside the window
        """
        cnf = Config()
        # Anything newer than now + grace (default 24h) or older than the
        # start of the cutoff year (default 1975) is rejected.
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24 * 3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        # Collects offending tar members while walking the .deb; closes over
        # the cutoffs computed above.
        class TarTime:
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()

            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.items():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # Inspect both the control and the data tarball of the .deb.
            for archive in (deb.control, deb.data):
                archive.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""

    def check_filename(self, control, filename, regex):
        """check that a source file's name matches the Source/Version fields

        @type control: mapping
        @param control: fields of the .dsc

        @type filename: str
        @param filename: name of the file to check

        @type regex: compiled regex
        @param regex: pattern the filename must match (if not an orig tarball)

        @raise daklib.checks.Reject: the filename is inconsistent
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        """validate the source package of the upload (if any)

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: the source package fails validation
        """
        # Architecture "source" in the .changes and the presence of an
        # actual source package must agree in both directions.
        if upload.changes.source is None:
            if upload.changes.sourceful:
                raise Reject("{}: Architecture field includes source, but no source package is included in the upload".format(upload.changes.filename))
            return True

        if not upload.changes.sourceful:
            raise Reject("{}: Architecture field does not include source, but a source package is included in the upload".format(upload.changes.filename))

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.values():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, list(source.files.keys()))
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
580
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""

    def check(self, upload):
        """reject uploads whose Distribution field lists more than one suite"""
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
588
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        """check whether the upload takes over a binary from another source

        @return: tuple (does_hijack, hijacked_binary_name, hijacked_from_source)
        """
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """evaluate a single ACL against the upload

        @return: tuple (result, reason) where result is C{None} if the ACL
            does not apply to this uploader, C{False} (with a reason) if it
            forbids the upload, and C{True} if it allows it
        """
        source_name = upload.changes.source_name

        # ACLs restricted to specific fingerprints/keyrings simply do not
        # apply to other uploaders.
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.values():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        # Per-source entries can either whitelist (allow_per_source) or
        # blacklist (deny_per_source) individual source packages.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        """check the uploader's key ACL and all global ACLs

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: no usable keyring/ACL
        @raise daklib.checks.RejectACL: an ACL forbids the upload
        """
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise RejectACL(acl, reason)

        # Global ACLs only reject on an explicit False; None (not applicable)
        # is fine here.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result is False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        """check the per-suite ACLs; at least one must accept the upload

        @raise daklib.checks.Reject: a per-suite ACL forbids the upload, or
            none of them accepts it
        """
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result is False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
687
class TransitionCheck(Check):
    """check for a transition"""

    def check(self, upload):
        """reject sourceful uploads of packages frozen for a transition

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: the source is part of an ongoing transition
        """
        if not upload.changes.sourceful:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            # `compare` is only read when current is not None (short-circuit).
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """load the transition description file

        @rtype: dict or None
        @return: parsed transitions, or C{None} if the file is unconfigured,
            missing, or broken
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
759
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    # NOTE(review): this description used to be a bare string after the first
    # method, so it was not the class docstring; it is now. No runtime
    # behavior changes.

    def is_source_only_upload(self, upload):
        """return True if the upload includes source but no binary packages"""
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        """apply the source-only upload policy described on the class

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: the source-only upload is not allowed
        """
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_source_only_new_keys = Config().value_list('Dinstall::AllowSourceOnlyNewKeys')
        allow_source_only_new_sources = Config().value_list('Dinstall::AllowSourceOnlyNewSources')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads', True)
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
                and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new \
                and changes.primary_fingerprint not in allow_source_only_new_keys \
                and changes.source_name not in allow_source_only_new_sources:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if 'all' not in changes.architectures and changes.source.package_list.has_arch_indep_packages():
            if not allow_no_arch_indep_uploads:
                raise Reject('Uploads must include architecture-independent packages.')

        return True
813
class NewOverrideCheck(Check):
    """Override NEW requirement
    """

    def check(self, upload):
        """clear the NEW flag for uploads signed by a configured override key"""
        if not upload.new:
            return True

        new_override_keys = Config().value_list('Dinstall::NewOverrideKeys')

        # Uploads signed by one of the configured keys skip the NEW queue.
        if upload.changes.primary_fingerprint in new_override_keys:
            upload.new = False

        return True
829
class ArchAllBinNMUCheck(Check):
    """Check for arch:all binNMUs"""

    def check(self, upload):
        """reject binary-only rebuilds that ship architecture-independent packages"""
        changes = upload.changes

        is_binnmu = changes.changes.get('Binary-Only') == 'yes'
        if is_binnmu and 'all' in changes.architectures:
            raise Reject('arch:all binNMUs are not allowed.')

        return True
841
class LintianCheck(Check):
    """Check package using lintian"""

    def check(self, upload):
        """run lintian on sourceful uploads to unstable/experimental

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: lintian reported a configured fatal tag
        """
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags we care about to a temporary file lintian can read;
        # world-readable as lintian may run as a different (unprivileged) user.
        with tempfile.NamedTemporaryFile(mode="w+t") as temptagfile:
            os.fchmod(temptagfile.fileno(), 0o644)
            for tags in lintiantags.values():
                for tag in tags:
                    print(tag, file=temptagfile)
            temptagfile.flush()

            changespath = os.path.join(upload.directory, changes.filename)

            cmd = []
            user = cnf.get('Dinstall::UnprivUser') or None
            if user is not None:
                # Drop privileges before invoking lintian.
                cmd.extend(['sudo', '-H', '-u', user])
            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temptagfile.name, changespath])
            process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8")
            output = process.stdout
            result = process.returncode

        # Exit code 2 means lintian itself failed; warn, but still try to
        # parse whatever output we got.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." %
                       (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output,
                                                      " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
898
899 900 -class SourceFormatCheck(Check):
901 """Check source format is allowed in the target suite""" 902
903 - def per_suite_check(self, upload, suite):
904 source = upload.changes.source 905 session = upload.session 906 if source is None: 907 return True 908 909 source_format = source.dsc['Format'] 910 query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite)) 911 if query.first() is None: 912 raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
913
914 915 -class SuiteCheck(Check):
916 - def per_suite_check(self, upload, suite):
917 if not suite.accept_source_uploads and upload.changes.source is not None: 918 raise Reject('The suite "{0}" does not accept source uploads.'.format(suite.suite_name)) 919 if not suite.accept_binary_uploads and len(upload.changes.binaries) != 0: 920 raise Reject('The suite "{0}" does not accept binary uploads.'.format(suite.suite_name)) 921 return True
922
923 924 -class SuiteArchitectureCheck(Check):
925 - def per_suite_check(self, upload, suite):
926 session = upload.session 927 for arch in upload.changes.architectures: 928 query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite)) 929 if query.first() is None: 930 raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name)) 931 932 return True
933
934 935 -class VersionCheck(Check):
936 """Check version constraints""" 937
938 - def _highest_source_version(self, session, source_name, suite):
939 db_source = session.query(DBSource).filter_by(source=source_name) \ 940 .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first() 941 if db_source is None: 942 return None 943 else: 944 return db_source.version
945
946 - def _highest_binary_version(self, session, binary_name, suite, architecture):
947 db_binary = session.query(DBBinary).filter_by(package=binary_name) \ 948 .filter(DBBinary.suites.contains(suite)) \ 949 .join(DBBinary.architecture) \ 950 .filter(Architecture.arch_string.in_(['all', architecture])) \ 951 .order_by(DBBinary.version.desc()).first() 952 if db_binary is None: 953 return None 954 else: 955 return db_binary.version
956
957 - def _version_checks(self, upload, suite, other_suite, op, op_name):
958 session = upload.session 959 960 if upload.changes.source is not None: 961 source_name = upload.changes.source.dsc['Source'] 962 source_version = upload.changes.source.dsc['Version'] 963 v = self._highest_source_version(session, source_name, other_suite) 964 if v is not None and not op(version_compare(source_version, v)): 965 raise Reject("Version check failed:\n" 966 "Your upload included the source package {0}, version {1},\n" 967 "however {3} already has version {2}.\n" 968 "Uploads to {5} must have a {4} version than present in {3}." 969 .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name)) 970 971 for binary in upload.changes.binaries: 972 binary_name = binary.control['Package'] 973 binary_version = binary.control['Version'] 974 architecture = binary.control['Architecture'] 975 v = self._highest_binary_version(session, binary_name, other_suite, architecture) 976 if v is not None and not op(version_compare(binary_version, v)): 977 raise Reject("Version check failed:\n" 978 "Your upload included the binary package {0}, version {1}, for {2},\n" 979 "however {4} already has version {3}.\n" 980 "Uploads to {6} must have a {5} version than present in {4}." 981 .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
982
983 - def per_suite_check(self, upload, suite):
984 session = upload.session 985 986 vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \ 987 .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances'])) 988 must_be_newer_than = [vc.reference for vc in vc_newer] 989 # Must be newer than old versions in `suite` 990 must_be_newer_than.append(suite) 991 992 for s in must_be_newer_than: 993 self._version_checks(upload, suite, s, lambda result: result > 0, 'higher') 994 995 vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan') 996 must_be_older_than = [vc.reference for vc in vc_older] 997 998 for s in must_be_older_than: 999 self._version_checks(upload, suite, s, lambda result: result < 0, 'lower') 1000 1001 return True
1002 1003 @property
1004 - def forcable(self):
1005 return True
1006