# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# Parts based on code that is
# Copyright (C) 2001-2006, James Troup <james@nocrew.org>
# Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

"""module providing pre-acceptance tests

Please read the documentation for the :class:`Check` class for the interface.
"""

from daklib.config import Config
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils
import daklib.upload

import apt_inst
import apt_pkg
from apt_pkg import version_compare
from collections.abc import Iterable
import datetime
import os
import subprocess
import tempfile
import textwrap
import time
from typing import TYPE_CHECKING
import yaml

if TYPE_CHECKING:
    import daklib.archive
    import re


def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8"""
    for field in control.keys():
        try:
            # Access the field value to make `TagSection` try to decode it.
            # We should also do the same for the field name, but this requires
            # https://bugs.debian.org/995118 to be fixed.
            # TODO: make sure the field name `field` is valid UTF-8 too
            control[field]
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))


class Reject(Exception):
    """exception raised by failing checks"""
    pass


class RejectExternalFilesMismatch(Reject):
    """exception raised by failing the external hashes check"""

    def __str__(self):
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]


class RejectACL(Reject):
    """exception raised by failing ACL checks"""

    def __init__(self, acl, reason):
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)


class Check:
    """base class for checks

    checks are called by :class:`daklib.archive.ArchiveUpload`. Failing tests should
    raise a :exc:`daklib.checks.Reject` exception including a human-readable
    description of why the upload should be rejected.
    """
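
    # A minimal check might look like the sketch below (illustrative only;
    # `ExampleCheck` and `looks_sane` are hypothetical names, not part of dak):
    #
    #   class ExampleCheck(Check):
    #       def check(self, upload):
    #           if not looks_sane(upload.changes):
    #               raise Reject('{0}: rejected for a human-readable reason'
    #                            .format(upload.changes.filename))
    #           return True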

    def check(self, upload: 'daklib.archive.ArchiveUpload'):
        """do checks

        :param upload: upload to check

        :raises Reject: upload should be rejected
        """
        raise NotImplementedError

    def per_suite_check(self, upload: 'daklib.archive.ArchiveUpload', suite: Suite):
        """do per-suite checks

        :param upload: upload to check
        :param suite: suite to check

        :raises Reject: upload should be rejected
        """
        raise NotImplementedError

    @property
    def forcable(self) -> bool:
        """allow forcing a failing test to be ignored

        :const:`True` if it is acceptable to force ignoring a failing test,
        :const:`False` otherwise
        """
        return False


class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """

    def check_replay(self, upload) -> bool:
        # Use private session as we want to remember having seen the .changes
        # in all cases.
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        if r is not None:
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
        return True

    def check(self, upload):
        allow_source_untrusted_sig_keys = Config().value_list('Dinstall::AllowSourceUntrustedSigKeys')

        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.values())

        source = None
        try:
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if changes.primary_fingerprint not in allow_source_untrusted_sig_keys:
                if not source.valid_signature:
                    raise Reject("Signature for .dsc not valid.")
                if source.primary_fingerprint != changes.primary_fingerprint:
                    raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.values())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload: 'daklib.archive.ArchiveUpload', filename: str, files: Iterable[daklib.upload.HashedFile]):
        """Make sure hashes match existing files

        :param upload: upload we are processing
        :param filename: name of the file the expected hash values are taken from
        :param files: files to check the hashes for
        """
        try:
            for f in files:
                f.check(upload.directory)
        except daklib.upload.FileDoesNotExist as e:
            raise Reject('{0}: {1}\n'
                         'Perhaps you need to include the file in your upload?\n\n'
                         'If the orig tarball is missing, the -sa flag for dpkg-buildpackage will be your friend.'
                         .format(filename, str(e)))
        except daklib.upload.UploadException as e:
            raise Reject('{0}: {1}'.format(filename, str(e)))


class WeakSignatureCheck(Check):
    """Check that .changes and .dsc are not signed using a weak algorithm"""

    def check(self, upload):
        changes = upload.changes
        if changes.weak_signature:
            raise Reject("The .changes was signed using a weak algorithm (such as SHA-1)")

        source = changes.source
        if source is not None:
            if source.weak_signature:
                raise Reject("The source package was signed using a weak algorithm (such as SHA-1)")

        return True


class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""

    def check(self, upload):
        changes = upload.changes

        now = datetime.datetime.utcnow()
        timestamp = changes.signature_timestamp
        age = now - timestamp

        age_max = datetime.timedelta(days=365)
        age_min = datetime.timedelta(days=-7)

        if age > age_max:
            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
        if age < age_min:
            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))

        return True


class ChangesCheck(Check):
    """Check changes file for syntax errors."""

    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        if len(changes.binaries) > 0:
            for field in ('Binary', 'Description'):
                if field not in control:
                    raise Reject('{0}: binary upload requires {1} field'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        if changes.sourceful and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and not changes.sourceful:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        try:
            changes.byhand_files
        except daklib.upload.InvalidChangesException as e:
            raise Reject('{0}'.format(e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True


class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""

    def check_single(self, session, f):
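        # The LIKE pattern matches the uploaded file's basename under any
        # directory prefix in the external_files table; a missing row (no
        # size) simply means there is nothing to compare against.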
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern", {'pattern': '%/{}'.format(f.filename)})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.values():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.values():
                self.check_single(session, f)


class BinaryCheck(Check):
    """Check binary packages for syntax errors."""

    def check(self, upload):
        debug_deb_name_postfix = "-dbgsym"
        # XXX: Handle dynamic debug section name here

        self._architectures = set()

        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        for arch in upload.changes.architectures:
            if arch == 'source':
                continue
            if arch not in self._architectures:
                raise Reject('{}: Architecture field includes {}, but no binary packages for {} are included in the upload'.format(upload.changes.filename, arch, arch))

        binaries = {binary.control['Package']: binary
                    for binary in upload.changes.binaries}

        for name, binary in list(binaries.items()):
            if name in upload.changes.binary_names:
                # Package is listed in Binary field. Everything is good.
                pass
            elif daklib.utils.is_in_debug_section(binary.control):
                # If we have a binary package in the debug section, we
                # can allow it to not be present in the Binary field
                # in the .changes file, so long as its name (without
                # -dbgsym) is present in the Binary list.
                if not name.endswith(debug_deb_name_postfix):
                    raise Reject('Package {0} is in the debug section, but '
                                 'does not end in {1}.'.format(name, debug_deb_name_postfix))

                # Right, so, it's named properly, let's check that
                # the corresponding package is in the Binary list
                origin_package_name = name[:-len(debug_deb_name_postfix)]
                if origin_package_name not in upload.changes.binary_names:
                    raise Reject(
                        "Debug package {debug}'s corresponding binary package "
                        "{origin} is not present in the Binary field.".format(
                            debug=name, origin=origin_package_name))
            else:
                # Someone was a nasty little hacker and put a package
                # into the .changes that isn't in debian/control. Bad,
                # bad person.
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))

        return True

    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
        self._architectures.add(architecture)

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        section = control.get('Section', '')
        if section == '' or section == 'unknown' or section.endswith("/unknown"):
            raise Reject('{0}: The "Section" field must be present and use a real section name.'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        def check_dependency_field(
                field, control,
                dependency_parser=apt_pkg.parse_depends,
                allow_alternatives=True,
                allow_relations=('', '<', '<=', '=', '>=', '>')):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    depends = dependency_parser(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
                for group in depends:
                    if not allow_alternatives and len(group) != 1:
                        raise Reject('{0}: {1}: alternatives are not allowed'.format(fn, field))
                    for dep_pkg, dep_ver, dep_rel in group:
                        if dep_rel not in allow_relations:
                            raise Reject('{}: {}: depends on {}, but only relations {} are allowed for this field'.format(fn, field, " ".join([dep_pkg, dep_rel, dep_ver]), allow_relations))
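
        # Ordinary binary relationship fields accept the full set of version
        # relations and alternatives; Provides and Built-Using below are
        # deliberately restricted to the relations dak accepts for them.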
        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Recommends', 'Replaces', 'Suggests'):
            check_dependency_field(field, control)

        check_dependency_field("Provides", control,
                               allow_alternatives=False,
                               allow_relations=('', '='))
        check_dependency_field("Built-Using", control,
                               dependency_parser=apt_pkg.parse_src_depends,
                               allow_alternatives=False,
                               allow_relations=('=',))


_DEB_ALLOWED_MEMBERS = {
    "debian-binary",
    *(f"control.tar.{comp}" for comp in ("gz", "xz")),
    *(f"data.tar.{comp}" for comp in ("gz", "bz2", "xz")),
}


class BinaryMembersCheck(Check):
    """check members of .deb file"""

    def check(self, upload):
        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            self._check_binary(filename, path)
        return True

    def _check_binary(self, filename: str, path: str) -> None:
        deb = apt_inst.DebFile(path)
        members = set(member.name for member in deb.getmembers())
        if blocked_members := members - _DEB_ALLOWED_MEMBERS:
            raise Reject(f"{filename}: Contains blocked members {', '.join(blocked_members)}")


class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """

    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24 * 3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))
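
        # TarTime is a small helper for apt_inst's tar walker: its callback
        # records any member whose mtime falls outside the allowed window.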
        class TarTime:
            def __init__(self):
                self.future_files: dict[str, int] = {}
                self.past_files: dict[str, int] = {}

            def callback(self, member, data) -> None:
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files) -> str:
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.items():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            for archive in (deb.control, deb.data):
                archive.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))


class SourceCheck(Check):
    """Check source package for syntax errors."""

    def check_filename(self, control, filename, regex: re.Pattern) -> None:
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            if upload.changes.sourceful:
                raise Reject("{}: Architecture field includes source, but no source package is included in the upload".format(upload.changes.filename))
            return True

        if not upload.changes.sourceful:
            raise Reject("{}: Architecture field does not include source, but a source package is included in the upload".format(upload.changes.filename))

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.values():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, list(source.files.keys()))
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True


class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""

    def check(self, upload):
        if len(upload.changes.distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")


class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
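        # Return convention (as used by check() and per_suite_check() below):
        # (None, None) if this ACL does not apply to the upload's key,
        # (False, reason) if the ACL rejects the upload, and (True, None)
        # if the ACL accepts it.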
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.values():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise RejectACL(acl, reason)

        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result is False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result is False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True


class TransitionCheck(Check):
    """check for a transition"""

    def check(self, upload):
        if not upload.changes.sourceful:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
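        # The transitions file is YAML; check() above expects a mapping of
        # transition names to entries with at least "source", "new", "rm",
        # "reason" and "packages" keys. Illustrative sketch only (names and
        # values below are hypothetical):
        #
        #   libfoo-transition:
        #     source: libfoo
        #     new: 1.2-1
        #     rm: Some Release Team Member
        #     reason: "libfoo soname bump"
        #     packages:
        #       - bar
        #       - baz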
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None


class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """

    def is_source_only_upload(self, upload) -> bool:
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_source_only_new_keys = Config().value_list('Dinstall::AllowSourceOnlyNewKeys')
        allow_source_only_new_sources = Config().value_list('Dinstall::AllowSourceOnlyNewSources')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads', True)
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
                and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also lists architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new \
                and changes.primary_fingerprint not in allow_source_only_new_keys \
                and changes.source_name not in allow_source_only_new_sources:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if 'all' not in changes.architectures and changes.source.package_list.has_arch_indep_packages():
            if not allow_no_arch_indep_uploads:
                raise Reject('Uploads must include architecture-independent packages.')

        return True


class NewOverrideCheck(Check):
    """Override NEW requirement"""

    def check(self, upload):
        if not upload.new:
            return True

        new_override_keys = Config().value_list('Dinstall::NewOverrideKeys')
        changes = upload.changes

        if changes.primary_fingerprint in new_override_keys:
            upload.new = False

        return True


class ArchAllBinNMUCheck(Check):
    """Check for arch:all binNMUs"""

    def check(self, upload):
        changes = upload.changes

        if 'all' in changes.architectures and changes.changes.get('Binary-Only') == 'yes':
            raise Reject('arch:all binNMUs are not allowed.')

        return True


class LintianCheck(Check):
    """Check package using lintian"""

    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']
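
        # The tag file is expected to be YAML with a top-level "lintian" key
        # mapping tag categories to lists of tag names; every listed tag is
        # passed to lintian via --tags-from-file below.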
        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        with tempfile.NamedTemporaryFile(mode="w+t") as temptagfile:
            os.fchmod(temptagfile.fileno(), 0o644)
            for tags in lintiantags.values():
                for tag in tags:
                    print(tag, file=temptagfile)
            temptagfile.flush()

            changespath = os.path.join(upload.directory, changes.filename)

            cmd = []
            user = cnf.get('Dinstall::UnprivUser') or None
            if user is not None:
                cmd.extend(['sudo', '-H', '-u', user])
            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temptagfile.name, changespath])
            process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8")
            output = process.stdout
            result = process.returncode

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." %
                       (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output,
                       " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True


class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""

    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))


class SuiteCheck(Check):
    def per_suite_check(self, upload, suite):
        if not suite.accept_source_uploads and upload.changes.source is not None:
            raise Reject('The suite "{0}" does not accept source uploads.'.format(suite.suite_name))
        if not suite.accept_binary_uploads and len(upload.changes.binaries) != 0:
            raise Reject('The suite "{0}" does not accept binary uploads.'.format(suite.suite_name))
        return True


class SuiteArchitectureCheck(Check):
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True


class VersionCheck(Check):
    """Check version constraints"""

    def _highest_source_version(self, session, source_name, suite):
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
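        # `op` receives the result of version_compare(upload_version,
        # existing_version) and must return True when the relation against
        # `other_suite` is acceptable; `op_name` ("higher"/"lower") is only
        # used to word the rejection message.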
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [vc.reference for vc in vc_newer]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [vc.reference for vc in vc_older]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')

        return True

    @property
    def forcable(self) -> bool:
        return True