# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# Parts based on code that is
# Copyright (C) 2001-2006, James Troup <james@nocrew.org>
# Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

"""module providing pre-acceptance tests

Please read the documentation for the :class:`Check` class for the interface.
"""

from daklib.config import Config
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils
import daklib.upload

import apt_inst
import apt_pkg
from apt_pkg import version_compare
from collections.abc import Iterable
import datetime
import os
import subprocess
import tempfile
import textwrap
import time
from typing import TYPE_CHECKING
import yaml

if TYPE_CHECKING:
    import daklib.archive
    import re

def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8"""
    for field in control.keys():
        try:
            # Access the field value to make `TagSection` try to decode it.
            # We should also do the same for the field name, but this requires
            # https://bugs.debian.org/995118 to be fixed.
            # TODO: make sure the field name `field` is valid UTF-8 too
            control[field]
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))

class Reject(Exception):
    """exception raised by failing checks"""
    pass


class RejectExternalFilesMismatch(Reject):
    """exception raised by failing the external hashes check"""

    def __str__(self):
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]

class RejectACL(Reject):
    """exception raised by failing ACL checks"""

    def __init__(self, acl, reason):
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)

class Check:
    """base class for checks

    checks are called by :class:`daklib.archive.ArchiveUpload`. Failing tests should
    raise a :exc:`daklib.checks.Reject` exception including a human-readable
    description why the upload should be rejected.
    """

    def check(self, upload: 'daklib.archive.ArchiveUpload'):
        """do checks

        :param upload: upload to check

        :raises Reject: upload should be rejected
        """
        raise NotImplementedError

    def per_suite_check(self, upload: 'daklib.archive.ArchiveUpload', suite: Suite):
        """do per-suite checks

        :param upload: upload to check
        :param suite: suite to check

        :raises Reject: upload should be rejected
        """
        raise NotImplementedError

    @property
    def forcable(self) -> bool:
        """allow forcing a failing check to be ignored

        :const:`True` if it is acceptable to force ignoring a failing test,
        :const:`False` otherwise
        """
        return False
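
# A minimal sketch of a custom check, assuming only the `Check` interface
# documented above; the class below is purely illustrative and is not
# registered or used anywhere in dak.
class _ExampleNoopCheck(Check):
    """illustrative check that accepts every upload"""

    def check(self, upload: 'daklib.archive.ArchiveUpload'):
        # A real check would inspect `upload.changes` and raise
        # `Reject("...")` with a human-readable reason if the upload
        # must not be accepted.
        return True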

class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """

    def check_replay(self, upload) -> bool:
        # Use private session as we want to remember having seen the .changes
        # in all cases.
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        if r is not None:
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
        return True

    def check(self, upload):
        allow_source_untrusted_sig_keys = Config().value_list('Dinstall::AllowSourceUntrustedSigKeys')

        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.values())

        source = None
        try:
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if changes.primary_fingerprint not in allow_source_untrusted_sig_keys:
                if not source.valid_signature:
                    raise Reject("Signature for .dsc not valid.")
                if source.primary_fingerprint != changes.primary_fingerprint:
                    raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.values())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload: 'daklib.archive.ArchiveUpload', filename: str, files: Iterable[daklib.upload.HashedFile]):
        """Make sure hashes match existing files

        :param upload: upload we are processing
        :param filename: name of the file the expected hash values are taken from
        :param files: files to check the hashes for
        """
        try:
            for f in files:
                f.check(upload.directory)
        except daklib.upload.FileDoesNotExist as e:
            raise Reject('{0}: {1}\n'
                         'Perhaps you need to include the file in your upload?\n\n'
                         'If the orig tarball is missing, the -sa flag for dpkg-buildpackage will be your friend.'
                         .format(filename, str(e)))
        except daklib.upload.UploadException as e:
            raise Reject('{0}: {1}'.format(filename, str(e)))

class WeakSignatureCheck(Check):
    """Check that .changes and .dsc are not signed using a weak algorithm"""

    def check(self, upload):
        changes = upload.changes
        if changes.weak_signature:
            raise Reject("The .changes was signed using a weak algorithm (such as SHA-1)")

        source = changes.source
        if source is not None:
            if source.weak_signature:
                raise Reject("The source package was signed using a weak algorithm (such as SHA-1)")

        return True


class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""

    def check(self, upload):
        changes = upload.changes

        now = datetime.datetime.utcnow()
        timestamp = changes.signature_timestamp
        age = now - timestamp

        age_max = datetime.timedelta(days=365)
        age_min = datetime.timedelta(days=-7)

        if age > age_max:
            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
        if age < age_min:
            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))

        return True

class ChangesCheck(Check):
    """Check changes file for syntax errors."""

    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        if len(changes.binaries) > 0:
            for field in ('Binary', 'Description'):
                if field not in control:
                    raise Reject('{0}: binary upload requires {1} field'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        if changes.sourceful and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and not changes.sourceful:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        try:
            changes.byhand_files
        except daklib.upload.InvalidChangesException as e:
            raise Reject('{0}'.format(e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True

class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""

    def check_single(self, session, f):
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern", {'pattern': '%/{}'.format(f.filename)})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.values():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.values():
                self.check_single(session, f)

class BinaryCheck(Check):
    """Check binary packages for syntax errors."""

    def check(self, upload):
        debug_deb_name_postfix = "-dbgsym"
        # XXX: Handle dynamic debug section name here

        self._architectures = set()

        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        for arch in upload.changes.architectures:
            if arch == 'source':
                continue
            if arch not in self._architectures:
                raise Reject('{}: Architecture field includes {}, but no binary packages for {} are included in the upload'.format(upload.changes.filename, arch, arch))

        binaries = {binary.control['Package']: binary
                    for binary in upload.changes.binaries}

        for name, binary in list(binaries.items()):
            if name in upload.changes.binary_names:
                # Package is listed in Binary field. Everything is good.
                pass
            elif daklib.utils.is_in_debug_section(binary.control):
                # If we have a binary package in the debug section, we
                # can allow it to not be present in the Binary field
                # in the .changes file, so long as its name (without
                # -dbgsym) is present in the Binary list.
                if not name.endswith(debug_deb_name_postfix):
                    raise Reject('Package {0} is in the debug section, but '
                                 'does not end in {1}.'.format(name, debug_deb_name_postfix))

                # Right, so, it's named properly, let's check that
                # the corresponding package is in the Binary list
                origin_package_name = name[:-len(debug_deb_name_postfix)]
                if origin_package_name not in upload.changes.binary_names:
                    raise Reject(
                        "Debug package {debug}'s corresponding binary package "
                        "{origin} is not present in the Binary field.".format(
                            debug=name, origin=origin_package_name))
            else:
                # Someone was a nasty little hacker and put a package
                # into the .changes that isn't in debian/control. Bad,
                # bad person.
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))

        return True

    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
        self._architectures.add(architecture)

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        def check_dependency_field(
                field, control,
                dependency_parser=apt_pkg.parse_depends,
                allow_alternatives=True,
                allow_relations=('', '<', '<=', '=', '>=', '>')):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    depends = dependency_parser(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
                for group in depends:
                    if not allow_alternatives and len(group) != 1:
                        raise Reject('{0}: {1}: alternatives are not allowed'.format(fn, field))
                    for dep_pkg, dep_ver, dep_rel in group:
                        if dep_rel not in allow_relations:
                            raise Reject('{}: {}: depends on {}, but only relations {} are allowed for this field'.format(fn, field, " ".join([dep_pkg, dep_rel, dep_ver]), allow_relations))

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Recommends', 'Replaces', 'Suggests'):
            check_dependency_field(field, control)

        check_dependency_field("Provides", control,
                               allow_alternatives=False,
                               allow_relations=('', '='))
        check_dependency_field("Built-Using", control,
                               dependency_parser=apt_pkg.parse_src_depends,
                               allow_alternatives=False,
                               allow_relations=('=',))
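
# A small sketch (not used by dak) of the structure apt_pkg.parse_depends()
# returns and that check_dependency_field() above iterates over: a list of
# "or"-groups, each group being a list of (package, version, relation) tuples.
# The relationship string shown here is illustrative.
def _example_parse_depends() -> None:
    depends = apt_pkg.parse_depends("foo (>= 1.0) | bar, baz")
    # Two groups: the alternatives "foo (>= 1.0) | bar", and "baz" on its own.
    assert len(depends) == 2
    for group in depends:
        for dep_pkg, dep_ver, dep_rel in group:
            # e.g. ('foo', '1.0', '>='); an unversioned dependency has an
            # empty version and relation.
            assert isinstance(dep_pkg, str)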

_DEB_ALLOWED_MEMBERS = {
    "debian-binary",
    *(f"control.tar.{comp}" for comp in ("gz", "xz")),
    *(f"data.tar.{comp}" for comp in ("gz", "bz2", "xz")),
}


class BinaryMembersCheck(Check):
    """check members of .deb file"""

    def check(self, upload):
        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            self._check_binary(filename, path)
        return True

    def _check_binary(self, filename: str, path: str) -> None:
        deb = apt_inst.DebFile(path)
        members = set(member.name for member in deb.getmembers())
        if blocked_members := members - _DEB_ALLOWED_MEMBERS:
            raise Reject(f"{filename}: Contains blocked members {', '.join(blocked_members)}")

class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """

    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24 * 3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime:
            def __init__(self):
                self.future_files: dict[str, int] = {}
                self.past_files: dict[str, int] = {}

            def callback(self, member, data) -> None:
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files) -> str:
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.items():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            for archive in (deb.control, deb.data):
                archive.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))

class SourceCheck(Check):
    """Check source package for syntax errors."""

    def check_filename(self, control, filename, regex: 're.Pattern') -> None:
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            if upload.changes.sourceful:
                raise Reject("{}: Architecture field includes source, but no source package is included in the upload".format(upload.changes.filename))
            return True

        if not upload.changes.sourceful:
            raise Reject("{}: Architecture field does not include source, but a source package is included in the upload".format(upload.changes.filename))

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.values():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, list(source.files.keys()))
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True

class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""

    def check(self, upload):
        if len(upload.changes.distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")

class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.values():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise RejectACL(acl, reason)

        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result is False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result is False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True

class TransitionCheck(Check):
    """check for a transition"""

    def check(self, upload):
        if not upload.changes.sourceful:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
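
# A sketch (values invented) of the structure the check above expects from the
# Dinstall::ReleaseTransitions YAML file after yaml.safe_load(): a mapping of
# transition names to dictionaries carrying the keys used in check() above.
# Not used by dak itself; for illustration only.
_EXAMPLE_TRANSITIONS = {
    'example-transition': {
        'source': 'libexample',          # source package driving the transition
        'new': '2.0-1',                  # version we are waiting for in testing
        'reason': 'transition to libexample2',
        'rm': 'Some Release Team Member',
        'packages': ['libexample', 'some-reverse-dependency'],
    },
}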

class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """

    def is_source_only_upload(self, upload) -> bool:
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_source_only_new_keys = Config().value_list('Dinstall::AllowSourceOnlyNewKeys')
        allow_source_only_new_sources = Config().value_list('Dinstall::AllowSourceOnlyNewSources')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads', True)
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
                and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also lists architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new \
                and changes.primary_fingerprint not in allow_source_only_new_keys \
                and changes.source_name not in allow_source_only_new_sources:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if 'all' not in changes.architectures and changes.source.package_list.has_arch_indep_packages():
            if not allow_no_arch_indep_uploads:
                raise Reject('Uploads must include architecture-independent packages.')

        return True

class NewOverrideCheck(Check):
    """Override NEW requirement"""

    def check(self, upload):
        if not upload.new:
            return True

        new_override_keys = Config().value_list('Dinstall::NewOverrideKeys')
        changes = upload.changes

        if changes.primary_fingerprint in new_override_keys:
            upload.new = False

        return True


class ArchAllBinNMUCheck(Check):
    """Check for arch:all binNMUs"""

    def check(self, upload):
        changes = upload.changes

        if 'all' in changes.architectures and changes.changes.get('Binary-Only') == 'yes':
            raise Reject('arch:all binNMUs are not allowed.')

        return True

class LintianCheck(Check):
    """Check package using lintian"""

    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        with tempfile.NamedTemporaryFile(mode="w+t") as temptagfile:
            os.fchmod(temptagfile.fileno(), 0o644)
            for tags in lintiantags.values():
                for tag in tags:
                    print(tag, file=temptagfile)
            temptagfile.flush()

            changespath = os.path.join(upload.directory, changes.filename)

            cmd = []
            user = cnf.get('Dinstall::UnprivUser') or None
            if user is not None:
                cmd.extend(['sudo', '-H', '-u', user])
            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temptagfile.name, changespath])
            process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8")
            output = process.stdout
            result = process.returncode

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." %
                       (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output,
                                                      " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True

class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""

    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))


class SuiteCheck(Check):
    def per_suite_check(self, upload, suite):
        if not suite.accept_source_uploads and upload.changes.source is not None:
            raise Reject('The suite "{0}" does not accept source uploads.'.format(suite.suite_name))
        if not suite.accept_binary_uploads and len(upload.changes.binaries) != 0:
            raise Reject('The suite "{0}" does not accept binary uploads.'.format(suite.suite_name))
        return True


class SuiteArchitectureCheck(Check):
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True

class VersionCheck(Check):
    """Check version constraints"""

    def _highest_source_version(self, session, source_name, suite):
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [vc.reference for vc in vc_newer]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [vc.reference for vc in vc_older]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')

        return True

    @property
    def forcable(self) -> bool:
        return True
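

# A small sketch (not part of dak) of the sign convention the lambdas in
# VersionCheck.per_suite_check rely on: apt_pkg.version_compare(a, b) returns
# a positive value when a is newer than b, zero when equal, and a negative
# value when a is older. The version strings below are illustrative.
def _example_version_compare() -> None:
    assert version_compare("2.0-1", "1.0-1") > 0    # 2.0-1 is 'higher'
    assert version_compare("1.0-1", "1.0-2") < 0    # 1.0-1 is 'lower'
    assert version_compare("1:1.0-1", "2.0-1") > 0  # an epoch beats a larger upstream version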