Coverage for daklib/archive.py: 75%

829 statements  

« prev     ^ index     » next       coverage.py v7.6.0, created at 2026-01-04 16:18 +0000

1# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org> 

2# 

3# This program is free software; you can redistribute it and/or modify 

4# it under the terms of the GNU General Public License as published by 

5# the Free Software Foundation; either version 2 of the License, or 

6# (at your option) any later version. 

7# 

8# This program is distributed in the hope that it will be useful, 

9# but WITHOUT ANY WARRANTY; without even the implied warranty of 

10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

11# GNU General Public License for more details. 

12# 

13# You should have received a copy of the GNU General Public License along 

14# with this program; if not, write to the Free Software Foundation, Inc., 

15# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 

16 

17"""module to manipulate the archive 

18 

19This module provides classes to manipulate the archive. 

20""" 

21 

22import os 

23import shutil 

24import subprocess 

25import traceback 

26from collections.abc import Callable, Collection, Iterable 

27from typing import TYPE_CHECKING, Optional, Union 

28 

29import sqlalchemy.exc 

30from sqlalchemy import sql 

31from sqlalchemy.orm import object_session 

32from sqlalchemy.orm.exc import NoResultFound 

33 

34import daklib.checks as checks 

35import daklib.upload 

36import daklib.utils 

37from daklib.config import Config 

38from daklib.dbconn import ( 

39 Archive, 

40 ArchiveFile, 

41 Component, 

42 DBBinary, 

43 DBChange, 

44 DBConn, 

45 DBSource, 

46 DSCFile, 

47 Fingerprint, 

48 Maintainer, 

49 Override, 

50 OverrideType, 

51 PolicyQueue, 

52 PolicyQueueByhandFile, 

53 PolicyQueueUpload, 

54 PoolFile, 

55 Suite, 

56 VersionCheck, 

57 get_architecture, 

58 get_mapped_component, 

59 get_or_set_maintainer, 

60 import_metadata_into_db, 

61) 

62from daklib.externalsignature import check_upload_for_external_signature_request 

63from daklib.fstransactions import FilesystemTransaction 

64from daklib.regexes import re_bin_only_nmu, re_changelog_versions 

65from daklib.tag2upload import get_tag2upload_info_for_upload, parse_git_tag_info 

66 

67if TYPE_CHECKING: 

68 import daklib.packagelist 

69 

70 

class ArchiveException(Exception):
    """Base exception for errors while manipulating the archive."""

    pass


class HashMismatchException(ArchiveException):
    """A file's size or checksums do not match the database record."""

    pass

77 

78 

class ArchiveTransaction:
    """manipulate the archive in a transaction"""

    def __init__(self):
        # Filesystem changes are staged in a FilesystemTransaction so they
        # can be committed or rolled back together with the database
        # session (see commit() / rollback()).
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()

85 

86 def get_file( 

87 self, 

88 hashed_file: daklib.upload.HashedFile, 

89 source_name: str, 

90 check_hashes: bool = True, 

91 ) -> PoolFile: 

92 """Look for file `hashed_file` in database 

93 

94 :param hashed_file: file to look for in the database 

95 :param source_name: source package name 

96 :param check_hashes: check size and hashes match 

97 :return: database entry for the file 

98 :raises KeyError: file was not found in the database 

99 :raises HashMismatchException: hash mismatch 

100 """ 

101 poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename) 

102 try: 

103 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one() 

104 if check_hashes and ( 104 ↛ 110line 104 didn't jump to line 110 because the condition on line 104 was never true

105 poolfile.filesize != hashed_file.size 

106 or poolfile.md5sum != hashed_file.md5sum 

107 or poolfile.sha1sum != hashed_file.sha1sum 

108 or poolfile.sha256sum != hashed_file.sha256sum 

109 ): 

110 raise HashMismatchException( 

111 "{0}: Does not match file already existing in the pool.".format( 

112 hashed_file.filename 

113 ) 

114 ) 

115 return poolfile 

116 except NoResultFound: 

117 raise KeyError("{0} not found in database.".format(poolname)) 

118 

119 def _install_file( 

120 self, directory, hashed_file, archive, component, source_name 

121 ) -> PoolFile: 

122 """Install a file 

123 

124 Will not give an error when the file is already present. 

125 

126 :return: database object for the new file 

127 """ 

128 session = self.session 

129 

130 poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename) 

131 try: 

132 poolfile = self.get_file(hashed_file, source_name) 

133 except KeyError: 

134 poolfile = PoolFile(filename=poolname, filesize=hashed_file.size) 

135 poolfile.md5sum = hashed_file.md5sum 

136 poolfile.sha1sum = hashed_file.sha1sum 

137 poolfile.sha256sum = hashed_file.sha256sum 

138 session.add(poolfile) 

139 session.flush() 

140 

141 try: 

142 session.query(ArchiveFile).filter_by( 

143 archive=archive, component=component, file=poolfile 

144 ).one() 

145 except NoResultFound: 

146 archive_file = ArchiveFile(archive, component, poolfile) 

147 session.add(archive_file) 

148 session.flush() 

149 

150 path = os.path.join( 

151 archive.path, "pool", component.component_name, poolname 

152 ) 

153 hashed_file_path = os.path.join(directory, hashed_file.input_filename) 

154 self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode) 

155 

156 return poolfile 

157 

    def install_binary(
        self,
        directory: str,
        binary: daklib.upload.Binary,
        suite: Suite,
        component: Component,
        *,
        allow_tainted: bool = False,
        fingerprint: Optional[Fingerprint] = None,
        authorized_by_fingerprint: Optional[Fingerprint] = None,
        source_suites=None,
        extra_source_archives: Optional[Iterable[Archive]] = None,
    ) -> DBBinary:
        """Install a binary package

        :param directory: directory the binary package is located in
        :param binary: binary package to install
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy additional files from tainted archives
        :param fingerprint: optional fingerprint
        :param authorized_by_fingerprint: optional fingerprint of the key that
            authorized the upload
        :param source_suites: suites to copy the source from if they are not
                              in `suite` or :const:`True` to allow copying from any
                              suite.
                              Can be a SQLAlchemy subquery for :class:`Suite` or :const:`True`.
        :param extra_source_archives: extra archives to copy Built-Using sources from
        :return: database object for the new package
        :raises ArchiveException: source could not be found, or an existing
            binary with the same (package, version, architecture) does not
            match this upload
        """
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control["Maintainer"], session)
        architecture = get_architecture(control["Architecture"], session)

        # The matching source must be in the target suite, or be copied in
        # from one of the allowed `source_suites`.
        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(
            source=source_name, version=source_version
        )
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        if source is None:
            if source_suites is not True:
                # Restrict the search to the suites we may copy from.
                source_query = source_query.join(DBSource.suites).filter(
                    Suite.suite_id == source_suites.c.id
                )
            source = source_query.first()
            if source is None:
                raise ArchiveException(
                    "{0}: trying to install to {1}, but could not find source ({2} {3})".format(
                        binary.hashed_file.filename,
                        suite.suite_name,
                        source_name,
                        source_version,
                    )
                )
            # Pull the source into the target suite as well.
            self.copy_source(source, suite, source.poolfile.component)

        db_file = self._install_file(
            directory, binary.hashed_file, suite.archive, component, source_name
        )

        # `unique` identifies the binary; `rest` must match an existing row.
        unique = dict(
            package=control["Package"],
            version=control["Version"],
            architecture=architecture,
        )
        rest = dict(
            source=source,
            maintainer=maintainer,
            poolfile=db_file,
            binarytype=binary.type,
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            fingerprint=fingerprint,
            authorized_by_fingerprint=authorized_by_fingerprint,
        )

        try:
            db_binary = session.query(DBBinary).filter_by(**unique).one()
            # Re-upload of a known binary: verify it is byte-for-byte the same.
            for key, value in rest.items():
                if getattr(db_binary, key) != value:
                    raise ArchiveException(
                        "{0}: Does not match binary in database.".format(
                            binary.hashed_file.filename
                        )
                    )
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.items():
                setattr(db_binary, key, value)
            for key, value in rest2.items():
                setattr(db_binary, key, value)
            session.add(db_binary)
            session.flush()
            import_metadata_into_db(db_binary, session)

        # Built-Using sources must be present in the target archive.
        self._add_built_using(
            db_binary,
            binary.hashed_file.filename,
            control,
            suite,
            extra_archives=extra_source_archives,
        )

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)

        session.flush()

        return db_binary

268 

    def _ensure_extra_source_exists(
        self,
        filename: str,
        source: DBSource,
        archive: Archive,
        extra_archives: Optional[Iterable[Archive]] = None,
    ):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        :param filename: filename to use in error messages
        :param source: source to look for
        :param archive: archive to look in
        :param extra_archives: list of archives to copy the source package from
                               if it is not yet present in `archive`
        :raises ArchiveException: source is in neither `archive` nor any of
            the `extra_archives`
        """
        session = self.session
        db_file = (
            session.query(ArchiveFile)
            .filter_by(file=source.poolfile, archive=archive)
            .first()
        )
        if db_file is not None:
            # Already present in the target archive; nothing to do.
            return True

        # Try to copy file from one extra archive
        if extra_archives is None:
            extra_archives = []
        db_file = (
            session.query(ArchiveFile)
            .filter_by(file=source.poolfile)
            .filter(ArchiveFile.archive_id.in_([a.archive_id for a in extra_archives]))
            .first()
        )
        if db_file is None:
            raise ArchiveException(
                "{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.".format(
                    filename, source.source, source.version, archive.archive_name
                )
            )

        # Copy every file of the source package from the archive we found it
        # in, keeping the component of the .dsc's archive file.
        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = (
                session.query(ArchiveFile)
                .filter_by(
                    file=dsc_file.poolfile,
                    archive=source_archive,
                    component=db_file.component,
                )
                .one()
            )
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)

324 

325 def _add_built_using( 

326 self, db_binary, filename, control, suite, extra_archives=None 

327 ) -> None: 

328 """Add Built-Using sources to ``db_binary.extra_sources``""" 

329 session = self.session 

330 

331 for bu_source_name, bu_source_version in daklib.utils.parse_built_using( 

332 control 

333 ): 

334 bu_source = ( 

335 session.query(DBSource) 

336 .filter_by(source=bu_source_name, version=bu_source_version) 

337 .first() 

338 ) 

339 if bu_source is None: 339 ↛ 340line 339 didn't jump to line 340 because the condition on line 339 was never true

340 raise ArchiveException( 

341 "{0}: Built-Using refers to non-existing source package {1} (= {2})".format( 

342 filename, bu_source_name, bu_source_version 

343 ) 

344 ) 

345 

346 self._ensure_extra_source_exists( 

347 filename, bu_source, suite.archive, extra_archives=extra_archives 

348 ) 

349 

350 db_binary.extra_sources.append(bu_source) 

351 

    def _add_dsc_files(
        self,
        directory: str,
        archive: Archive,
        component: Component,
        source: DBSource,
        files: Iterable[daklib.upload.HashedFile],
        *,
        allow_tainted: bool,
        extra_file: bool = False,
    ) -> None:
        """Add files referenced by a .dsc to the database and archive.

        Files present in `directory` are installed from there; files missing
        from the upload are expected to already exist in the pool and are
        copied into the target archive instead.

        :param directory: directory the upload is located in
        :param archive: target archive
        :param component: target component
        :param source: source package the files belong to
        :param files: files to add
        :param allow_tainted: allow to copy files from tainted archives
        :param extra_file: mark the files as extra (non-.dsc-listed) files
        """
        for hashed_file in files:
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            if os.path.exists(hashed_file_path):
                # File was included in the upload: install it.
                db_file = self._install_file(
                    directory, hashed_file, archive, component, source.source
                )
                self.session.add(db_file)
            else:
                # File not uploaded: it must already be known; copy it into
                # the target archive.
                db_file = self.get_file(hashed_file, source.source)
                self._copy_file(
                    db_file, archive, component, allow_tainted=allow_tainted
                )

            db_dsc_file = DSCFile()
            db_dsc_file.source = source
            db_dsc_file.poolfile = db_file
            db_dsc_file.extra_file = extra_file
            self.session.add(db_dsc_file)

381 

    def install_source_to_archive(
        self,
        directory: str,
        source: daklib.upload.Source,
        archive: Archive,
        component: Component,
        changed_by: Maintainer,
        *,
        allow_tainted=False,
        fingerprint: Optional[Fingerprint] = None,
        authorized_by_fingerprint: Optional[Fingerprint] = None,
        extra_source_files: Iterable[daklib.upload.HashedFile] = [],
    ) -> DBSource:
        """Install source package to archive

        :param directory: directory the source package is located in
        :param source: source package to install
        :param archive: target archive
        :param component: target component
        :param changed_by: person who prepared this version of the package
        :param allow_tainted: allow to copy additional files from tainted archives
        :param fingerprint: optional fingerprint
        :param authorized_by_fingerprint: optional fingerprint of the key that
            authorized the upload
        :param extra_source_files: additional files to install alongside the
            files listed in the .dsc
        :return: database object for the new source
        :raises ArchiveException: an existing source with the same name and
            version does not match this upload
        """
        session = self.session
        control = source.dsc
        maintainer = get_or_set_maintainer(control["Maintainer"], session)
        source_name = control["Source"]

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(
            directory, source._dsc_file, archive, component, source_name
        )

        # `unique` identifies the source; `rest` must match an existing row.
        unique = dict(
            source=source_name,
            version=control["Version"],
        )
        rest = dict(
            maintainer=maintainer,
            poolfile=db_file_dsc,
            dm_upload_allowed=(control.get("DM-Upload-Allowed", "no") == "yes"),
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            changedby=changed_by,
            fingerprint=fingerprint,
            authorized_by_fingerprint=authorized_by_fingerprint,
        )

        created = False
        try:
            db_source = session.query(DBSource).filter_by(**unique).one()
            # Re-upload of a known source: verify it matches.
            for key, value in rest.items():
                if getattr(db_source, key) != value:
                    raise ArchiveException(
                        "{0}: Does not match source in database.".format(
                            source._dsc_file.filename
                        )
                    )
        except NoResultFound:
            created = True
            db_source = DBSource(**unique)
            for key, value in rest.items():
                setattr(db_source, key, value)
            for key, value in rest2.items():
                setattr(db_source, key, value)
            session.add(db_source)
            session.flush()

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)
            session.flush()

        if not created:
            # Source already known: just make sure all its files are present
            # in the target archive/component.
            for f in db_source.srcfiles:
                self._copy_file(
                    f.poolfile, archive, component, allow_tainted=allow_tainted
                )
            return db_source

        ### Now add remaining files and copy them to the archive.
        self._add_dsc_files(
            directory,
            archive,
            component,
            db_source,
            source.files.values(),
            allow_tainted=allow_tainted,
        )
        self._add_dsc_files(
            directory,
            archive,
            component,
            db_source,
            extra_source_files,
            allow_tainted=allow_tainted,
            extra_file=True,
        )

        session.flush()

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if "Uploaders" in control:
            from daklib.textutils import split_uploaders

            for u in split_uploaders(control["Uploaders"]):
                db_source.uploaders.append(get_or_set_maintainer(u, session))
        session.flush()

        return db_source

493 

494 def install_source( 

495 self, 

496 directory: str, 

497 source: daklib.upload.Source, 

498 suite: Suite, 

499 component: Component, 

500 changed_by: Maintainer, 

501 *, 

502 allow_tainted: bool = False, 

503 fingerprint: Optional[Fingerprint] = None, 

504 authorized_by_fingerprint: Optional[Fingerprint] = None, 

505 extra_source_files: Iterable[daklib.upload.HashedFile] = [], 

506 ) -> DBSource: 

507 """Install a source package 

508 

509 :param directory: directory the source package is located in 

510 :param source: source package to install 

511 :param suite: target suite 

512 :param component: target component 

513 :param changed_by: person who prepared this version of the package 

514 :param allow_tainted: allow to copy additional files from tainted archives 

515 :param fingerprint: optional fingerprint 

516 :return: database object for the new source 

517 """ 

518 db_source = self.install_source_to_archive( 

519 directory, 

520 source, 

521 suite.archive, 

522 component, 

523 changed_by, 

524 allow_tainted=allow_tainted, 

525 fingerprint=fingerprint, 

526 authorized_by_fingerprint=authorized_by_fingerprint, 

527 extra_source_files=extra_source_files, 

528 ) 

529 

530 if suite in db_source.suites: 

531 return db_source 

532 db_source.suites.append(suite) 

533 self.session.flush() 

534 

535 return db_source 

536 

537 def _copy_file( 

538 self, 

539 db_file: PoolFile, 

540 archive: Archive, 

541 component: Component, 

542 allow_tainted: bool = False, 

543 ) -> None: 

544 """Copy a file to the given archive and component 

545 

546 :param db_file: file to copy 

547 :param archive: target archive 

548 :param component: target component 

549 :param allow_tainted: allow to copy from tainted archives (such as NEW) 

550 """ 

551 session = self.session 

552 

553 if ( 

554 session.query(ArchiveFile) 

555 .filter_by(archive=archive, component=component, file=db_file) 

556 .first() 

557 is None 

558 ): 

559 query = session.query(ArchiveFile).filter_by(file=db_file) 

560 if not allow_tainted: 

561 query = query.join(Archive).filter( 

562 Archive.tainted == False # noqa:E712 

563 ) 

564 

565 source_af = query.first() 

566 if source_af is None: 566 ↛ 567line 566 didn't jump to line 567 because the condition on line 566 was never true

567 raise ArchiveException( 

568 "cp: Could not find {0} in any archive.".format(db_file.filename) 

569 ) 

570 target_af = ArchiveFile(archive, component, db_file) 

571 session.add(target_af) 

572 session.flush() 

573 self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode) 

574 

    def copy_binary(
        self,
        db_binary: DBBinary,
        suite: Suite,
        component: Component,
        allow_tainted: bool = False,
        extra_archives: Optional[Iterable[Archive]] = None,
    ) -> None:
        """Copy a binary package to the given suite and component

        :param db_binary: binary to copy
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy from tainted archives (such as NEW)
        :param extra_archives: extra archives to copy Built-Using sources from
        :raises ArchiveException: source or Built-Using sources are missing
            from the target archive
        """
        session = self.session
        archive = suite.archive
        # A tainted target may always be fed from tainted archives.
        if archive.tainted:
            allow_tainted = True

        filename = db_binary.poolfile.filename

        # make sure source is present in target archive
        db_source = db_binary.source
        if (
            session.query(ArchiveFile)
            .filter_by(archive=archive, file=db_source.poolfile)
            .first()
            is None
        ):
            raise ArchiveException(
                "{0}: cannot copy to {1}: source is not present in target archive".format(
                    filename, suite.suite_name
                )
            )

        # make sure built-using packages are present in target archive
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(
                filename, db_source, archive, extra_archives=extra_archives
            )

        # copy binary
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        self.session.flush()

624 

625 def copy_source( 

626 self, 

627 db_source: DBSource, 

628 suite: Suite, 

629 component: Component, 

630 allow_tainted: bool = False, 

631 ) -> None: 

632 """Copy a source package to the given suite and component 

633 

634 :param db_source: source to copy 

635 :param suite: target suite 

636 :param component: target component 

637 :param allow_tainted: allow to copy from tainted archives (such as NEW) 

638 """ 

639 archive = suite.archive 

640 if archive.tainted: 

641 allow_tainted = True 

642 for db_dsc_file in db_source.srcfiles: 

643 self._copy_file( 

644 db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted 

645 ) 

646 if suite not in db_source.suites: 

647 db_source.suites.append(suite) 

648 self.session.flush() 

649 

    def remove_file(
        self, db_file: PoolFile, archive: Archive, component: Component
    ) -> None:
        """Remove a file from a given archive and component

        :param db_file: file to remove
        :param archive: archive to remove the file from
        :param component: component to remove the file from
        """
        # NOTE(review): .scalar() returns None when no matching row exists,
        # which would make af.path raise AttributeError below — presumably
        # callers only pass files known to be in (archive, component); confirm.
        af: ArchiveFile = (
            self.session.query(ArchiveFile)
            .filter_by(file=db_file, archive=archive, component=component)
            .scalar()
        )
        self.fs.unlink(af.path)
        self.session.delete(af)

666 

    def remove_binary(self, binary: DBBinary, suite: Suite) -> None:
        """Remove a binary from a given suite

        :param binary: binary to remove
        :param suite: suite to remove the package from
        """
        # Raises ValueError if the binary is not associated with the suite.
        binary.suites.remove(suite)
        self.session.flush()

675 

676 def remove_source(self, source: DBSource, suite: Suite) -> None: 

677 """Remove a source from a given suite and component 

678 

679 :param source: source to remove 

680 :param suite: suite to remove the package from 

681 

682 :raises ArchiveException: source package is still referenced by other 

683 binaries in the suite 

684 """ 

685 session = self.session 

686 

687 query = ( 

688 session.query(DBBinary) 

689 .filter_by(source=source) 

690 .filter(DBBinary.suites.contains(suite)) 

691 ) 

692 if query.first() is not None: 692 ↛ 693line 692 didn't jump to line 693 because the condition on line 692 was never true

693 raise ArchiveException( 

694 "src:{0} is still used by binaries in suite {1}".format( 

695 source.source, suite.suite_name 

696 ) 

697 ) 

698 

699 source.suites.remove(suite) 

700 session.flush() 

701 

    def commit(self) -> None:
        """commit changes"""
        try:
            # Commit the database first; the filesystem changes only become
            # permanent once the database commit succeeded.
            self.session.commit()
            self.fs.commit()
        finally:
            # After a successful commit these are no-ops on the fresh
            # transaction; after a failure they undo any staged changes.
            self.session.rollback()
            self.fs.rollback()

710 

    def rollback(self) -> None:
        """rollback changes

        Discards both staged database and filesystem changes.
        """
        self.session.rollback()
        self.fs.rollback()

715 

    def flush(self) -> None:
        """flush underlying database session

        Pending changes are sent to the database but not committed.
        """
        self.session.flush()

719 

720 def __enter__(self): 

721 return self 

722 

723 def __exit__(self, type, value, traceback): 

724 if type is None: 

725 self.commit() 

726 else: 

727 self.rollback() 

728 return None 

729 

730 

def source_component_from_package_list(
    package_list: "daklib.packagelist.PackageList", suite: Suite
) -> Optional[Component]:
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    :param package_list: package list of the source to get the override for
    :param suite: suite to consider for binaries produced
    :return: component for the given source or :const:`None`
    """
    if package_list.fallback:
        return None

    session = object_session(suite)
    assert session is not None

    component_names = {
        entry.component for entry in package_list.packages_for_suite(suite)
    }
    return (
        session.query(Component)
        .order_by(Component.ordering)
        .filter(Component.component_name.in_(component_names))
        .first()
    )

759 

760 

class ArchiveUpload:
    """handle an upload

    This class can be used in a with-statement::

        with ArchiveUpload(...) as upload:
            ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """

    def __init__(
        self, directory: str, changes: daklib.upload.Changes, keyrings: Collection[str]
    ):
        self.transaction: ArchiveTransaction = ArchiveTransaction()
        """transaction used to handle the upload"""

        self.session = self.transaction.session
        """database session"""

        self.original_directory: str = directory
        self.original_changes = changes

        self._changes: Optional[daklib.upload.Changes] = None
        """upload to process"""

        self._extra_source_files: list[daklib.upload.HashedFile] = []
        """extra source files"""

        self._directory: str | None = None
        """directory with temporary copy of files. set by :meth:`prepare`"""

        self.keyrings = keyrings

        self.fingerprint: Fingerprint = (
            self.session.query(Fingerprint)
            .filter_by(fingerprint=changes.primary_fingerprint)
            .one()
        )
        """fingerprint of the key used to sign the upload"""

        self._authorized_by_fingerprint: Optional[Fingerprint] = None
        """fingerprint of the key that authorized the upload"""

        self.reject_reasons: list[str] = []
        """reasons why the upload cannot be accepted"""

        self.warnings: list[str] = []
        """warnings

        .. note::

            Not used yet.
        """

        self.final_suites: Optional[list[Suite]] = None

        self.new: bool = False
        """upload is NEW. set by :meth:`check`"""

        self._checked: bool = False
        """checks passed. set by :meth:`check`"""

        self._new_queue = (
            self.session.query(PolicyQueue).filter_by(queue_name="new").one()
        )
        self._new = self._new_queue.suite

    @property
    def changes(self) -> daklib.upload.Changes:
        """upload to process; only available after :meth:`prepare` ran"""
        assert self._changes is not None
        return self._changes

834 

    @property
    def directory(self) -> str:
        """temporary working directory; only available after :meth:`prepare` ran"""
        assert self._directory is not None
        return self._directory

839 

840 @property 

841 def authorized_by_fingerprint(self) -> Fingerprint: 

842 """ 

843 fingerprint of the key that authorized the upload 

844 """ 

845 

846 return ( 

847 self._authorized_by_fingerprint 

848 if self._authorized_by_fingerprint is not None 

849 else self.fingerprint 

850 ) 

851 

852 @authorized_by_fingerprint.setter 

853 def authorized_by_fingerprint(self, fingerprint: Fingerprint) -> None: 

854 self._authorized_by_fingerprint = fingerprint 

855 

856 def warn(self, message: str) -> None: 

857 """add a warning message 

858 

859 Adds a warning message that can later be seen in :attr:`warnings` 

860 

861 :param message: warning message 

862 """ 

863 self.warnings.append(message) 

864 

    def prepare(self) -> None:
        """prepare upload for further processing

        This copies the files involved to a temporary directory.  If you use
        this method directly, you have to remove the directory given by the
        :attr:`directory` attribute later on your own.

        Instead of using the method directly, you can also use a with-statement::

            with ArchiveUpload(...) as upload:
                ...

        This will automatically handle any required cleanup.
        """
        assert self._directory is None
        assert self.original_changes.valid_signature

        cnf = Config()
        session = self.transaction.session

        group = cnf.get("Dinstall::UnprivGroup") or None
        self._directory = daklib.utils.temp_dirname(
            parent=cnf.get("Dir::TempPath"), mode=0o2750, group=group
        )
        with FilesystemTransaction() as fs:
            # Copy the .changes first, then re-parse it from the temp dir.
            src = os.path.join(self.original_directory, self.original_changes.filename)
            dst = os.path.join(self._directory, self.original_changes.filename)
            fs.copy(src, dst, mode=0o640)

            self._changes = daklib.upload.Changes(
                self._directory, self.original_changes.filename, self.keyrings
            )

            files = {}
            try:
                files = self.changes.files
            except daklib.upload.InvalidChangesException:
                # Do not raise an exception; upload will be rejected later
                # due to the missing files
                pass

            for f in files.values():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self._directory, f.filename)
                if not os.path.exists(src):
                    continue
                fs.copy(src, dst, mode=0o640)

            source = None
            try:
                source = self.changes.source
            except Exception:
                # Do not raise an exception here if the .dsc is invalid.
                pass

            if source is not None:
                # Source files not included in the upload (e.g. the .orig
                # tarball of a new revision) are fetched from the pool.
                for f in source.files.values():
                    src = os.path.join(self.original_directory, f.filename)
                    dst = os.path.join(self._directory, f.filename)
                    if not os.path.exists(dst):
                        try:
                            db_file = self.transaction.get_file(
                                f, source.dsc["Source"], check_hashes=False
                            )
                            db_archive_file = (
                                session.query(ArchiveFile)
                                .filter_by(file=db_file)
                                .first()
                            )
                            assert db_archive_file is not None
                            fs.copy(db_archive_file.path, dst, mode=0o640)
                        except KeyError:
                            # Ignore if get_file could not find it. Upload will
                            # probably be rejected later.
                            pass

940 

    def unpacked_source(self) -> Optional[str]:
        """Path to unpacked source

        Get path to the unpacked source. This method does unpack the source
        into a temporary directory under :attr:`directory` if it has not
        been done so already.

        :return: string giving the path to the unpacked source directory
                 or :const:`None` if no source was included in the upload.
        :raises Exception: extraction did not produce a directory
        """
        source = self.changes.source
        if source is None:
            return None
        dsc_path = os.path.join(self.directory, source._dsc_file.filename)

        sourcedir = os.path.join(self.directory, "source")
        if not os.path.exists(sourcedir):
            # --no-check: signatures/hashes were already validated elsewhere.
            subprocess.check_call(
                ["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir],
                shell=False,
                stdout=subprocess.DEVNULL,
            )
        if not os.path.isdir(sourcedir):
            raise Exception(
                "{0} is not a directory after extracting source package".format(
                    sourcedir
                )
            )
        return sourcedir

970 

    def _map_suite(self, suite_name: str) -> set[str]:
        """Apply the configured SuiteMappings rules to a single suite name.

        Each rule can map (rename), copy, ignore, or reject a target suite.
        Non-silent variants record a note in :attr:`warnings`.

        :param suite_name: requested target suite
        :return: set of suite names after applying all mappings
        :raises checks.Reject: a "reject" rule matched the suite
        """
        suite_names = set((suite_name,))
        for rule in Config().value_list("SuiteMappings"):
            fields = rule.split()
            rtype = fields[0]
            if rtype == "map" or rtype == "silent-map":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.remove(src)
                    suite_names.add(dst)
                    if rtype != "silent-map":
                        self.warnings.append("Mapping {0} to {1}.".format(src, dst))
            elif rtype == "copy" or rtype == "silent-copy":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.add(dst)
                    if rtype != "silent-copy":
                        self.warnings.append("Copy {0} to {1}.".format(src, dst))
            elif rtype == "ignore":
                ignored = fields[1]
                if ignored in suite_names:
                    suite_names.remove(ignored)
                    self.warnings.append("Ignoring target suite {0}.".format(ignored))
            elif rtype == "reject":
                rejected = fields[1]
                if rejected in suite_names:
                    raise checks.Reject(
                        "Uploads to {0} are not accepted.".format(rejected)
                    )
            ## XXX: propup-version and map-unreleased not yet implemented
        return suite_names

1002 

def _mapped_suites(self) -> list[Suite]:
    """Get target suites after mappings

    :return: list giving the mapped target suites of this upload
    """
    mapped_names: set[str] = set()
    for dist in self.changes.distributions:
        mapped_names |= self._map_suite(dist)

    return (
        self.session.query(Suite)
        .filter(Suite.suite_name.in_(mapped_names))
        .all()
    )

1016 

def _check_new_binary_overrides(self, suite: Suite, overridesuite: Suite) -> bool:
    """Check whether any binary of this upload lacks an override.

    Appends a "binary:<name> is NEW." warning for every missing override.

    :param suite: target suite (used to expand the Package-List field)
    :param overridesuite: suite whose overrides are consulted
    :return: :const:`True` if at least one binary is NEW
    """
    new = False
    source = self.changes.source

    # Binaries promised by the source package's Package-List field:
    if source is not None and not source.package_list.fallback:
        for entry in source.package_list.packages_for_suite(suite):
            if self._binary_override(overridesuite, entry) is None:
                self.warnings.append("binary:{0} is NEW.".format(entry.name))
                new = True

    # All uploaded packages. This is necessary to account for packages
    # without a Package-List field, really late binary-only uploads
    # (where an unused override was already removed), and for debug
    # packages uploaded to a suite without a debug suite (which are then
    # considered as NEW).
    for binary in self.changes.binaries:
        if (
            daklib.utils.is_in_debug_section(binary.control)
            and suite.debug_suite is not None
        ):
            # dbgsym packages go to the debug suite and need no override.
            continue
        if self._binary_override(overridesuite, binary) is None:
            self.warnings.append("binary:{0} is NEW.".format(binary.name))
            new = True

    return new

1047 

def _check_new(self, suite: Suite, overridesuite: Suite) -> bool:
    """Check if upload is NEW

    An upload is NEW if it has binary or source packages that do not have
    an override in `overridesuite` OR if it references files ONLY in a
    tainted archive (eg. when it references files in NEW).

    Debug packages (*-dbgsym in Section: debug) are not considered as NEW
    if `suite` has a separate debug suite.

    :return: :const:`True` if the upload is NEW, :const:`False` otherwise
    """
    session = self.session
    new = False

    # Check for missing overrides
    if self._check_new_binary_overrides(suite, overridesuite):
        new = True
    if self.changes.source is not None:
        override = self._source_override(overridesuite, self.changes.source)
        if override is None:
            self.warnings.append(
                "source:{0} is NEW.".format(self.changes.source.dsc["Source"])
            )
            new = True

    # Check if we reference a file only in a tainted archive
    # (a file present only in e.g. the NEW archive means the upload must
    # go through NEW itself, even if all overrides exist).
    files = list(self.changes.files.values())
    if self.changes.source is not None:
        files.extend(self.changes.source.files.values())
    for f in files:
        # All archives that already contain a pool file with this sha1sum:
        query = (
            session.query(ArchiveFile)
            .join(PoolFile)
            .filter(PoolFile.sha1sum == f.sha1sum)
        )
        # ... restricted to archives that are not tainted:
        query_untainted = query.join(Archive).filter(
            Archive.tainted == False  # noqa:E712
        )

        in_archive = query.first() is not None
        in_untainted_archive = query_untainted.first() is not None

        # Present somewhere, but only in tainted archives -> NEW.
        if in_archive and not in_untainted_archive:
            self.warnings.append("{0} is only available in NEW.".format(f.filename))
            new = True

    return new

1096 

def _final_suites(self) -> list[Suite]:
    """Determine the final target suites of this upload.

    Also runs the NEW check for every suite and sets :attr:`new`
    accordingly.

    :return: de-duplicated list of mapped target suites
    """
    final_suites: list[Suite] = []

    for suite in self._mapped_suites():
        # Overrides may live in a different suite (e.g. stable for
        # proposed-updates); resolve it before the NEW check.
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = (
                self.session.query(Suite)
                .filter_by(suite_name=suite.overridesuite)
                .one()
            )
        if self._check_new(suite, overridesuite):
            self.new = True
        if suite not in final_suites:
            final_suites.append(suite)

    return final_suites

1115 

def _binary_override(
    self,
    suite: Suite,
    binary: "Union[daklib.upload.Binary, daklib.packagelist.PackageListEntry]",
) -> Optional[Override]:
    """Get override entry for a binary

    :param suite: suite to get override for
    :param binary: binary to get override for
    :return: override for the given binary or :const:`None`
    """
    # Overrides live in the override suite, not the target suite itself.
    if suite.overridesuite is not None:
        suite = (
            self.session.query(Suite)
            .filter_by(suite_name=suite.overridesuite)
            .one()
        )

    if binary.component is None:
        return None
    mapped_component = get_mapped_component(binary.component)
    if mapped_component is None:
        return None

    # Match on package name, component and override type (deb/udeb/...).
    return (
        self.session.query(Override)
        .filter_by(suite=suite, package=binary.name)
        .join(Component)
        .filter(Component.component_name == mapped_component.component_name)
        .join(OverrideType)
        .filter(OverrideType.overridetype == binary.type)
        .one_or_none()
    )

1150 

def _source_override(
    self, suite: Suite, source: daklib.upload.Source
) -> Optional[Override]:
    """Get override entry for a source

    :param suite: suite to get override for
    :param source: source to get override for
    :return: override for the given source or :const:`None`
    """
    # Overrides live in the override suite, not the target suite itself.
    if suite.overridesuite is not None:
        suite = (
            self.session.query(Suite)
            .filter_by(suite_name=suite.overridesuite)
            .one()
        )

    q = self.session.query(Override).filter_by(
        suite=suite, package=source.dsc["Source"]
    )
    q = q.join(OverrideType).filter(OverrideType.overridetype == "dsc")

    # Narrow by component when the Package-List field determines one.
    component = source_component_from_package_list(source.package_list, suite)
    if component is not None:
        q = q.filter(Override.component == component)

    return q.one_or_none()

1179 

def _binary_component(
    self, suite: Suite, binary: daklib.upload.Binary, only_overrides: bool = True
) -> Optional[Component]:
    """get component for a binary

    By default this will only look at overrides to get the right component;
    if `only_overrides` is :const:`False` this method will also look at the
    Section field.

    :param only_overrides: only use overrides to get the right component
    """
    override = self._binary_override(suite, binary)
    if override is not None:
        return override.component
    # No override found: optionally fall back to the Section field.
    if only_overrides:
        return None
    else:
        return get_mapped_component(binary.component, self.session)

1197 

def _source_component(
    self, suite: Suite, source: daklib.upload.Source, only_overrides: bool = True
) -> Optional[Component]:
    """get component for a source

    By default this will only look at overrides to get the right component;
    if `only_overrides` is :const:`False` this method will also look at the
    Section field.

    :param only_overrides: only use overrides to get the right component
    """
    override = self._source_override(suite, source)
    if override is not None:
        return override.component
    # No override found: optionally fall back to the Section field.
    if only_overrides:
        return None
    else:
        return get_mapped_component(source.component, self.session)

1215 

def _run_checks(
    self,
    force: bool,
    simple_checks: Iterable[type[checks.Check]],
    per_suite_checks: Collection[type[checks.Check]],
    suites: Collection[Suite],
) -> bool:
    """Run the given check classes against this upload.

    Any :class:`checks.Reject` or unexpected exception is converted into
    an entry in :attr:`reject_reasons`.

    :param force: accepted for interface symmetry with callers;
        not consulted by this method itself
    :param simple_checks: checks run once for the whole upload
    :param per_suite_checks: checks run once per target suite
    :param suites: target suites used for `per_suite_checks`
    :return: :const:`True` if no check rejected the upload
    """
    try:
        for check_class in simple_checks:
            check_class().check(self)

        if per_suite_checks and not suites:
            raise ValueError(
                "Per-suite checks should be called, but no suites given."
            )
        for check_class in per_suite_checks:
            for suite in suites:
                check_class().per_suite_check(self, suite)
    except checks.Reject as e:
        self.reject_reasons.append(str(e))
        return False
    except Exception as e:
        # Treat internal errors as rejections so the uploader gets a
        # traceback instead of the upload silently stalling.
        self.reject_reasons.append(
            "Processing raised an exception: {0}.\n{1}".format(
                e, traceback.format_exc()
            )
        )
        return False

    return not self.reject_reasons

1248 

def _run_checks_very_early(self, force: bool) -> bool:
    """
    run very early checks

    These check validate signatures on .changes and hashes.
    """
    signature_checks = [
        checks.SignatureAndHashesCheck,
        checks.WeakSignatureCheck,
        checks.SignatureTimestampCheck,
    ]
    return self._run_checks(
        force=force,
        simple_checks=signature_checks,
        per_suite_checks=[],
        suites=[],
    )

1265 

def _run_checks_early(self, force: bool) -> bool:
    """
    run early checks

    These are checks that run after checking signatures, but
    before deciding the target suite.

    This should cover archive-wide policies, sanity checks, ...
    """
    archive_wide_checks = [
        checks.ChangesCheck,
        checks.ExternalHashesCheck,
        checks.SourceCheck,
        checks.BinaryCheck,
        checks.BinaryMembersCheck,
        checks.BinaryTimestampCheck,
        checks.SingleDistributionCheck,
        checks.ArchAllBinNMUCheck,
    ]
    return self._run_checks(
        force=force,
        simple_checks=archive_wide_checks,
        per_suite_checks=[],
        suites=[],
    )

1290 

def _run_checks_late(self, force: bool, suites: Collection[Suite]) -> bool:
    """
    run late checks

    These are checks that run after the target suites are known.

    This should cover permission checks, suite-specific polices
    (e.g., lintian), version constraints, ...
    """
    upload_checks = [
        checks.TransitionCheck,
        checks.ACLCheck,
        checks.NewOverrideCheck,
        checks.NoSourceOnlyCheck,
        checks.LintianCheck,
    ]
    suite_checks = [
        checks.SuiteCheck,
        checks.ACLCheck,
        checks.SourceFormatCheck,
        checks.SuiteArchitectureCheck,
        checks.VersionCheck,
    ]
    return self._run_checks(
        force=force,
        simple_checks=upload_checks,
        per_suite_checks=suite_checks,
        suites=suites,
    )

1318 

def _handle_tag2upload(self) -> bool:
    """
    check if upload is via tag2upload

    if so, determine who authorized the upload to notify them of
    rejections and for ACL checks

    :return: :const:`True` if the upload is not a tag2upload upload or
        passed all tag2upload validation; :const:`False` otherwise
        (with reasons appended to :attr:`reject_reasons`)
    """

    # Not signed by a tag2upload keyring -> nothing to do here.
    if not (keyring := self.fingerprint.keyring) or not keyring.tag2upload:
        return True

    source = self.changes.source
    if not source:
        self.reject_reasons.append("tag2upload: upload missing source")
        return False

    try:
        tag2upload_file, info = get_tag2upload_info_for_upload(self)
    except Exception as e:
        self.reject_reasons.append(f"tag2upload: invalid metadata: {e}")
        return False
    self._extra_source_files.append(tag2upload_file)

    # Collect all failures instead of returning at the first one so the
    # uploader sees every problem at once.
    success = True

    if self.changes.binaries:
        success = False
        self.reject_reasons.append("tag2upload: upload includes binaries")
    if self.changes.byhand_files:
        success = False
        self.reject_reasons.append("tag2upload: upload included by-hand files")

    if not info.signed_file.valid:
        success = False
        self.reject_reasons.append("tag2upload: no valid signature on tag")
    else:
        # Only set with a valid signature, but also when we reject
        # the upload so the signer might get included in the
        # rejection mail.
        self.authorized_by_fingerprint = (
            self.session.query(Fingerprint)
            .filter_by(fingerprint=info.signed_file.primary_fingerprint)
            .one()
        )
        if info.signed_file.weak_signature:
            success = False
            self.reject_reasons.append(
                "tag2upload: tag was signed using a weak algorithm (such as SHA-1)"
            )
        try:
            checks.check_signature_timestamp("tag2upload", info.signed_file)
        except checks.Reject as e:
            success = False
            self.reject_reasons.append(str(e))

    # Cross-check the tag metadata against the uploaded source package.
    if info.metadata.get("distro") != "debian":
        success = False
        self.reject_reasons.append("tag2upload: upload not targeted at Debian.")
    if info.metadata.get("source") != source.dsc["Source"]:
        success = False
        self.reject_reasons.append(
            "tag2upload: source from tag metadata does not match upload"
        )
    if info.metadata.get("version") != source.dsc["Version"]:
        success = False
        self.reject_reasons.append(
            "tag2upload: version from tag metadata does not match upload"
        )

    # The .dsc's Git-Tag-Info must name the same signing key as the tag.
    tag_info_field = source.dsc.get("Git-Tag-Info")
    if not tag_info_field:
        success = False
        self.reject_reasons.append("tag2upload: source misses Git-Tag-Info field")
    else:
        try:
            tag_info = parse_git_tag_info(tag_info_field)
        except ValueError:
            success = False
            self.reject_reasons.append("tag2upload: could not parse Git-Tag-Info")
        else:
            if tag_info.fp.upper() != info.signed_file.fingerprint:
                success = False
                self.reject_reasons.append(
                    "tag2upload: signing key from Git and Git-Tag-Info differ"
                )

    return success

1406 

def check(self, force: bool = False) -> bool:
    """run checks against the upload

    :param force: ignore failing forcable checks
    :return: :const:`True` if all checks passed, :const:`False` otherwise
    """
    # XXX: needs to be better structured.
    assert self.changes.valid_signature

    # Validate signatures and hashes before we do any real work:
    if not self._run_checks_very_early(force):
        return False

    # tag2upload validation must happen before the early checks so that
    # the authorizing fingerprint is known for later ACL checks.
    if not self._handle_tag2upload():
        return False

    if not self._run_checks_early(force):
        return False

    # Resolve suite mappings; any failure here is reported as a
    # rejection rather than crashing the process.
    try:
        final_suites = self._final_suites()
    except Exception as e:
        self.reject_reasons.append(
            "Processing raised an exception: {0}.\n{1}".format(
                e, traceback.format_exc()
            )
        )
        return False
    if len(final_suites) == 0:
        self.reject_reasons.append(
            "No target suite found. Please check your target distribution and that you uploaded to the right archive."
        )
        return False

    self.final_suites = final_suites

    # Suite-specific checks (ACLs, lintian, version constraints, ...):
    if not self._run_checks_late(force, final_suites):
        return False

    if len(self.reject_reasons) != 0:
        return False

    # Mark the upload as checked so install()/install_to_new() may run.
    self._checked = True
    return True

1451 

def _install_to_suite(
    self,
    target_suite: Suite,
    suite: Suite,
    source_component_func: Callable[[daklib.upload.Source], Component],
    binary_component_func: Callable[[daklib.upload.Binary], Component],
    source_suites=None,
    extra_source_archives: Optional[Iterable[Archive]] = None,
    policy_upload: bool = False,
) -> tuple[Optional[DBSource], list[DBBinary]]:
    """Install upload to the given suite

    :param target_suite: target suite (before redirection to policy queue or NEW)
    :param suite: suite to install the package into. This is the real suite,
        ie. after any redirection to NEW or a policy queue
    :param source_component_func: function to get the :class:`daklib.dbconn.Component`
        for a :class:`daklib.upload.Source` object
    :param binary_component_func: function to get the :class:`daklib.dbconn.Component`
        for a :class:`daklib.upload.Binary` object
    :param source_suites: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
    :param extra_source_archives: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
    :param policy_upload: Boolean indicating upload to policy queue (including NEW)
    :return: tuple with two elements. The first is a :class:`daklib.dbconn.DBSource`
        object for the install source or :const:`None` if no source was
        included. The second is a list of :class:`daklib.dbconn.DBBinary`
        objects for the installed binary packages.
    """
    # XXX: move this function to ArchiveTransaction?

    control = self.changes.changes
    changed_by = get_or_set_maintainer(
        control.get("Changed-By", control["Maintainer"]), self.session
    )

    # Default source_suites: the target suite plus everything it
    # "Enhances" per version checks (where the source may already live).
    if source_suites is None:
        source_suites = (
            self.session.query(Suite)
            .join(VersionCheck, VersionCheck.reference_id == Suite.suite_id)
            .filter(VersionCheck.check == "Enhances")
            .filter(VersionCheck.suite == suite)
            .subquery()
        )

    source = self.changes.source
    if source is not None:
        component = source_component_func(source)
        db_source = self.transaction.install_source(
            self.directory,
            source,
            suite,
            component,
            changed_by,
            fingerprint=self.fingerprint,
            authorized_by_fingerprint=self.authorized_by_fingerprint,
            extra_source_files=self._extra_source_files,
        )
    else:
        db_source = None

    db_binaries = []
    # Sorted by name for deterministic processing order.
    for binary in sorted(self.changes.binaries, key=lambda x: x.name):
        # dbgsym packages are diverted to the suite's debug suite, if any.
        copy_to_suite = suite
        if (
            daklib.utils.is_in_debug_section(binary.control)
            and suite.debug_suite is not None
        ):
            copy_to_suite = suite.debug_suite

        component = binary_component_func(binary)
        db_binary = self.transaction.install_binary(
            self.directory,
            binary,
            copy_to_suite,
            component,
            fingerprint=self.fingerprint,
            authorized_by_fingerprint=self.authorized_by_fingerprint,
            source_suites=source_suites,
            extra_source_archives=extra_source_archives,
        )
        db_binaries.append(db_binary)

        # Only request external signatures (e.g. for secure boot) for
        # real installs, not for uploads held in a policy queue.
        if not policy_upload:
            check_upload_for_external_signature_request(
                self.session, target_suite, copy_to_suite, db_binary
            )

    # Some suites keep a copy of the .changes file under dists/.
    if suite.copychanges:
        src = os.path.join(self.directory, self.changes.filename)
        dst = os.path.join(
            suite.archive.path, "dists", suite.suite_name, self.changes.filename
        )
        self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

    suite.update_last_changed()

    return (db_source, db_binaries)

1548 

def _install_changes(self) -> DBChange:
    """Record the .changes file in the database.

    :return: the newly created :class:`daklib.dbconn.DBChange` row
    :raises ArchiveException: if a .changes file with the same name is
        already known
    """
    assert self.changes.valid_signature
    control = self.changes.changes
    session = self.transaction.session

    changelog_id = None
    # Only add changelog for sourceful uploads and binNMUs
    if self.changes.sourceful or re_bin_only_nmu.search(control["Version"]):
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        changelog_id = session.execute(
            sql.text(query), {"changelog": control["Changes"]}
        ).scalar()
        assert changelog_id is not None

    # Mirror the relevant .changes fields into the database record.
    db_changes = DBChange()
    db_changes.changesname = self.changes.filename
    db_changes.source = control["Source"]
    db_changes.binaries = control.get("Binary", None)
    db_changes.architecture = control["Architecture"]
    db_changes.version = control["Version"]
    db_changes.distribution = control["Distribution"]
    db_changes.urgency = control["Urgency"]
    db_changes.maintainer = control["Maintainer"]
    db_changes.changedby = control.get("Changed-By", control["Maintainer"])
    db_changes.date = control["Date"]
    db_changes.fingerprint = self.fingerprint.fingerprint
    db_changes.authorized_by_fingerprint = (
        self.authorized_by_fingerprint.fingerprint
    )
    db_changes.changelog_id = changelog_id
    db_changes.closes = self.changes.closed_bugs

    try:
        self.transaction.session.add(db_changes)
        self.transaction.session.flush()
    except sqlalchemy.exc.IntegrityError:
        # changesname is unique: a duplicate insert means this upload
        # was already processed.
        raise ArchiveException(
            "{0} is already known.".format(self.changes.filename)
        )

    return db_changes

1590 

def _install_policy(
    self, policy_queue, target_suite, db_changes, db_source, db_binaries
) -> PolicyQueueUpload:
    """install upload to policy queue

    Records the upload in the policy queue and stages the .changes and
    .buildinfo files into the queue's directory.
    """
    session = self.transaction.session

    upload = PolicyQueueUpload()
    upload.policy_queue = policy_queue
    upload.target_suite = target_suite
    upload.changes = db_changes
    upload.source = db_source
    upload.binaries = db_binaries
    session.add(upload)
    session.flush()

    # Copy the .changes plus any .buildinfo files into the queue directory.
    filenames = [self.changes.filename]
    filenames.extend(f.filename for f in self.changes.buildinfo_files)
    for fn in filenames:
        self.transaction.fs.copy(
            os.path.join(self.changes.directory, fn),
            os.path.join(policy_queue.path, fn),
            mode=policy_queue.change_perms,
        )

    return upload

1612 

def try_autobyhand(self) -> bool:
    """Try AUTOBYHAND

    Try to handle byhand packages automatically.

    :return: :const:`True` if no byhand files remain to be handled
        manually, :const:`False` otherwise
    """
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None
    assert self._checked

    byhand = self.changes.byhand_files
    if len(byhand) == 0:
        return True

    suites = list(self.final_suites)
    assert len(suites) == 1, "BYHAND uploads must be to a single suite"
    suite = suites[0]

    cnf = Config()
    control = self.changes.changes
    automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

    # Files we could not process automatically end up in `remaining`.
    remaining = []
    for f in byhand:
        if "_" in f.filename:
            # Expect <package>_<version>_<arch>.<ext>
            parts = f.filename.split("_", 2)
            if len(parts) != 3:
                print(
                    "W: unexpected byhand filename {0}. No automatic processing.".format(
                        f.filename
                    )
                )
                remaining.append(f)
                continue

            package, _, archext = parts
            arch, ext = archext.split(".", 1)
        else:
            # No underscore: treat as <package>.<ext> with arch "all".
            parts = f.filename.split(".")
            if len(parts) < 2:
                print(
                    "W: unexpected byhand filename {0}. No automatic processing.".format(
                        f.filename
                    )
                )
                remaining.append(f)
                continue

            package = parts[0]
            arch = "all"
            ext = parts[-1]

        # Look up the AutomaticByHandPackages rule for this package.
        try:
            rule = automatic_byhand_packages.subtree(package)
        except KeyError:
            remaining.append(f)
            continue

        # The rule must match source, section and (optionally) extension.
        if (
            rule["Source"] != self.changes.source_name
            or rule["Section"] != f.section
            or ("Extension" in rule and rule["Extension"] != ext)
        ):
            remaining.append(f)
            continue

        # Hand the file to the configured processing script.
        script = rule["Script"]
        retcode = subprocess.call(
            [
                script,
                os.path.join(self.directory, f.filename),
                control["Version"],
                arch,
                os.path.join(self.directory, self.changes.filename),
                suite.suite_name,
            ],
            shell=False,
        )
        if retcode != 0:
            print("W: error processing {0}.".format(f.filename))
            remaining.append(f)

    return len(remaining) == 0

1696 

def _install_byhand(
    self,
    policy_queue_upload: PolicyQueueUpload,
    hashed_file: daklib.upload.HashedFile,
) -> PolicyQueueByhandFile:
    """install byhand file

    Records the byhand file for the given policy queue upload and stages
    it into the policy queue directory.
    """
    session = self.transaction.session
    policy_queue = policy_queue_upload.policy_queue

    byhand_file = PolicyQueueByhandFile()
    byhand_file.upload = policy_queue_upload
    byhand_file.filename = hashed_file.filename
    session.add(byhand_file)
    session.flush()

    # Stage the file itself into the policy queue directory.
    self.transaction.fs.copy(
        os.path.join(self.directory, hashed_file.filename),
        os.path.join(policy_queue.path, hashed_file.filename),
        mode=policy_queue.change_perms,
    )

    return byhand_file

1718 

def _do_bts_versiontracking(self) -> None:
    """Export version-tracking data for the bug tracking system.

    Writes a ``<changes>.versions`` file (changelog version lines) and a
    ``<changes>.debinfo`` file (binary -> source mapping) into
    ``Dir::BTSVersionTrack``; a no-op when that directory is unset.
    """
    cnf = Config()
    fs = self.transaction.fs

    btsdir = cnf.get("Dir::BTSVersionTrack")
    if btsdir is None or btsdir == "":
        return

    # Basename without the trailing ".changes" (8 characters).
    base = os.path.join(btsdir, self.changes.filename[:-8])

    # version history
    sourcedir = self.unpacked_source()
    if sourcedir is not None:
        dch_path = os.path.join(sourcedir, "debian", "changelog")
        with open(dch_path, "r") as fh:
            versions = fs.create("{0}.versions".format(base), mode=0o644)
            # Keep only the "<package> (<version>) ..." header lines.
            for line in fh.readlines():
                if re_changelog_versions.match(line):
                    versions.write(line)
            versions.close()

    # binary -> source mapping
    if self.changes.binaries:
        debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
        for binary in self.changes.binaries:
            control = binary.control
            source_package, source_version = binary.source
            line = " ".join(
                [
                    control["Package"],
                    control["Version"],
                    control["Architecture"],
                    source_package,
                    source_version,
                ]
            )
            print(line, file=debinfo)
        debinfo.close()

1757 

1758 def _policy_queue(self, suite) -> Optional[PolicyQueue]: 

1759 if suite.policy_queue is not None: 

1760 return suite.policy_queue 

1761 return None 

1762 

def install(self) -> None:
    """install upload

    Install upload to a suite or policy queue. This method does **not**
    handle uploads to NEW.

    You need to have called the :meth:`check` method before calling this method.
    """
    assert len(self.reject_reasons) == 0
    assert self.changes.valid_signature
    assert self.final_suites is not None
    assert self._checked
    assert not self.new

    db_changes = self._install_changes()

    for suite in self.final_suites:
        # Overrides may live in a separate override suite.
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = (
                self.session.query(Suite)
                .filter_by(suite_name=suite.overridesuite)
                .one()
            )

        policy_queue = self._policy_queue(suite)
        policy_upload = False

        # Uploads to a suite with a policy queue are diverted to the
        # queue's suite; the intended suite stays `suite`.
        redirected_suite = suite
        if policy_queue is not None:
            redirected_suite = policy_queue.suite
            policy_upload = True

        # source can be in the suite we install to or any suite we enhance
        source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
        for (enhanced_suite_id,) in (
            self.session.query(VersionCheck.reference_id)
            .filter(VersionCheck.suite_id.in_(source_suite_ids))
            .filter(VersionCheck.check == "Enhances")
        ):
            source_suite_ids.add(enhanced_suite_id)

        source_suites = (
            self.session.query(Suite)
            .filter(Suite.suite_id.in_(source_suite_ids))
            .subquery()
        )

        # NOTE: these closures capture the loop variable `overridesuite`;
        # they are only used within this iteration, so late binding is safe.
        def source_component_func(source: daklib.upload.Source) -> Component:
            # A missing component at install time is a programming error
            # (check() should have flagged the upload as NEW before).
            component = self._source_component(
                overridesuite, source, only_overrides=False
            )
            assert component is not None
            return component

        def binary_component_func(binary: daklib.upload.Binary) -> Component:
            component = self._binary_component(
                overridesuite, binary, only_overrides=False
            )
            assert component is not None
            return component

        (db_source, db_binaries) = self._install_to_suite(
            suite,
            redirected_suite,
            source_component_func,
            binary_component_func,
            source_suites=source_suites,
            extra_source_archives=[suite.archive],
            policy_upload=policy_upload,
        )

        if policy_queue is not None:
            self._install_policy(
                policy_queue, suite, db_changes, db_source, db_binaries
            )

        # copy to build queues
        if policy_queue is None or policy_queue.send_to_build_queues:
            for build_queue in suite.copy_queues:
                self._install_to_suite(
                    suite,
                    build_queue.suite,
                    source_component_func,
                    binary_component_func,
                    source_suites=source_suites,
                    extra_source_archives=[suite.archive],
                )

    self._do_bts_versiontracking()

1854 def install_to_new(self) -> None: 

1855 """install upload to NEW 

1856 

1857 Install upload to NEW. This method does **not** handle regular uploads 

1858 to suites or policy queues. 

1859 

1860 You need to have called the :meth:`check` method before calling this method. 

1861 """ 

1862 # Uploads to NEW are special as we don't have overrides. 

1863 assert len(self.reject_reasons) == 0 

1864 assert self.changes.valid_signature 

1865 assert self.final_suites is not None 

1866 

1867 binaries = self.changes.binaries 

1868 byhand = self.changes.byhand_files 

1869 

1870 # we need a suite to guess components 

1871 suites = list(self.final_suites) 

1872 assert len(suites) == 1, "NEW uploads must be to a single suite" 

1873 suite = suites[0] 

1874 

1875 # decide which NEW queue to use 

1876 if suite.new_queue is None: 1876 ↛ 1883line 1876 didn't jump to line 1883 because the condition on line 1876 was always true

1877 new_queue = ( 

1878 self.transaction.session.query(PolicyQueue) 

1879 .filter_by(queue_name="new") 

1880 .one() 

1881 ) 

1882 else: 

1883 new_queue = suite.new_queue 

1884 if len(byhand) > 0: 1884 ↛ 1886line 1884 didn't jump to line 1886

1885 # There is only one global BYHAND queue 

1886 new_queue = ( 

1887 self.transaction.session.query(PolicyQueue) 

1888 .filter_by(queue_name="byhand") 

1889 .one() 

1890 ) 

1891 new_suite = new_queue.suite 

1892 

1893 def binary_component_func(binary: daklib.upload.Binary) -> Component: 

1894 component = self._binary_component(suite, binary, only_overrides=False) 

1895 assert component is not None 

1896 return component 

1897 

1898 # guess source component 

1899 # XXX: should be moved into an extra method 

1900 binary_component_names = set() 

1901 for binary in binaries: 

1902 component = binary_component_func(binary) 

1903 binary_component_names.add(component.component_name) 

1904 source_component_name = None 

1905 for c in self.session.query(Component).order_by(Component.component_id): 

1906 guess = c.component_name 

1907 if guess in binary_component_names: 

1908 source_component_name = guess 

1909 break 

1910 if source_component_name is None: 

1911 source_component = ( 

1912 self.session.query(Component).order_by(Component.component_id).first() 

1913 ) 

1914 else: 

1915 source_component = ( 

1916 self.session.query(Component) 

1917 .filter_by(component_name=source_component_name) 

1918 .one() 

1919 ) 

1920 assert source_component is not None 

1921 

1922 def source_component_func(source: daklib.upload.Source) -> Component: 

1923 return source_component 

1924 

1925 db_changes = self._install_changes() 

1926 (db_source, db_binaries) = self._install_to_suite( 

1927 suite, 

1928 new_suite, 

1929 source_component_func, 

1930 binary_component_func, 

1931 source_suites=True, 

1932 extra_source_archives=[suite.archive], 

1933 policy_upload=True, 

1934 ) 

1935 policy_upload = self._install_policy( 

1936 new_queue, suite, db_changes, db_source, db_binaries 

1937 ) 

1938 

1939 for f in byhand: 1939 ↛ 1940line 1939 didn't jump to line 1940 because the loop on line 1939 never started

1940 self._install_byhand(policy_upload, f) 

1941 

1942 self._do_bts_versiontracking() 

1943 

1944 def commit(self) -> None: 

1945 """commit changes""" 

1946 self.transaction.commit() 

1947 

1948 def rollback(self) -> None: 

1949 """rollback changes""" 

1950 self.transaction.rollback() 

1951 

1952 def __enter__(self): 

1953 self.prepare() 

1954 return self 

1955 

1956 def __exit__(self, type, value, traceback): 

1957 if self._directory is not None: 1957 ↛ 1960line 1957 didn't jump to line 1960 because the condition on line 1957 was always true

1958 shutil.rmtree(self._directory) 

1959 self._directory = None 

1960 self._changes = None 

1961 self.transaction.rollback() 

1962 return None