# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

"""module to manipulate the archive

This module provides classes to manipulate the archive.
"""

from daklib.dbconn import *
import daklib.checks as checks
from daklib.config import Config
from daklib.externalsignature import check_upload_for_external_signature_request
import daklib.upload
import daklib.utils
from daklib.fstransactions import FilesystemTransaction
from daklib.regexes import re_changelog_versions, re_bin_only_nmu

import os
import shutil
from collections.abc import Callable, Iterable
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.orm import object_session
from typing import Optional, TYPE_CHECKING, Union
import sqlalchemy.exc
import subprocess
import traceback

if TYPE_CHECKING:
    import daklib.packagelist


class ArchiveException(Exception):
    pass


class HashMismatchException(ArchiveException):
    pass


class ArchiveTransaction:
    """manipulate the archive in a transaction
    """
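    # A minimal usage sketch (not from dak itself; ``db_source``, ``db_binary``,
    # ``suite`` and ``component`` are assumed to be objects from an existing dak
    # database).  Leaving the with-block commits on success and rolls back on
    # any exception:
    #
    #     with ArchiveTransaction() as transaction:
    #         transaction.copy_source(db_source, suite, component)
    #         transaction.copy_binary(db_binary, suite, component)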

    def __init__(self):
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()

    def get_file(self, hashed_file: daklib.upload.HashedFile, source_name: str, check_hashes: bool = True) -> PoolFile:
        """Look for file `hashed_file` in database

        :param hashed_file: file to look for in the database
        :param source_name: source package name
        :param check_hashes: check size and hashes match
        :return: database entry for the file
        :raises KeyError: file was not found in the database
        :raises HashMismatchException: hash mismatch
        """
        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
            if check_hashes and (poolfile.filesize != hashed_file.size
                                 or poolfile.md5sum != hashed_file.md5sum
                                 or poolfile.sha1sum != hashed_file.sha1sum
                                 or poolfile.sha256sum != hashed_file.sha256sum):
                raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
            return poolfile
        except NoResultFound:
            raise KeyError('{0} not found in database.'.format(poolname))

    def _install_file(self, directory, hashed_file, archive, component, source_name) -> PoolFile:
        """Install a file

        Will not give an error when the file is already present.

        :return: database object for the new file
        """
        session = self.session

        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.get_file(hashed_file, source_name)
        except KeyError:
            poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
            poolfile.md5sum = hashed_file.md5sum
            poolfile.sha1sum = hashed_file.sha1sum
            poolfile.sha256sum = hashed_file.sha256sum
            session.add(poolfile)
            session.flush()

        try:
            session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
        except NoResultFound:
            archive_file = ArchiveFile(archive, component, poolfile)
            session.add(archive_file)
            session.flush()

            path = os.path.join(archive.path, 'pool', component.component_name, poolname)
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)

        return poolfile

    def install_binary(self, directory: str, binary: daklib.upload.Binary, suite: Suite, component: Component, allow_tainted: bool = False, fingerprint: Optional[Fingerprint] = None, source_suites=None, extra_source_archives: Optional[Iterable[Archive]] = None) -> DBBinary:
        """Install a binary package

        :param directory: directory the binary package is located in
        :param binary: binary package to install
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy additional files from tainted archives
        :param fingerprint: optional fingerprint
        :param source_suites: suites to copy the source from if they are not
                              in `suite` or :const:`True` to allow copying from any
                              suite.
                              Can be a SQLAlchemy subquery for :class:`Suite` or :const:`True`.
        :param extra_source_archives: extra archives to copy Built-Using sources from
        :return: database object for the new package
        """
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        architecture = get_architecture(control['Architecture'], session)

        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        if source is None:
            if source_suites is not True:
                source_query = source_query.join(DBSource.suites) \
                    .filter(Suite.suite_id == source_suites.c.id)
            source = source_query.first()
            if source is None:
                raise ArchiveException('{0}: trying to install to {1}, but could not find source ({2} {3})'.
                                       format(binary.hashed_file.filename, suite.suite_name, source_name, source_version))
            self.copy_source(source, suite, source.poolfile.component)

        db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

        unique = dict(
            package=control['Package'],
            version=control['Version'],
            architecture=architecture,
        )
        rest = dict(
            source=source,
            maintainer=maintainer,
            poolfile=db_file,
            binarytype=binary.type,
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            fingerprint=fingerprint,
        )

        try:
            db_binary = session.query(DBBinary).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_binary, key) != value:
                    raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.items():
                setattr(db_binary, key, value)
            for key, value in rest2.items():
                setattr(db_binary, key, value)
            session.add(db_binary)
            session.flush()
            import_metadata_into_db(db_binary, session)

            self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)

        session.flush()

        return db_binary

    def _ensure_extra_source_exists(self, filename: str, source: DBSource, archive: Archive, extra_archives: Optional[Iterable[Archive]] = None):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        :param filename: filename to use in error messages
        :param source: source to look for
        :param archive: archive to look in
        :param extra_archives: list of archives to copy the source package from
                               if it is not yet present in `archive`
        """
        session = self.session
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
        if db_file is not None:
            return True

        # Try to copy file from one extra archive
        if extra_archives is None:
            extra_archives = []
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([a.archive_id for a in extra_archives])).first()
        if db_file is None:
            raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)

    def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None) -> None:
        """Add Built-Using sources to ``db_binary.extra_sources``
        """
        session = self.session

        for bu_source_name, bu_source_version in daklib.utils.parse_built_using(control):
            bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
            if bu_source is None:
                raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

            self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

            db_binary.extra_sources.append(bu_source)

    def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None) -> DBSource:
        """Install source package to archive"""
        session = self.session
        control = source.dsc
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        source_name = control['Source']

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

        unique = dict(
            source=source_name,
            version=control['Version'],
        )
        rest = dict(
            maintainer=maintainer,
            poolfile=db_file_dsc,
            dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            changedby=changed_by,
            fingerprint=fingerprint,
        )

        created = False
        try:
            db_source = session.query(DBSource).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_source, key) != value:
                    raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
        except NoResultFound:
            created = True
            db_source = DBSource(**unique)
            for key, value in rest.items():
                setattr(db_source, key, value)
            for key, value in rest2.items():
                setattr(db_source, key, value)
            session.add(db_source)
            session.flush()

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)
            session.flush()

        if not created:
            for f in db_source.srcfiles:
                self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
            return db_source

        ### Now add remaining files and copy them to the archive.

        for hashed_file in source.files.values():
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            if os.path.exists(hashed_file_path):
                db_file = self._install_file(directory, hashed_file, archive, component, source_name)
                session.add(db_file)
            else:
                db_file = self.get_file(hashed_file, source_name)
                self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file
            session.add(db_dsc_file)

        session.flush()

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if 'Uploaders' in control:
            from daklib.textutils import split_uploaders
            for u in split_uploaders(control['Uploaders']):
                db_source.uploaders.append(get_or_set_maintainer(u, session))
        session.flush()

        return db_source

    def install_source(self, directory: str, source: daklib.upload.Source, suite: Suite, component: Component, changed_by: Maintainer, allow_tainted: bool = False, fingerprint: Optional[Fingerprint] = None) -> DBSource:
        """Install a source package

        :param directory: directory the source package is located in
        :param source: source package to install
        :param suite: target suite
        :param component: target component
        :param changed_by: person who prepared this version of the package
        :param allow_tainted: allow to copy additional files from tainted archives
        :param fingerprint: optional fingerprint
        :return: database object for the new source
        """
        db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint)

        if suite in db_source.suites:
            return db_source
        db_source.suites.append(suite)
        self.session.flush()

        return db_source

    def _copy_file(self, db_file: PoolFile, archive: Archive, component: Component, allow_tainted: bool = False) -> None:
        """Copy a file to the given archive and component

        :param db_file: file to copy
        :param archive: target archive
        :param component: target component
        :param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        session = self.session

        if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
            query = session.query(ArchiveFile).filter_by(file=db_file)
            if not allow_tainted:
                query = query.join(Archive).filter(Archive.tainted == False)  # noqa:E712

            source_af = query.first()
            if source_af is None:
                raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)
            session.flush()
            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)

    def copy_binary(self, db_binary: DBBinary, suite: Suite, component: Component, allow_tainted: bool = False, extra_archives: Optional[Iterable[Archive]] = None) -> None:
        """Copy a binary package to the given suite and component

        :param db_binary: binary to copy
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy from tainted archives (such as NEW)
        :param extra_archives: extra archives to copy Built-Using sources from
        """
        session = self.session
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True

        filename = db_binary.poolfile.filename

        # make sure source is present in target archive
        db_source = db_binary.source
        if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
            raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))

        # make sure built-using packages are present in target archive
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

        # copy binary
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        self.session.flush()

    def copy_source(self, db_source: DBSource, suite: Suite, component: Component, allow_tainted: bool = False) -> None:
        """Copy a source package to the given suite and component

        :param db_source: source to copy
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True
        for db_dsc_file in db_source.srcfiles:
            self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
        if suite not in db_source.suites:
            db_source.suites.append(suite)
        self.session.flush()

    def remove_file(self, db_file: PoolFile, archive: Archive, component: Component) -> None:
        """Remove a file from a given archive and component

        :param db_file: file to remove
        :param archive: archive to remove the file from
        :param component: component to remove the file from
        """
        af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component)
        self.fs.unlink(af.path)
        self.session.delete(af)

    def remove_binary(self, binary: DBBinary, suite: Suite) -> None:
        """Remove a binary from a given suite and component

        :param binary: binary to remove
        :param suite: suite to remove the package from
        """
        binary.suites.remove(suite)
        self.session.flush()

    def remove_source(self, source: DBSource, suite: Suite) -> None:
        """Remove a source from a given suite and component

        :param source: source to remove
        :param suite: suite to remove the package from

        :raises ArchiveException: source package is still referenced by other
                                  binaries in the suite
        """
        session = self.session

        query = session.query(DBBinary).filter_by(source=source) \
            .filter(DBBinary.suites.contains(suite))
        if query.first() is not None:
            raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))

        source.suites.remove(suite)
        session.flush()

    def commit(self) -> None:
        """commit changes"""
        try:
            self.session.commit()
            self.fs.commit()
        finally:
            self.session.rollback()
            self.fs.rollback()

    def rollback(self) -> None:
        """rollback changes"""
        self.session.rollback()
        self.fs.rollback()

    def flush(self) -> None:
        """flush underlying database session"""
        self.session.flush()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.commit()
        else:
            self.rollback()
        return None


def source_component_from_package_list(package_list: 'daklib.packagelist.PackageList', suite: Suite) -> Optional[Component]:
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    :param package_list: package list of the source to get the override for
    :param suite: suite to consider for binaries produced
    :return: component for the given source or :const:`None`
    """
    if package_list.fallback:
        return None
    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    query = session.query(Component).order_by(Component.ordering) \
        .filter(Component.component_name.in_(components))
    return query.first()
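# Illustration with hypothetical data: for a source whose Package-List announces
# binaries in both "contrib" and "main", and a component ordering that sorts
# "main" before "contrib", the query above returns the Component row for "main".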


class ArchiveUpload:
    """handle an upload

    This class can be used in a with-statement::

        with ArchiveUpload(...) as upload:
            ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """
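    # A rough sketch of the expected call sequence (driver code like this lives
    # outside this class, e.g. in dak's process-upload; the surrounding variable
    # names are assumptions, not part of this module):
    #
    #     with ArchiveUpload(directory, changes, keyrings) as upload:
    #         if upload.check():
    #             if upload.new:
    #                 upload.install_to_new()
    #             else:
    #                 upload.install()
    #             upload.commit()
    #         else:
    #             print(upload.reject_reasons)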

    def __init__(self, directory: str, changes, keyrings):
        self.transaction: ArchiveTransaction = ArchiveTransaction()
        """transaction used to handle the upload"""

        self.session = self.transaction.session
        """database session"""

        self.original_directory: str = directory
        self.original_changes = changes

        self.changes: Optional[daklib.upload.Changes] = None
        """upload to process"""

        self.directory: str = None
        """directory with temporary copy of files. set by :meth:`prepare`"""

        self.keyrings = keyrings

        self.fingerprint: Fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
        """fingerprint of the key used to sign the upload"""

        self.reject_reasons: list[str] = []
        """reasons why the upload cannot be accepted"""

        self.warnings: list[str] = []
        """warnings

        .. note::

           Not used yet.
        """

        self.final_suites = None

        self.new: bool = False
        """upload is NEW. set by :meth:`check`"""

        self._checked: bool = False
        """checks passed. set by :meth:`check`"""

        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite

    def warn(self, message: str) -> None:
        """add a warning message

        Adds a warning message that can later be seen in :attr:`warnings`

        :param message: warning message
        """
        self.warnings.append(message)

    def prepare(self):
        """prepare upload for further processing

        This copies the files involved to a temporary directory. If you use
        this method directly, you have to remove the directory given by the
        :attr:`directory` attribute later on your own.

        Instead of using the method directly, you can also use a with-statement::

            with ArchiveUpload(...) as upload:
                ...

        This will automatically handle any required cleanup.
        """
        assert self.directory is None
        assert self.original_changes.valid_signature

        cnf = Config()
        session = self.transaction.session

        group = cnf.get('Dinstall::UnprivGroup') or None
        self.directory = daklib.utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
                                                   mode=0o2750, group=group)
        with FilesystemTransaction() as fs:
            src = os.path.join(self.original_directory, self.original_changes.filename)
            dst = os.path.join(self.directory, self.original_changes.filename)
            fs.copy(src, dst, mode=0o640)

            self.changes = daklib.upload.Changes(self.directory, self.original_changes.filename, self.keyrings)

            files = {}
            try:
                files = self.changes.files
            except daklib.upload.InvalidChangesException:
                # Do not raise an exception; upload will be rejected later
                # due to the missing files
                pass

            for f in files.values():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(src):
                    continue
                fs.copy(src, dst, mode=0o640)

            source = None
            try:
                source = self.changes.source
            except Exception:
                # Do not raise an exception here if the .dsc is invalid.
                pass

            if source is not None:
                for f in source.files.values():
                    src = os.path.join(self.original_directory, f.filename)
                    dst = os.path.join(self.directory, f.filename)
                    if not os.path.exists(dst):
                        try:
                            db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                            db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                            fs.copy(db_archive_file.path, dst, mode=0o640)
                        except KeyError:
                            # Ignore if get_file could not find it. Upload will
                            # probably be rejected later.
                            pass

    def unpacked_source(self) -> Optional[str]:
        """Path to unpacked source

        Get path to the unpacked source. This method does unpack the source
        into a temporary directory under :attr:`directory` if it has not
        been done so already.

        :return: string giving the path to the unpacked source directory
                 or :const:`None` if no source was included in the upload.
        """
        assert self.directory is not None

        source = self.changes.source
        if source is None:
            return None
        dsc_path = os.path.join(self.directory, source._dsc_file.filename)

        sourcedir = os.path.join(self.directory, 'source')
        if not os.path.exists(sourcedir):
            subprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=subprocess.DEVNULL)
        if not os.path.isdir(sourcedir):
            raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
        return sourcedir

    def _map_suite(self, suite_name):
        suite_names = set((suite_name, ))
        for rule in Config().value_list("SuiteMappings"):
            fields = rule.split()
            rtype = fields[0]
            if rtype == "map" or rtype == "silent-map":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.remove(src)
                    suite_names.add(dst)
                    if rtype != "silent-map":
                        self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
            elif rtype == "copy" or rtype == "silent-copy":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.add(dst)
                    if rtype != "silent-copy":
                        self.warnings.append('Copy {0} to {1}.'.format(src, dst))
            elif rtype == "ignore":
                ignored = fields[1]
                if ignored in suite_names:
                    suite_names.remove(ignored)
                    self.warnings.append('Ignoring target suite {0}.'.format(ignored))
            elif rtype == "reject":
                rejected = fields[1]
                if rejected in suite_names:
                    raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
            ## XXX: propup-version and map-unreleased not yet implemented
        return suite_names
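    # _map_suite() applies rules from the SuiteMappings configuration value.
    # Hypothetical rule strings this parser would accept (the real rules live in
    # dak's configuration, not in this module):
    #
    #     "map stable proposed-updates"    replace stable with proposed-updates
    #     "silent-copy foo foo-buildd"     also target foo-buildd, without a warning
    #     "ignore disttest"                drop disttest from the target suites
    #     "reject experimental2"           reject uploads targeting experimental2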

    def _mapped_suites(self) -> list[Suite]:
        """Get target suites after mappings

        :return: list giving the mapped target suites of this upload
        """
        session = self.session

        suite_names = set()
        for dist in self.changes.distributions:
            suite_names.update(self._map_suite(dist))

        suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
        return suites.all()

    def _check_new_binary_overrides(self, suite, overridesuite):
        new = False
        source = self.changes.source

        # Check binaries listed in the source package's Package-List field:
        if source is not None and not source.package_list.fallback:
            packages = source.package_list.packages_for_suite(suite)
            binaries = [entry for entry in packages]
            for b in binaries:
                override = self._binary_override(overridesuite, b)
                if override is None:
                    self.warnings.append('binary:{0} is NEW.'.format(b.name))
                    new = True

        # Check all uploaded packages.
        # This is necessary to account for packages without a Package-List
        # field, really late binary-only uploads (where an unused override
        # was already removed), and for debug packages uploaded to a suite
        # without a debug suite (which are then considered as NEW).
        binaries = self.changes.binaries
        for b in binaries:
            if daklib.utils.is_in_debug_section(b.control) and suite.debug_suite is not None:
                continue
            override = self._binary_override(overridesuite, b)
            if override is None:
                self.warnings.append('binary:{0} is NEW.'.format(b.name))
                new = True

        return new

    def _check_new(self, suite, overridesuite) -> bool:
        """Check if upload is NEW

        An upload is NEW if it has binary or source packages that do not have
        an override in `overridesuite` OR if it references files ONLY in a
        tainted archive (eg. when it references files in NEW).

        Debug packages (*-dbgsym in Section: debug) are not considered as NEW
        if `suite` has a separate debug suite.

        :return: :const:`True` if the upload is NEW, :const:`False` otherwise
        """
        session = self.session
        new = False

        # Check for missing overrides
        if self._check_new_binary_overrides(suite, overridesuite):
            new = True
        if self.changes.source is not None:
            override = self._source_override(overridesuite, self.changes.source)
            if override is None:
                self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
                new = True

        # Check if we reference a file only in a tainted archive
        files = list(self.changes.files.values())
        if self.changes.source is not None:
            files.extend(self.changes.source.files.values())
        for f in files:
            query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
            query_untainted = query.join(Archive).filter(Archive.tainted == False)  # noqa:E712

            in_archive = (query.first() is not None)
            in_untainted_archive = (query_untainted.first() is not None)

            if in_archive and not in_untainted_archive:
                self.warnings.append('{0} is only available in NEW.'.format(f.filename))
                new = True

        return new

    def _final_suites(self):
        session = self.session

        mapped_suites = self._mapped_suites()
        final_suites = list()

        for suite in mapped_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
            if self._check_new(suite, overridesuite):
                self.new = True
            if suite not in final_suites:
                final_suites.append(suite)

        return final_suites

    def _binary_override(self, suite: Suite, binary: 'Union[daklib.upload.Binary, daklib.packagelist.PackageListEntry]') -> Optional[Override]:
        """Get override entry for a binary

        :param suite: suite to get override for
        :param binary: binary to get override for
        :return: override for the given binary or :const:`None`
        """
        if suite.overridesuite is not None:
            suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        mapped_component = get_mapped_component(binary.component)
        if mapped_component is None:
            return None

        query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
            .join(Component).filter(Component.component_name == mapped_component.component_name) \
            .join(OverrideType).filter(OverrideType.overridetype == binary.type)

        return query.one_or_none()

    def _source_override(self, suite: Suite, source: daklib.upload.Source) -> Optional[Override]:
        """Get override entry for a source

        :param suite: suite to get override for
        :param source: source to get override for
        :return: override for the given source or :const:`None`
        """
        if suite.overridesuite is not None:
            suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

        query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc')

        component = source_component_from_package_list(source.package_list, suite)
        if component is not None:
            query = query.filter(Override.component == component)

        return query.one_or_none()

    def _binary_component(self, suite: Suite, binary: daklib.upload.Binary, only_overrides: bool = True) -> Optional[Component]:
        """get component for a binary

        By default this will only look at overrides to get the right component;
        if `only_overrides` is :const:`False` this method will also look at the
        Section field.

        :param only_overrides: only use overrides to get the right component
        """
        override = self._binary_override(suite, binary)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(binary.component, self.session)

    def _source_component(self, suite: Suite, source: daklib.upload.Binary, only_overrides: bool = True) -> Optional[Component]:
        """get component for a source

        By default this will only look at overrides to get the right component;
        if `only_overrides` is :const:`False` this method will also look at the
        Section field.

        :param only_overrides: only use overrides to get the right component
        """
        override = self._source_override(suite, source)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(source.component, self.session)

    def check(self, force: bool = False) -> bool:
        """run checks against the upload

        :param force: ignore failing forcible checks
        :return: :const:`True` if all checks passed, :const:`False` otherwise
        """
        # XXX: needs to be better structured.
        assert self.changes.valid_signature

        try:
            # Validate signatures and hashes before we do any real work:
            for chk in (
                    checks.SignatureAndHashesCheck,
                    checks.WeakSignatureCheck,
                    checks.SignatureTimestampCheck,
                    checks.ChangesCheck,
                    checks.ExternalHashesCheck,
                    checks.SourceCheck,
                    checks.BinaryCheck,
                    checks.BinaryMembersCheck,
                    checks.BinaryTimestampCheck,
                    checks.SingleDistributionCheck,
                    checks.ArchAllBinNMUCheck,
            ):
                chk().check(self)

            final_suites = self._final_suites()
            if len(final_suites) == 0:
                self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
                return False

            self.final_suites = final_suites

            for chk in (
                    checks.TransitionCheck,
                    checks.ACLCheck,
                    checks.NewOverrideCheck,
                    checks.NoSourceOnlyCheck,
                    checks.LintianCheck,
            ):
                chk().check(self)

            for chk in (
                    checks.SuiteCheck,
                    checks.ACLCheck,
                    checks.SourceFormatCheck,
                    checks.SuiteArchitectureCheck,
                    checks.VersionCheck,
            ):
                for suite in final_suites:
                    chk().per_suite_check(self, suite)

            if len(self.reject_reasons) != 0:
                return False

            self._checked = True
            return True
        except checks.Reject as e:
            self.reject_reasons.append(str(e))
        except Exception as e:
            self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
        return False

    def _install_to_suite(
            self,
            target_suite: Suite,
            suite: Suite,
            source_component_func: Callable[[daklib.upload.Source], Component],
            binary_component_func: Callable[[daklib.upload.Binary], Component],
            source_suites=None,
            extra_source_archives: Optional[Iterable[Archive]] = None,
            policy_upload: bool = False
    ) -> tuple[Optional[DBSource], list[DBBinary]]:
        """Install upload to the given suite

        :param target_suite: target suite (before redirection to policy queue or NEW)
        :param suite: suite to install the package into. This is the real suite,
                      ie. after any redirection to NEW or a policy queue
        :param source_component_func: function to get the :class:`daklib.dbconn.Component`
                                      for a :class:`daklib.upload.Source` object
        :param binary_component_func: function to get the :class:`daklib.dbconn.Component`
                                      for a :class:`daklib.upload.Binary` object
        :param source_suites: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
        :param extra_source_archives: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
        :param policy_upload: Boolean indicating upload to policy queue (including NEW)
        :return: tuple with two elements. The first is a :class:`daklib.dbconn.DBSource`
                 object for the install source or :const:`None` if no source was
                 included. The second is a list of :class:`daklib.dbconn.DBBinary`
                 objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?

        control = self.changes.changes
        changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

        if source_suites is None:
            source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(
                self.directory,
                source,
                suite,
                component,
                changed_by,
                fingerprint=self.fingerprint
            )
        else:
            db_source = None

        db_binaries = []
        for binary in sorted(self.changes.binaries, key=lambda x: x.name):
            copy_to_suite = suite
            if daklib.utils.is_in_debug_section(binary.control) and suite.debug_suite is not None:
                copy_to_suite = suite.debug_suite

            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(
                self.directory,
                binary,
                copy_to_suite,
                component,
                fingerprint=self.fingerprint,
                source_suites=source_suites,
                extra_source_archives=extra_source_archives
            )
            db_binaries.append(db_binary)

            if not policy_upload:
                check_upload_for_external_signature_request(self.session, target_suite, copy_to_suite, db_binary)

        if suite.copychanges:
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

        suite.update_last_changed()

        return (db_source, db_binaries)

    def _install_changes(self) -> DBChange:
        assert self.changes.valid_signature
        control = self.changes.changes
        session = self.transaction.session
        config = Config()

        changelog_id = None
        # Only add changelog for sourceful uploads and binNMUs
        if self.changes.sourceful or re_bin_only_nmu.search(control['Version']):
            query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
            changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
            assert changelog_id is not None

        db_changes = DBChange()
        db_changes.changesname = self.changes.filename
        db_changes.source = control['Source']
        db_changes.binaries = control.get('Binary', None)
        db_changes.architecture = control['Architecture']
        db_changes.version = control['Version']
        db_changes.distribution = control['Distribution']
        db_changes.urgency = control['Urgency']
        db_changes.maintainer = control['Maintainer']
        db_changes.changedby = control.get('Changed-By', control['Maintainer'])
        db_changes.date = control['Date']
        db_changes.fingerprint = self.fingerprint.fingerprint
        db_changes.changelog_id = changelog_id
        db_changes.closes = self.changes.closed_bugs

        try:
            self.transaction.session.add(db_changes)
            self.transaction.session.flush()
        except sqlalchemy.exc.IntegrityError:
            raise ArchiveException('{0} is already known.'.format(self.changes.filename))

        return db_changes

    def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries) -> PolicyQueueUpload:
        """install upload to policy queue"""
        u = PolicyQueueUpload()
        u.policy_queue = policy_queue
        u.target_suite = target_suite
        u.changes = db_changes
        u.source = db_source
        u.binaries = db_binaries
        self.transaction.session.add(u)
        self.transaction.session.flush()

        queue_files = [self.changes.filename]
        queue_files.extend(f.filename for f in self.changes.buildinfo_files)
        for fn in queue_files:
            src = os.path.join(self.changes.directory, fn)
            dst = os.path.join(policy_queue.path, fn)
            self.transaction.fs.copy(src, dst, mode=policy_queue.change_perms)

        return u

    def try_autobyhand(self) -> bool:
        """Try AUTOBYHAND

        Try to handle byhand packages automatically.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        byhand = self.changes.byhand_files
        if len(byhand) == 0:
            return True

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"
        suite = suites[0]

        cnf = Config()
        control = self.changes.changes
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

        remaining = []
        for f in byhand:
            if '_' in f.filename:
                parts = f.filename.split('_', 2)
                if len(parts) != 3:
                    print("W: unexpected byhand filename {0}. No automatic processing.".format(f.filename))
                    remaining.append(f)
                    continue

                package, version, archext = parts
                arch, ext = archext.split('.', 1)
            else:
                parts = f.filename.split('.')
                if len(parts) < 2:
                    print("W: unexpected byhand filename {0}. No automatic processing.".format(f.filename))
                    remaining.append(f)
                    continue

                package = parts[0]
                version = '0'
                arch = 'all'
                ext = parts[-1]

            try:
                rule = automatic_byhand_packages.subtree(package)
            except KeyError:
                remaining.append(f)
                continue

            if rule['Source'] != self.changes.source_name \
                    or rule['Section'] != f.section \
                    or ('Extension' in rule and rule['Extension'] != ext):
                remaining.append(f)
                continue

            script = rule['Script']
            retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename), suite.suite_name], shell=False)
            if retcode != 0:
                print("W: error processing {0}.".format(f.filename))
                remaining.append(f)

        return len(remaining) == 0
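    # try_autobyhand() expects BYHAND files named "<package>_<version>_<arch>.<ext>"
    # (or "<package>.<ext>" as a fallback) and looks the package up in the
    # AutomaticByHandPackages configuration subtree.  A hypothetical entry
    # providing the keys read above (Source, Section, Extension, Script) might
    # look like this in dak's apt-style configuration:
    #
    #     AutomaticByHandPackages::debian-faq {
    #         Source "debian-faq";
    #         Section "byhand";
    #         Script "/usr/local/bin/handle-byhand-faq";
    #     };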

    def _install_byhand(self, policy_queue_upload: PolicyQueueUpload, hashed_file: daklib.upload.HashedFile) -> PolicyQueueByhandFile:
        """install byhand file"""
        fs = self.transaction.fs
        session = self.transaction.session
        policy_queue = policy_queue_upload.policy_queue

        byhand_file = PolicyQueueByhandFile()
        byhand_file.upload = policy_queue_upload
        byhand_file.filename = hashed_file.filename
        session.add(byhand_file)
        session.flush()

        src = os.path.join(self.directory, hashed_file.filename)
        dst = os.path.join(policy_queue.path, hashed_file.filename)
        fs.copy(src, dst, mode=policy_queue.change_perms)

        return byhand_file

    def _do_bts_versiontracking(self) -> None:
        cnf = Config()
        fs = self.transaction.fs

        btsdir = cnf.get('Dir::BTSVersionTrack')
        if btsdir is None or btsdir == '':
            return

        base = os.path.join(btsdir, self.changes.filename[:-8])

        # version history
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            dch_path = os.path.join(sourcedir, 'debian', 'changelog')
            with open(dch_path, 'r') as fh:
                versions = fs.create("{0}.versions".format(base), mode=0o644)
                for line in fh.readlines():
                    if re_changelog_versions.match(line):
                        versions.write(line)
                versions.close()

        # binary -> source mapping
        if self.changes.binaries:
            debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
            for binary in self.changes.binaries:
                control = binary.control
                source_package, source_version = binary.source
                line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
                print(line, file=debinfo)
            debinfo.close()

    def _policy_queue(self, suite) -> Optional[PolicyQueue]:
        if suite.policy_queue is not None:
            return suite.policy_queue
        return None

    def install(self) -> None:
        """install upload

        Install upload to a suite or policy queue. This method does **not**
        handle uploads to NEW.

        You need to have called the :meth:`check` method before calling this method.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked
        assert not self.new

        db_changes = self._install_changes()

        for suite in self.final_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

            policy_queue = self._policy_queue(suite)
            policy_upload = False

            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite
                policy_upload = True

            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                    .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                    .filter(VersionCheck.check == 'Enhances'):
                source_suite_ids.add(enhanced_suite_id)

            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()

            def source_component_func(source):
                return self._source_component(overridesuite, source, only_overrides=False)

            def binary_component_func(binary):
                return self._binary_component(overridesuite, binary, only_overrides=False)

            (db_source, db_binaries) = self._install_to_suite(suite, redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive], policy_upload=policy_upload)

            if policy_queue is not None:
                self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)

            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(suite, build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

        self._do_bts_versiontracking()

    def install_to_new(self) -> None:
        """install upload to NEW

        Install upload to NEW. This method does **not** handle regular uploads
        to suites or policy queues.

        You need to have called the :meth:`check` method before calling this method.
        """
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        source = self.changes.source
        binaries = self.changes.binaries
        byhand = self.changes.byhand_files

        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"
        suite = suites[0]

        # decide which NEW queue to use
        if suite.new_queue is None:
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
        else:
            new_queue = suite.new_queue
        if len(byhand) > 0:
            # There is only one global BYHAND queue
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
        new_suite = new_queue.suite

        def binary_component_func(binary):
            return self._binary_component(suite, binary, only_overrides=False)

        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        source_component_name = None
        for c in self.session.query(Component).order_by(Component.component_id):
            guess = c.component_name
            if guess in binary_component_names:
                source_component_name = guess
                break
        if source_component_name is None:
            source_component = self.session.query(Component).order_by(Component.component_id).first()
        else:
            source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()

        def source_component_func(source):
            return source_component

        db_changes = self._install_changes()
        (db_source, db_binaries) = self._install_to_suite(suite, new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive], policy_upload=True)
        policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

        for f in byhand:
            self._install_byhand(policy_upload, f)

        self._do_bts_versiontracking()

    def commit(self) -> None:
        """commit changes"""
        self.transaction.commit()

    def rollback(self) -> None:
        """rollback changes"""
        self.transaction.rollback()

    def __enter__(self):
        self.prepare()
        return self

    def __exit__(self, type, value, traceback):
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        self.changes = None
        self.transaction.rollback()
        return None