# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

17"""module to manipulate the archive
19This module provides classes to manipulate the archive.
20"""
import os
import shutil
import subprocess
import traceback
from collections.abc import Callable, Iterable
from typing import TYPE_CHECKING, Optional, Union

import sqlalchemy.exc
from sqlalchemy.orm import object_session
from sqlalchemy.orm.exc import NoResultFound

import daklib.checks as checks
import daklib.upload
import daklib.utils
from daklib.config import Config
from daklib.dbconn import (
    Archive,
    ArchiveFile,
    Component,
    DBBinary,
    DBChange,
    DBConn,
    DBSource,
    DSCFile,
    Fingerprint,
    Maintainer,
    Override,
    OverrideType,
    PolicyQueue,
    PolicyQueueByhandFile,
    PolicyQueueUpload,
    PoolFile,
    Suite,
    VersionCheck,
    get_architecture,
    get_mapped_component,
    get_or_set_maintainer,
    import_metadata_into_db,
)
from daklib.externalsignature import check_upload_for_external_signature_request
from daklib.fstransactions import FilesystemTransaction
from daklib.regexes import re_bin_only_nmu, re_changelog_versions

if TYPE_CHECKING:
    import daklib.packagelist


class ArchiveException(Exception):
    pass


class HashMismatchException(ArchiveException):
    pass


class ArchiveTransaction:
    """manipulate the archive in a transaction"""

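    # Usage sketch (illustrative only; `db_binary`, `suite` and `component`
    # stand for objects obtained elsewhere):
    #
    #     with ArchiveTransaction() as transaction:
    #         transaction.copy_binary(db_binary, suite, component)
    #     # __exit__ commits on success and rolls back if an exception was raised.
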
    def __init__(self):
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()

    def get_file(
        self,
        hashed_file: daklib.upload.HashedFile,
        source_name: str,
        check_hashes: bool = True,
    ) -> PoolFile:
        """Look for file `hashed_file` in database

        :param hashed_file: file to look for in the database
        :param source_name: source package name
        :param check_hashes: check size and hashes match
        :return: database entry for the file
        :raises KeyError: file was not found in the database
        :raises HashMismatchException: hash mismatch
        """
        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
            if check_hashes and (
                poolfile.filesize != hashed_file.size
                or poolfile.md5sum != hashed_file.md5sum
                or poolfile.sha1sum != hashed_file.sha1sum
                or poolfile.sha256sum != hashed_file.sha256sum
            ):
                raise HashMismatchException(
                    "{0}: Does not match file already existing in the pool.".format(
                        hashed_file.filename
                    )
                )
            return poolfile
        except NoResultFound:
            raise KeyError("{0} not found in database.".format(poolname))

    def _install_file(
        self, directory, hashed_file, archive, component, source_name
    ) -> PoolFile:
        """Install a file

        Will not give an error when the file is already present.

        :return: database object for the new file
        """
        session = self.session

        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.get_file(hashed_file, source_name)
        except KeyError:
            poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
            poolfile.md5sum = hashed_file.md5sum
            poolfile.sha1sum = hashed_file.sha1sum
            poolfile.sha256sum = hashed_file.sha256sum
            session.add(poolfile)
            session.flush()

        try:
            session.query(ArchiveFile).filter_by(
                archive=archive, component=component, file=poolfile
            ).one()
        except NoResultFound:
            archive_file = ArchiveFile(archive, component, poolfile)
            session.add(archive_file)
            session.flush()

            path = os.path.join(
                archive.path, "pool", component.component_name, poolname
            )
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)

        return poolfile

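    # For orientation: the pool path assembled above follows the usual Debian
    # pool layout, e.g. (assuming a source package "hello" in component "main")
    #
    #     <archive.path>/pool/main/h/hello/hello_2.10-2_amd64.deb
    #
    # where the "h/hello" part comes from daklib.utils.poolify(); "lib*" source
    # packages are expected to use a longer prefix such as "libh/libhello".
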
    def install_binary(
        self,
        directory: str,
        binary: daklib.upload.Binary,
        suite: Suite,
        component: Component,
        allow_tainted: bool = False,
        fingerprint: Optional[Fingerprint] = None,
        source_suites=None,
        extra_source_archives: Optional[Iterable[Archive]] = None,
    ) -> DBBinary:
        """Install a binary package

        :param directory: directory the binary package is located in
        :param binary: binary package to install
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow copying additional files from tainted archives
        :param fingerprint: optional fingerprint
        :param source_suites: suites to copy the source from if they are not
            in `suite` or :const:`True` to allow copying from any
            suite.
            Can be a SQLAlchemy subquery for :class:`Suite` or :const:`True`.
        :param extra_source_archives: extra archives to copy Built-Using sources from
        :return: database object for the new package
        """
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control["Maintainer"], session)
        architecture = get_architecture(control["Architecture"], session)

        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(
            source=source_name, version=source_version
        )
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        if source is None:
            if source_suites is not True:
                source_query = source_query.join(DBSource.suites).filter(
                    Suite.suite_id == source_suites.c.id
                )
            source = source_query.first()
            if source is None:
                raise ArchiveException(
                    "{0}: trying to install to {1}, but could not find source ({2} {3})".format(
                        binary.hashed_file.filename,
                        suite.suite_name,
                        source_name,
                        source_version,
                    )
                )
            self.copy_source(source, suite, source.poolfile.component)

        db_file = self._install_file(
            directory, binary.hashed_file, suite.archive, component, source_name
        )

        unique = dict(
            package=control["Package"],
            version=control["Version"],
            architecture=architecture,
        )
        rest = dict(
            source=source,
            maintainer=maintainer,
            poolfile=db_file,
            binarytype=binary.type,
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing binary
        rest2 = dict(
            fingerprint=fingerprint,
        )

        try:
            db_binary = session.query(DBBinary).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_binary, key) != value:
                    raise ArchiveException(
                        "{0}: Does not match binary in database.".format(
                            binary.hashed_file.filename
                        )
                    )
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.items():
                setattr(db_binary, key, value)
            for key, value in rest2.items():
                setattr(db_binary, key, value)
            session.add(db_binary)
            session.flush()
            import_metadata_into_db(db_binary, session)

            self._add_built_using(
                db_binary,
                binary.hashed_file.filename,
                control,
                suite,
                extra_archives=extra_source_archives,
            )

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)

        session.flush()

        return db_binary

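    # For reference: in this module `source_suites` is either :const:`True`
    # (uploads headed to NEW, see ArchiveUpload.install_to_new) or a subquery
    # of suites whose sources may satisfy the binary (see ArchiveUpload.install
    # and the default "Enhances" subquery in ArchiveUpload._install_to_suite).
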
    def _ensure_extra_source_exists(
        self,
        filename: str,
        source: DBSource,
        archive: Archive,
        extra_archives: Optional[Iterable[Archive]] = None,
    ):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        :param filename: filename to use in error messages
        :param source: source to look for
        :param archive: archive to look in
        :param extra_archives: list of archives to copy the source package from
            if it is not yet present in `archive`
        """
        session = self.session
        db_file = (
            session.query(ArchiveFile)
            .filter_by(file=source.poolfile, archive=archive)
            .first()
        )
        if db_file is not None:
            return True

        # Try to copy file from one extra archive
        if extra_archives is None:
            extra_archives = []
        db_file = (
            session.query(ArchiveFile)
            .filter_by(file=source.poolfile)
            .filter(ArchiveFile.archive_id.in_([a.archive_id for a in extra_archives]))
            .first()
        )
        if db_file is None:
            raise ArchiveException(
                "{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.".format(
                    filename, source.source, source.version, archive.archive_name
                )
            )

        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = (
                session.query(ArchiveFile)
                .filter_by(
                    file=dsc_file.poolfile,
                    archive=source_archive,
                    component=db_file.component,
                )
                .one()
            )
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)

    def _add_built_using(
        self, db_binary, filename, control, suite, extra_archives=None
    ) -> None:
        """Add Built-Using sources to ``db_binary.extra_sources``"""
        session = self.session

        for bu_source_name, bu_source_version in daklib.utils.parse_built_using(
            control
        ):
            bu_source = (
                session.query(DBSource)
                .filter_by(source=bu_source_name, version=bu_source_version)
                .first()
            )
            if bu_source is None:
                raise ArchiveException(
                    "{0}: Built-Using refers to non-existing source package {1} (= {2})".format(
                        filename, bu_source_name, bu_source_version
                    )
                )

            self._ensure_extra_source_exists(
                filename, bu_source, suite.archive, extra_archives=extra_archives
            )

            db_binary.extra_sources.append(bu_source)

    def install_source_to_archive(
        self,
        directory,
        source,
        archive,
        component,
        changed_by,
        allow_tainted=False,
        fingerprint=None,
    ) -> DBSource:
        """Install source package to archive"""
        session = self.session
        control = source.dsc
        maintainer = get_or_set_maintainer(control["Maintainer"], session)
        source_name = control["Source"]

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(
            directory, source._dsc_file, archive, component, source_name
        )

        unique = dict(
            source=source_name,
            version=control["Version"],
        )
        rest = dict(
            maintainer=maintainer,
            poolfile=db_file_dsc,
            dm_upload_allowed=(control.get("DM-Upload-Allowed", "no") == "yes"),
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            changedby=changed_by,
            fingerprint=fingerprint,
        )

        created = False
        try:
            db_source = session.query(DBSource).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_source, key) != value:
                    raise ArchiveException(
                        "{0}: Does not match source in database.".format(
                            source._dsc_file.filename
                        )
                    )
        except NoResultFound:
            created = True
            db_source = DBSource(**unique)
            for key, value in rest.items():
                setattr(db_source, key, value)
            for key, value in rest2.items():
                setattr(db_source, key, value)
            session.add(db_source)
            session.flush()

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)
            session.flush()

        if not created:
            for f in db_source.srcfiles:
                self._copy_file(
                    f.poolfile, archive, component, allow_tainted=allow_tainted
                )
            return db_source

        ### Now add remaining files and copy them to the archive.

        for hashed_file in source.files.values():
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            if os.path.exists(hashed_file_path):
                db_file = self._install_file(
                    directory, hashed_file, archive, component, source_name
                )
                session.add(db_file)
            else:
                db_file = self.get_file(hashed_file, source_name)
                self._copy_file(
                    db_file, archive, component, allow_tainted=allow_tainted
                )

            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file
            session.add(db_dsc_file)

        session.flush()

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if "Uploaders" in control:
            from daklib.textutils import split_uploaders

            for u in split_uploaders(control["Uploaders"]):
                db_source.uploaders.append(get_or_set_maintainer(u, session))

        session.flush()

        return db_source

    def install_source(
        self,
        directory: str,
        source: daklib.upload.Source,
        suite: Suite,
        component: Component,
        changed_by: Maintainer,
        allow_tainted: bool = False,
        fingerprint: Optional[Fingerprint] = None,
    ) -> DBSource:
        """Install a source package

        :param directory: directory the source package is located in
        :param source: source package to install
        :param suite: target suite
        :param component: target component
        :param changed_by: person who prepared this version of the package
        :param allow_tainted: allow copying additional files from tainted archives
        :param fingerprint: optional fingerprint
        :return: database object for the new source
        """
        db_source = self.install_source_to_archive(
            directory,
            source,
            suite.archive,
            component,
            changed_by,
            allow_tainted,
            fingerprint,
        )

        if suite in db_source.suites:
            return db_source

        db_source.suites.append(suite)
        self.session.flush()

        return db_source

    def _copy_file(
        self,
        db_file: PoolFile,
        archive: Archive,
        component: Component,
        allow_tainted: bool = False,
    ) -> None:
        """Copy a file to the given archive and component

        :param db_file: file to copy
        :param archive: target archive
        :param component: target component
        :param allow_tainted: allow copying from tainted archives (such as NEW)
        """
        session = self.session

        if (
            session.query(ArchiveFile)
            .filter_by(archive=archive, component=component, file=db_file)
            .first()
            is None
        ):
            query = session.query(ArchiveFile).filter_by(file=db_file)
            if not allow_tainted:
                query = query.join(Archive).filter(
                    Archive.tainted == False  # noqa:E712
                )

            source_af = query.first()
            if source_af is None:
                raise ArchiveException(
                    "cp: Could not find {0} in any archive.".format(db_file.filename)
                )
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)
            session.flush()
            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)

    def copy_binary(
        self,
        db_binary: DBBinary,
        suite: Suite,
        component: Component,
        allow_tainted: bool = False,
        extra_archives: Optional[Iterable[Archive]] = None,
    ) -> None:
        """Copy a binary package to the given suite and component

        :param db_binary: binary to copy
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow copying from tainted archives (such as NEW)
        :param extra_archives: extra archives to copy Built-Using sources from
        """
        session = self.session
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True

        filename = db_binary.poolfile.filename

        # make sure source is present in target archive
        db_source = db_binary.source
        if (
            session.query(ArchiveFile)
            .filter_by(archive=archive, file=db_source.poolfile)
            .first()
            is None
        ):
            raise ArchiveException(
                "{0}: cannot copy to {1}: source is not present in target archive".format(
                    filename, suite.suite_name
                )
            )

        # make sure built-using packages are present in target archive
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(
                filename, db_source, archive, extra_archives=extra_archives
            )

        # copy binary
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        self.session.flush()

    def copy_source(
        self,
        db_source: DBSource,
        suite: Suite,
        component: Component,
        allow_tainted: bool = False,
    ) -> None:
        """Copy a source package to the given suite and component

        :param db_source: source to copy
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow copying from tainted archives (such as NEW)
        """
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True
        for db_dsc_file in db_source.srcfiles:
            self._copy_file(
                db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted
            )
        if suite not in db_source.suites:
            db_source.suites.append(suite)
        self.session.flush()

    def remove_file(
        self, db_file: PoolFile, archive: Archive, component: Component
    ) -> None:
        """Remove a file from a given archive and component

        :param db_file: file to remove
        :param archive: archive to remove the file from
        :param component: component to remove the file from
        """
        af = (
            self.session.query(ArchiveFile)
            .filter_by(file=db_file, archive=archive, component=component)
            .one()
        )
        self.fs.unlink(af.path)
        self.session.delete(af)

    def remove_binary(self, binary: DBBinary, suite: Suite) -> None:
        """Remove a binary from a given suite

        :param binary: binary to remove
        :param suite: suite to remove the package from
        """
        binary.suites.remove(suite)
        self.session.flush()

    def remove_source(self, source: DBSource, suite: Suite) -> None:
        """Remove a source from a given suite

        :param source: source to remove
        :param suite: suite to remove the package from

        :raises ArchiveException: source package is still referenced by other
            binaries in the suite
        """
        session = self.session

        query = (
            session.query(DBBinary)
            .filter_by(source=source)
            .filter(DBBinary.suites.contains(suite))
        )
        if query.first() is not None:
            raise ArchiveException(
                "src:{0} is still used by binaries in suite {1}".format(
                    source.source, suite.suite_name
                )
            )

        source.suites.remove(suite)
        session.flush()

    def commit(self) -> None:
        """commit changes"""
        try:
            self.session.commit()
            self.fs.commit()
        finally:
            self.session.rollback()
            self.fs.rollback()

    def rollback(self) -> None:
        """rollback changes"""
        self.session.rollback()
        self.fs.rollback()

    def flush(self) -> None:
        """flush underlying database session"""
        self.session.flush()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if type is None:
            self.commit()
        else:
            self.rollback()
        return None


def source_component_from_package_list(
    package_list: "daklib.packagelist.PackageList", suite: Suite
) -> Optional[Component]:
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    :param package_list: package list of the source to get the override for
    :param suite: suite to consider for binaries produced
    :return: component for the given source or :const:`None`
    """
    if package_list.fallback:
        return None
    session = object_session(suite)
    packages = package_list.packages_for_suite(suite)
    components = set(p.component for p in packages)
    query = (
        session.query(Component)
        .order_by(Component.ordering)
        .filter(Component.component_name.in_(components))
    )
    return query.first()

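# Illustration with hypothetical values: for a .dsc containing
#
#     Package-List:
#      foo deb admin optional arch=any
#      foo-data deb contrib/admin optional arch=all
#
# the listed binaries resolve to the components "main" and "contrib", and the
# query above returns whichever of the two sorts first by Component.ordering
# (normally "main").

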
class ArchiveUpload:
    """handle an upload

    This class can be used in a with-statement::

        with ArchiveUpload(...) as upload:
            ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """

    def __init__(self, directory: str, changes, keyrings):
        self.transaction: ArchiveTransaction = ArchiveTransaction()
        """transaction used to handle the upload"""

        self.session = self.transaction.session
        """database session"""

        self.original_directory: str = directory
        self.original_changes = changes

        self.changes: Optional[daklib.upload.Changes] = None
        """upload to process"""

        self.directory: str = None
        """directory with temporary copy of files. set by :meth:`prepare`"""

        self.keyrings = keyrings

        self.fingerprint: Fingerprint = (
            self.session.query(Fingerprint)
            .filter_by(fingerprint=changes.primary_fingerprint)
            .one()
        )
        """fingerprint of the key used to sign the upload"""

        self.reject_reasons: list[str] = []
        """reasons why the upload cannot be accepted"""

        self.warnings: list[str] = []
        """warnings

        .. note::

            Not used yet.
        """

        self.final_suites = None

        self.new: bool = False
        """upload is NEW. set by :meth:`check`"""

        self._checked: bool = False
        """checks passed. set by :meth:`check`"""

        self._new_queue = (
            self.session.query(PolicyQueue).filter_by(queue_name="new").one()
        )
        self._new = self._new_queue.suite

    def warn(self, message: str) -> None:
        """add a warning message

        Adds a warning message that can later be seen in :attr:`warnings`

        :param message: warning message
        """
        self.warnings.append(message)

    def prepare(self):
        """prepare upload for further processing

        This copies the files involved to a temporary directory. If you use
        this method directly, you have to remove the directory given by the
        :attr:`directory` attribute later on your own.

        Instead of using the method directly, you can also use a with-statement::

            with ArchiveUpload(...) as upload:
                ...

        This will automatically handle any required cleanup.
        """
        assert self.directory is None
        assert self.original_changes.valid_signature

        cnf = Config()
        session = self.transaction.session

        group = cnf.get("Dinstall::UnprivGroup") or None
        self.directory = daklib.utils.temp_dirname(
            parent=cnf.get("Dir::TempPath"), mode=0o2750, group=group
        )
        with FilesystemTransaction() as fs:
            src = os.path.join(self.original_directory, self.original_changes.filename)
            dst = os.path.join(self.directory, self.original_changes.filename)
            fs.copy(src, dst, mode=0o640)

            self.changes = daklib.upload.Changes(
                self.directory, self.original_changes.filename, self.keyrings
            )

            files = {}
            try:
                files = self.changes.files
            except daklib.upload.InvalidChangesException:
                # Do not raise an exception; upload will be rejected later
                # due to the missing files
                pass

            for f in files.values():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(src):
                    continue
                fs.copy(src, dst, mode=0o640)

            source = None
            try:
                source = self.changes.source
            except Exception:
                # Do not raise an exception here if the .dsc is invalid.
                pass

            if source is not None:
                for f in source.files.values():
                    src = os.path.join(self.original_directory, f.filename)
                    dst = os.path.join(self.directory, f.filename)
                    if not os.path.exists(dst):
                        try:
                            db_file = self.transaction.get_file(
                                f, source.dsc["Source"], check_hashes=False
                            )
                            db_archive_file = (
                                session.query(ArchiveFile)
                                .filter_by(file=db_file)
                                .first()
                            )
                            fs.copy(db_archive_file.path, dst, mode=0o640)
                        except KeyError:
                            # Ignore if get_file could not find it. Upload will
                            # probably be rejected later.
                            pass

    def unpacked_source(self) -> Optional[str]:
        """Path to unpacked source

        Get path to the unpacked source. This method does unpack the source
        into a temporary directory under :attr:`directory` if this has not
        been done already.

        :return: string giving the path to the unpacked source directory
            or :const:`None` if no source was included in the upload.
        """
        assert self.directory is not None

        source = self.changes.source
        if source is None:
            return None
        dsc_path = os.path.join(self.directory, source._dsc_file.filename)

        sourcedir = os.path.join(self.directory, "source")
        if not os.path.exists(sourcedir):
            subprocess.check_call(
                ["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir],
                shell=False,
                stdout=subprocess.DEVNULL,
            )
        if not os.path.isdir(sourcedir):
            raise Exception(
                "{0} is not a directory after extracting source package".format(
                    sourcedir
                )
            )
        return sourcedir

    def _map_suite(self, suite_name):
        suite_names = set((suite_name,))
        for rule in Config().value_list("SuiteMappings"):
            fields = rule.split()
            rtype = fields[0]
            if rtype == "map" or rtype == "silent-map":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.remove(src)
                    suite_names.add(dst)
                    if rtype != "silent-map":
                        self.warnings.append("Mapping {0} to {1}.".format(src, dst))
            elif rtype == "copy" or rtype == "silent-copy":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.add(dst)
                    if rtype != "silent-copy":
                        self.warnings.append("Copy {0} to {1}.".format(src, dst))
            elif rtype == "ignore":
                ignored = fields[1]
                if ignored in suite_names:
                    suite_names.remove(ignored)
                    self.warnings.append("Ignoring target suite {0}.".format(ignored))
            elif rtype == "reject":
                rejected = fields[1]
                if rejected in suite_names:
                    raise checks.Reject(
                        "Uploads to {0} are not accepted.".format(rejected)
                    )
            ## XXX: propup-version and map-unreleased not yet implemented
        return suite_names

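    # The SuiteMappings entries parsed above are whitespace-separated rules of
    # the forms handled by the if/elif chain. Hypothetical examples (the real
    # values live in the dak configuration):
    #
    #     "map stable proposed-updates"
    #     "silent-map stable-security proposed-updates"
    #     "copy experimental unstable"
    #     "ignore oldstable"
    #     "reject frozen"
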
    def _mapped_suites(self) -> list[Suite]:
        """Get target suites after mappings

        :return: list giving the mapped target suites of this upload
        """
        session = self.session

        suite_names = set()
        for dist in self.changes.distributions:
            suite_names.update(self._map_suite(dist))

        suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
        return suites.all()

    def _check_new_binary_overrides(self, suite, overridesuite):
        new = False
        source = self.changes.source

        # Check binaries listed in the source package's Package-List field:
        if source is not None and not source.package_list.fallback:
            packages = source.package_list.packages_for_suite(suite)
            binaries = [entry for entry in packages]
            for b in binaries:
                override = self._binary_override(overridesuite, b)
                if override is None:
                    self.warnings.append("binary:{0} is NEW.".format(b.name))
                    new = True

        # Check all uploaded packages.
        # This is necessary to account for packages without a Package-List
        # field, really late binary-only uploads (where an unused override
        # was already removed), and for debug packages uploaded to a suite
        # without a debug suite (which are then considered as NEW).
        binaries = self.changes.binaries
        for b in binaries:
            if (
                daklib.utils.is_in_debug_section(b.control)
                and suite.debug_suite is not None
            ):
                continue
            override = self._binary_override(overridesuite, b)
            if override is None:
                self.warnings.append("binary:{0} is NEW.".format(b.name))
                new = True

        return new

    def _check_new(self, suite, overridesuite) -> bool:
        """Check if upload is NEW

        An upload is NEW if it has binary or source packages that do not have
        an override in `overridesuite` OR if it references files ONLY in a
        tainted archive (e.g. when it references files in NEW).

        Debug packages (*-dbgsym in Section: debug) are not considered as NEW
        if `suite` has a separate debug suite.

        :return: :const:`True` if the upload is NEW, :const:`False` otherwise
        """
        session = self.session
        new = False

        # Check for missing overrides
        if self._check_new_binary_overrides(suite, overridesuite):
            new = True
        if self.changes.source is not None:
            override = self._source_override(overridesuite, self.changes.source)
            if override is None:
                self.warnings.append(
                    "source:{0} is NEW.".format(self.changes.source.dsc["Source"])
                )
                new = True

        # Check if we reference a file only in a tainted archive
        files = list(self.changes.files.values())
        if self.changes.source is not None:
            files.extend(self.changes.source.files.values())
        for f in files:
            query = (
                session.query(ArchiveFile)
                .join(PoolFile)
                .filter(PoolFile.sha1sum == f.sha1sum)
            )
            query_untainted = query.join(Archive).filter(
                Archive.tainted == False  # noqa:E712
            )

            in_archive = query.first() is not None
            in_untainted_archive = query_untainted.first() is not None

            if in_archive and not in_untainted_archive:
                self.warnings.append("{0} is only available in NEW.".format(f.filename))
                new = True

        return new

    def _final_suites(self):
        session = self.session

        mapped_suites = self._mapped_suites()
        final_suites = list()

        for suite in mapped_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = (
                    session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
                )
            if self._check_new(suite, overridesuite):
                self.new = True
            if suite not in final_suites:
                final_suites.append(suite)

        return final_suites

    def _binary_override(
        self,
        suite: Suite,
        binary: "Union[daklib.upload.Binary, daklib.packagelist.PackageListEntry]",
    ) -> Optional[Override]:
        """Get override entry for a binary

        :param suite: suite to get override for
        :param binary: binary to get override for
        :return: override for the given binary or :const:`None`
        """
        if suite.overridesuite is not None:
            suite = (
                self.session.query(Suite)
                .filter_by(suite_name=suite.overridesuite)
                .one()
            )

        mapped_component = get_mapped_component(binary.component)
        if mapped_component is None:
            return None

        query = (
            self.session.query(Override)
            .filter_by(suite=suite, package=binary.name)
            .join(Component)
            .filter(Component.component_name == mapped_component.component_name)
            .join(OverrideType)
            .filter(OverrideType.overridetype == binary.type)
        )

        return query.one_or_none()

    def _source_override(
        self, suite: Suite, source: daklib.upload.Source
    ) -> Optional[Override]:
        """Get override entry for a source

        :param suite: suite to get override for
        :param source: source to get override for
        :return: override for the given source or :const:`None`
        """
        if suite.overridesuite is not None:
            suite = (
                self.session.query(Suite)
                .filter_by(suite_name=suite.overridesuite)
                .one()
            )

        query = (
            self.session.query(Override)
            .filter_by(suite=suite, package=source.dsc["Source"])
            .join(OverrideType)
            .filter(OverrideType.overridetype == "dsc")
        )

        component = source_component_from_package_list(source.package_list, suite)
        if component is not None:
            query = query.filter(Override.component == component)

        return query.one_or_none()

    def _binary_component(
        self, suite: Suite, binary: daklib.upload.Binary, only_overrides: bool = True
    ) -> Optional[Component]:
        """get component for a binary

        By default this will only look at overrides to get the right component;
        if `only_overrides` is :const:`False` this method will also look at the
        Section field.

        :param only_overrides: only use overrides to get the right component
        """
        override = self._binary_override(suite, binary)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(binary.component, self.session)

    def _source_component(
        self, suite: Suite, source: daklib.upload.Binary, only_overrides: bool = True
    ) -> Optional[Component]:
        """get component for a source

        By default this will only look at overrides to get the right component;
        if `only_overrides` is :const:`False` this method will also look at the
        Section field.

        :param only_overrides: only use overrides to get the right component
        """
        override = self._source_override(suite, source)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(source.component, self.session)

    def check(self, force: bool = False) -> bool:
        """run checks against the upload

        :param force: ignore failing forcible checks
        :return: :const:`True` if all checks passed, :const:`False` otherwise
        """
        # XXX: needs to be better structured.
        assert self.changes.valid_signature

        try:
            # Validate signatures and hashes before we do any real work:
            for chk in (
                checks.SignatureAndHashesCheck,
                checks.WeakSignatureCheck,
                checks.SignatureTimestampCheck,
                checks.ChangesCheck,
                checks.ExternalHashesCheck,
                checks.SourceCheck,
                checks.BinaryCheck,
                checks.BinaryMembersCheck,
                checks.BinaryTimestampCheck,
                checks.SingleDistributionCheck,
                checks.ArchAllBinNMUCheck,
            ):
                chk().check(self)

            final_suites = self._final_suites()
            if len(final_suites) == 0:
                self.reject_reasons.append(
                    "No target suite found. Please check your target distribution and that you uploaded to the right archive."
                )
                return False

            self.final_suites = final_suites

            for chk in (
                checks.TransitionCheck,
                checks.ACLCheck,
                checks.NewOverrideCheck,
                checks.NoSourceOnlyCheck,
                checks.LintianCheck,
            ):
                chk().check(self)

            for chk in (
                checks.SuiteCheck,
                checks.ACLCheck,
                checks.SourceFormatCheck,
                checks.SuiteArchitectureCheck,
                checks.VersionCheck,
            ):
                for suite in final_suites:
                    chk().per_suite_check(self, suite)

            if len(self.reject_reasons) != 0:
                return False

            self._checked = True
            return True
        except checks.Reject as e:
            self.reject_reasons.append(str(e))
        except Exception as e:
            self.reject_reasons.append(
                "Processing raised an exception: {0}.\n{1}".format(
                    e, traceback.format_exc()
                )
            )
        return False

    def _install_to_suite(
        self,
        target_suite: Suite,
        suite: Suite,
        source_component_func: Callable[[daklib.upload.Source], Component],
        binary_component_func: Callable[[daklib.upload.Binary], Component],
        source_suites=None,
        extra_source_archives: Optional[Iterable[Archive]] = None,
        policy_upload: bool = False,
    ) -> tuple[Optional[DBSource], list[DBBinary]]:
        """Install upload to the given suite

        :param target_suite: target suite (before redirection to policy queue or NEW)
        :param suite: suite to install the package into. This is the real suite,
            i.e. after any redirection to NEW or a policy queue
        :param source_component_func: function to get the :class:`daklib.dbconn.Component`
            for a :class:`daklib.upload.Source` object
        :param binary_component_func: function to get the :class:`daklib.dbconn.Component`
            for a :class:`daklib.upload.Binary` object
        :param source_suites: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
        :param extra_source_archives: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
        :param policy_upload: Boolean indicating upload to policy queue (including NEW)
        :return: tuple with two elements. The first is a :class:`daklib.dbconn.DBSource`
            object for the installed source or :const:`None` if no source was
            included. The second is a list of :class:`daklib.dbconn.DBBinary`
            objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?

        control = self.changes.changes
        changed_by = get_or_set_maintainer(
            control.get("Changed-By", control["Maintainer"]), self.session
        )

        if source_suites is None:
            source_suites = (
                self.session.query(Suite)
                .join((VersionCheck, VersionCheck.reference_id == Suite.suite_id))
                .filter(VersionCheck.check == "Enhances")
                .filter(VersionCheck.suite == suite)
                .subquery()
            )

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(
                self.directory,
                source,
                suite,
                component,
                changed_by,
                fingerprint=self.fingerprint,
            )
        else:
            db_source = None

        db_binaries = []
        for binary in sorted(self.changes.binaries, key=lambda x: x.name):
            copy_to_suite = suite
            if (
                daklib.utils.is_in_debug_section(binary.control)
                and suite.debug_suite is not None
            ):
                copy_to_suite = suite.debug_suite

            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(
                self.directory,
                binary,
                copy_to_suite,
                component,
                fingerprint=self.fingerprint,
                source_suites=source_suites,
                extra_source_archives=extra_source_archives,
            )
            db_binaries.append(db_binary)

            if not policy_upload:
                check_upload_for_external_signature_request(
                    self.session, target_suite, copy_to_suite, db_binary
                )

        if suite.copychanges:
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(
                suite.archive.path, "dists", suite.suite_name, self.changes.filename
            )
            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

        suite.update_last_changed()

        return (db_source, db_binaries)

    def _install_changes(self) -> DBChange:
        assert self.changes.valid_signature
        control = self.changes.changes
        session = self.transaction.session

        changelog_id = None
        # Only add changelog for sourceful uploads and binNMUs
        if self.changes.sourceful or re_bin_only_nmu.search(control["Version"]):
            query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
            changelog_id = session.execute(
                query, {"changelog": control["Changes"]}
            ).scalar()
            assert changelog_id is not None

        db_changes = DBChange()
        db_changes.changesname = self.changes.filename
        db_changes.source = control["Source"]
        db_changes.binaries = control.get("Binary", None)
        db_changes.architecture = control["Architecture"]
        db_changes.version = control["Version"]
        db_changes.distribution = control["Distribution"]
        db_changes.urgency = control["Urgency"]
        db_changes.maintainer = control["Maintainer"]
        db_changes.changedby = control.get("Changed-By", control["Maintainer"])
        db_changes.date = control["Date"]
        db_changes.fingerprint = self.fingerprint.fingerprint
        db_changes.changelog_id = changelog_id
        db_changes.closes = self.changes.closed_bugs

        try:
            self.transaction.session.add(db_changes)
            self.transaction.session.flush()
        except sqlalchemy.exc.IntegrityError:
            raise ArchiveException(
                "{0} is already known.".format(self.changes.filename)
            )

        return db_changes

    def _install_policy(
        self, policy_queue, target_suite, db_changes, db_source, db_binaries
    ) -> PolicyQueueUpload:
        """install upload to policy queue"""
        u = PolicyQueueUpload()
        u.policy_queue = policy_queue
        u.target_suite = target_suite
        u.changes = db_changes
        u.source = db_source
        u.binaries = db_binaries
        self.transaction.session.add(u)
        self.transaction.session.flush()

        queue_files = [self.changes.filename]
        queue_files.extend(f.filename for f in self.changes.buildinfo_files)
        for fn in queue_files:
            src = os.path.join(self.changes.directory, fn)
            dst = os.path.join(policy_queue.path, fn)
            self.transaction.fs.copy(src, dst, mode=policy_queue.change_perms)

        return u

    def try_autobyhand(self) -> bool:
        """Try AUTOBYHAND

        Try to handle byhand packages automatically.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        byhand = self.changes.byhand_files
        if len(byhand) == 0:
            return True

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"
        suite = suites[0]

        cnf = Config()
        control = self.changes.changes
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

        remaining = []
        for f in byhand:
            if "_" in f.filename:
                parts = f.filename.split("_", 2)
                if len(parts) != 3:
                    print(
                        "W: unexpected byhand filename {0}. No automatic processing.".format(
                            f.filename
                        )
                    )
                    remaining.append(f)
                    continue
                package, _, archext = parts
                arch, ext = archext.split(".", 1)
            else:
                parts = f.filename.split(".")
                if len(parts) < 2:
                    print(
                        "W: unexpected byhand filename {0}. No automatic processing.".format(
                            f.filename
                        )
                    )
                    remaining.append(f)
                    continue
                package = parts[0]
                arch = "all"
                ext = parts[-1]

            try:
                rule = automatic_byhand_packages.subtree(package)
            except KeyError:
                remaining.append(f)
                continue

            if (
                rule["Source"] != self.changes.source_name
                or rule["Section"] != f.section
                or ("Extension" in rule and rule["Extension"] != ext)
            ):
                remaining.append(f)
                continue

            script = rule["Script"]
            retcode = subprocess.call(
                [
                    script,
                    os.path.join(self.directory, f.filename),
                    control["Version"],
                    arch,
                    os.path.join(self.directory, self.changes.filename),
                    suite.suite_name,
                ],
                shell=False,
            )
            if retcode != 0:
                print("W: error processing {0}.".format(f.filename))
                remaining.append(f)

        return len(remaining) == 0

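    # The AutomaticByHandPackages subtree consulted above is keyed by the
    # byhand package name taken from the filename. A hypothetical entry
    # (names and paths invented for illustration):
    #
    #     AutomaticByHandPackages::debian-installer-images {
    #         Source "debian-installer";
    #         Section "raw-installer";
    #         Extension "tar.gz";
    #         Script "/usr/local/bin/byhand-di";
    #     };
    #
    # As the subprocess call above shows, the script receives the byhand file,
    # the upload version, the architecture, the .changes file and the target
    # suite name as arguments.
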
    def _install_byhand(
        self,
        policy_queue_upload: PolicyQueueUpload,
        hashed_file: daklib.upload.HashedFile,
    ) -> PolicyQueueByhandFile:
        """install byhand file"""
        fs = self.transaction.fs
        session = self.transaction.session
        policy_queue = policy_queue_upload.policy_queue

        byhand_file = PolicyQueueByhandFile()
        byhand_file.upload = policy_queue_upload
        byhand_file.filename = hashed_file.filename
        session.add(byhand_file)
        session.flush()

        src = os.path.join(self.directory, hashed_file.filename)
        dst = os.path.join(policy_queue.path, hashed_file.filename)
        fs.copy(src, dst, mode=policy_queue.change_perms)

        return byhand_file

    def _do_bts_versiontracking(self) -> None:
        cnf = Config()
        fs = self.transaction.fs

        btsdir = cnf.get("Dir::BTSVersionTrack")
        if btsdir is None or btsdir == "":
            return

        base = os.path.join(btsdir, self.changes.filename[:-8])

        # version history
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            dch_path = os.path.join(sourcedir, "debian", "changelog")
            with open(dch_path, "r") as fh:
                versions = fs.create("{0}.versions".format(base), mode=0o644)
                for line in fh.readlines():
                    if re_changelog_versions.match(line):
                        versions.write(line)
                versions.close()

        # binary -> source mapping
        if self.changes.binaries:
            debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
            for binary in self.changes.binaries:
                control = binary.control
                source_package, source_version = binary.source
                line = " ".join(
                    [
                        control["Package"],
                        control["Version"],
                        control["Architecture"],
                        source_package,
                        source_version,
                    ]
                )
                print(line, file=debinfo)
            debinfo.close()

    def _policy_queue(self, suite) -> Optional[PolicyQueue]:
        if suite.policy_queue is not None:
            return suite.policy_queue
        return None

    def install(self) -> None:
        """install upload

        Install upload to a suite or policy queue. This method does **not**
        handle uploads to NEW.

        You need to have called the :meth:`check` method before calling this method.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked
        assert not self.new

        db_changes = self._install_changes()

        for suite in self.final_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = (
                    self.session.query(Suite)
                    .filter_by(suite_name=suite.overridesuite)
                    .one()
                )

            policy_queue = self._policy_queue(suite)
            policy_upload = False

            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite
                policy_upload = True

            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for (enhanced_suite_id,) in (
                self.session.query(VersionCheck.reference_id)
                .filter(VersionCheck.suite_id.in_(source_suite_ids))
                .filter(VersionCheck.check == "Enhances")
            ):
                source_suite_ids.add(enhanced_suite_id)

            source_suites = (
                self.session.query(Suite)
                .filter(Suite.suite_id.in_(source_suite_ids))
                .subquery()
            )

            def source_component_func(source):
                return self._source_component(
                    overridesuite, source, only_overrides=False
                )

            def binary_component_func(binary):
                return self._binary_component(
                    overridesuite, binary, only_overrides=False
                )

            (db_source, db_binaries) = self._install_to_suite(
                suite,
                redirected_suite,
                source_component_func,
                binary_component_func,
                source_suites=source_suites,
                extra_source_archives=[suite.archive],
                policy_upload=policy_upload,
            )

            if policy_queue is not None:
                self._install_policy(
                    policy_queue, suite, db_changes, db_source, db_binaries
                )

            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(
                        suite,
                        build_queue.suite,
                        source_component_func,
                        binary_component_func,
                        source_suites=source_suites,
                        extra_source_archives=[suite.archive],
                    )

        self._do_bts_versiontracking()

    def install_to_new(self) -> None:
        """install upload to NEW

        Install upload to NEW. This method does **not** handle regular uploads
        to suites or policy queues.

        You need to have called the :meth:`check` method before calling this method.
        """
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        binaries = self.changes.binaries
        byhand = self.changes.byhand_files

        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"
        suite = suites[0]

        # decide which NEW queue to use
        if suite.new_queue is None:
            new_queue = (
                self.transaction.session.query(PolicyQueue)
                .filter_by(queue_name="new")
                .one()
            )
        else:
            new_queue = suite.new_queue
        if len(byhand) > 0:
            # There is only one global BYHAND queue
            new_queue = (
                self.transaction.session.query(PolicyQueue)
                .filter_by(queue_name="byhand")
                .one()
            )
        new_suite = new_queue.suite

        def binary_component_func(binary):
            return self._binary_component(suite, binary, only_overrides=False)

        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        source_component_name = None
        for c in self.session.query(Component).order_by(Component.component_id):
            guess = c.component_name
            if guess in binary_component_names:
                source_component_name = guess
                break
        if source_component_name is None:
            source_component = (
                self.session.query(Component).order_by(Component.component_id).first()
            )
        else:
            source_component = (
                self.session.query(Component)
                .filter_by(component_name=source_component_name)
                .one()
            )

        def source_component_func(source):
            return source_component

        db_changes = self._install_changes()
        (db_source, db_binaries) = self._install_to_suite(
            suite,
            new_suite,
            source_component_func,
            binary_component_func,
            source_suites=True,
            extra_source_archives=[suite.archive],
            policy_upload=True,
        )
        policy_upload = self._install_policy(
            new_queue, suite, db_changes, db_source, db_binaries
        )

        for f in byhand:
            self._install_byhand(policy_upload, f)

        self._do_bts_versiontracking()

    def commit(self) -> None:
        """commit changes"""
        self.transaction.commit()

    def rollback(self) -> None:
        """rollback changes"""
        self.transaction.rollback()

    def __enter__(self):
        self.prepare()
        return self

    def __exit__(self, type, value, traceback):
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        self.changes = None
        self.transaction.rollback()
        return None