# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module to manipulate the archive
This module provides classes to manipulate the archive.
"""
import os
import shutil
import subprocess
import traceback
from collections.abc import Callable, Iterable, Sequence
from typing import TYPE_CHECKING, Optional, Union
import sqlalchemy.exc
from sqlalchemy.orm import object_session
from sqlalchemy.orm.exc import NoResultFound
import daklib.checks as checks
import daklib.upload
import daklib.utils
from daklib.config import Config
from daklib.dbconn import (
    Archive,
    ArchiveFile,
    Component,
    DBBinary,
    DBChange,
    DBConn,
    DBSource,
    DSCFile,
    Fingerprint,
    Maintainer,
    Override,
    OverrideType,
    PolicyQueue,
    PolicyQueueByhandFile,
    PolicyQueueUpload,
    PoolFile,
    Suite,
    VersionCheck,
    get_architecture,
    get_mapped_component,
    get_or_set_maintainer,
    import_metadata_into_db,
)
from daklib.externalsignature import check_upload_for_external_signature_request
from daklib.fstransactions import FilesystemTransaction
from daklib.regexes import re_bin_only_nmu, re_changelog_versions
from daklib.tag2upload import get_tag2upload_info_for_upload, parse_git_tag_info
if TYPE_CHECKING:
    import daklib.packagelist
class ArchiveException(Exception):
    """Base exception for errors while manipulating the archive."""
class HashMismatchException(ArchiveException):
    """Raised when a file's size or checksums differ from the copy already in the pool."""
class ArchiveTransaction:
    """manipulate the archive in a transaction"""

    def __init__(self):
        # Filesystem changes are staged here and only applied on commit, so
        # on-disk state and database state change together.
        self.fs = FilesystemTransaction()
        # Database session shared by all operations of this transaction.
        self.session = DBConn().session()
[docs]    def get_file(
        self,
        hashed_file: daklib.upload.HashedFile,
        source_name: str,
        check_hashes: bool = True,
    ) -> PoolFile:
        """Look for file `hashed_file` in database
        :param hashed_file: file to look for in the database
        :param source_name: source package name
        :param check_hashes: check size and hashes match
        :return: database entry for the file
        :raises KeyError: file was not found in the database
        :raises HashMismatchException: hash mismatch
        """
        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
            if check_hashes and (
                poolfile.filesize != hashed_file.size
                or poolfile.md5sum != hashed_file.md5sum
                or poolfile.sha1sum != hashed_file.sha1sum
                or poolfile.sha256sum != hashed_file.sha256sum
            ):
                raise HashMismatchException(
                    "{0}: Does not match file already existing in the pool.".format(
                        hashed_file.filename
                    )
                )
            return poolfile
        except NoResultFound:
            raise KeyError("{0} not found in database.".format(poolname)) 
    def _install_file(
        self, directory, hashed_file, archive, component, source_name
    ) -> PoolFile:
        """Install a file

        Will not give an error when the file is already present.

        :param directory: directory the file is located in
        :param hashed_file: file to install
        :param archive: target archive
        :param component: target component
        :param source_name: name of the source package the file belongs to;
            determines the pool subdirectory
        :return: database object for the new file
        :raises HashMismatchException: a file with the same pool name but
            different size/hashes already exists (raised by :meth:`get_file`)
        """
        session = self.session
        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.get_file(hashed_file, source_name)
        except KeyError:
            # File is not known yet: create the pool entry.
            poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
            poolfile.md5sum = hashed_file.md5sum
            poolfile.sha1sum = hashed_file.sha1sum
            poolfile.sha256sum = hashed_file.sha256sum
            session.add(poolfile)
            session.flush()
        try:
            # Is the file already part of this archive/component?
            session.query(ArchiveFile).filter_by(
                archive=archive, component=component, file=poolfile
            ).one()
        except NoResultFound:
            # Not present yet: register it and stage the copy into the pool.
            archive_file = ArchiveFile(archive, component, poolfile)
            session.add(archive_file)
            session.flush()
            path = os.path.join(
                archive.path, "pool", component.component_name, poolname
            )
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
        return poolfile
    def install_binary(
        self,
        directory: str,
        binary: daklib.upload.Binary,
        suite: Suite,
        component: Component,
        *,
        allow_tainted: bool = False,
        fingerprint: Optional[Fingerprint] = None,
        authorized_by_fingerprint: Optional[Fingerprint] = None,
        source_suites=None,
        extra_source_archives: Optional[Iterable[Archive]] = None,
    ) -> DBBinary:
        """Install a binary package

        :param directory: directory the binary package is located in
        :param binary: binary package to install
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy additional files from tainted archives
        :param fingerprint: optional fingerprint
        :param authorized_by_fingerprint: optional fingerprint of the key that
            authorized the upload
        :param source_suites: suites to copy the source from if they are not
                              in `suite` or :const:`True` to allow copying from any
                              suite.
                              Can be a SQLAlchemy subquery for :class:`Suite` or :const:`True`.
        :param extra_source_archives: extra archives to copy Built-Using sources from
        :return: database object for the new package
        :raises ArchiveException: the source package could not be found, or an
            already existing binary of the same name/version/architecture does
            not match this one
        """
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control["Maintainer"], session)
        architecture = get_architecture(control["Architecture"], session)
        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(
            source=source_name, version=source_version
        )
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        if source is None:
            # Source is not in the target suite: look for it in
            # `source_suites` (anywhere, if that is True) and copy it over.
            if source_suites is not True:
                source_query = source_query.join(DBSource.suites).filter(
                    Suite.suite_id == source_suites.c.id
                )
            source = source_query.first()
            if source is None:
                raise ArchiveException(
                    "{0}: trying to install to {1}, but could not find source ({2} {3})".format(
                        binary.hashed_file.filename,
                        suite.suite_name,
                        source_name,
                        source_version,
                    )
                )
            self.copy_source(source, suite, source.poolfile.component)
        db_file = self._install_file(
            directory, binary.hashed_file, suite.archive, component, source_name
        )
        # Attributes that identify the binary package.
        unique = dict(
            package=control["Package"],
            version=control["Version"],
            architecture=architecture,
        )
        # Attributes that must agree with an already existing binary.
        rest = dict(
            source=source,
            maintainer=maintainer,
            poolfile=db_file,
            binarytype=binary.type,
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing binary
        rest2 = dict(
            fingerprint=fingerprint,
            authorized_by_fingerprint=authorized_by_fingerprint,
        )
        try:
            db_binary = session.query(DBBinary).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_binary, key) != value:
                    raise ArchiveException(
                        "{0}: Does not match binary in database.".format(
                            binary.hashed_file.filename
                        )
                    )
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.items():
                setattr(db_binary, key, value)
            for key, value in rest2.items():
                setattr(db_binary, key, value)
            session.add(db_binary)
            session.flush()
            import_metadata_into_db(db_binary, session)
            self._add_built_using(
                db_binary,
                binary.hashed_file.filename,
                control,
                suite,
                extra_archives=extra_source_archives,
            )
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        session.flush()
        return db_binary
[docs]    def _add_built_using(
        self, db_binary, filename, control, suite, extra_archives=None
    ) -> None:
        """Add Built-Using sources to ``db_binary.extra_sources``"""
        session = self.session
        for bu_source_name, bu_source_version in daklib.utils.parse_built_using(
            control
        ):
            bu_source = (
                session.query(DBSource)
                .filter_by(source=bu_source_name, version=bu_source_version)
                .first()
            )
            if bu_source is None:
                raise ArchiveException(
                    "{0}: Built-Using refers to non-existing source package {1} (= {2})".format(
                        filename, bu_source_name, bu_source_version
                    )
                )
            self._ensure_extra_source_exists(
                filename, bu_source, suite.archive, extra_archives=extra_archives
            )
            db_binary.extra_sources.append(bu_source) 
    def _add_dsc_files(
        self,
        directory: str,
        archive: Archive,
        component: Component,
        source: DBSource,
        files: Iterable[daklib.upload.HashedFile],
        *,
        allow_tainted: bool,
        extra_file: bool = False,
    ) -> None:
        """Register files belonging to a .dsc and get them into the archive.

        Files present in `directory` are installed into the pool; files not
        present there must already be known to the database and are copied
        from an existing archive instead.

        :param directory: directory the upload is located in
        :param archive: target archive
        :param component: target component
        :param source: source package the files belong to
        :param files: files to add
        :param allow_tainted: allow to copy files from tainted archives
        :param extra_file: mark the resulting :class:`DSCFile` rows as extra
            files
        """
        for hashed_file in files:
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            if os.path.exists(hashed_file_path):
                # File was uploaded: install it into the pool.
                db_file = self._install_file(
                    directory, hashed_file, archive, component, source.source
                )
                self.session.add(db_file)
            else:
                # File was not uploaded: it must already exist; copy it into
                # this archive/component.
                db_file = self.get_file(hashed_file, source.source)
                self._copy_file(
                    db_file, archive, component, allow_tainted=allow_tainted
                )
            db_dsc_file = DSCFile()
            db_dsc_file.source = source
            db_dsc_file.poolfile = db_file
            db_dsc_file.extra_file = extra_file
            self.session.add(db_dsc_file)
[docs]    def install_source_to_archive(
        self,
        directory: str,
        source: daklib.upload.Source,
        archive: Archive,
        component: Component,
        changed_by: Maintainer,
        *,
        allow_tainted=False,
        fingerprint: Optional[Fingerprint] = None,
        authorized_by_fingerprint: Optional[Fingerprint] = None,
        extra_source_files: Iterable[daklib.upload.HashedFile] = [],
    ) -> DBSource:
        """Install source package to archive"""
        session = self.session
        control = source.dsc
        maintainer = get_or_set_maintainer(control["Maintainer"], session)
        source_name = control["Source"]
        ### Add source package to database
        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(
            directory, source._dsc_file, archive, component, source_name
        )
        unique = dict(
            source=source_name,
            version=control["Version"],
        )
        rest = dict(
            maintainer=maintainer,
            poolfile=db_file_dsc,
            dm_upload_allowed=(control.get("DM-Upload-Allowed", "no") == "yes"),
        )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            changedby=changed_by,
            fingerprint=fingerprint,
            authorized_by_fingerprint=authorized_by_fingerprint,
        )
        created = False
        try:
            db_source = session.query(DBSource).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_source, key) != value:
                    raise ArchiveException(
                        "{0}: Does not match source in database.".format(
                            source._dsc_file.filename
                        )
                    )
        except NoResultFound:
            created = True
            db_source = DBSource(**unique)
            for key, value in rest.items():
                setattr(db_source, key, value)
            for key, value in rest2.items():
                setattr(db_source, key, value)
            session.add(db_source)
            session.flush()
            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)
            session.flush()
        if not created:
            for f in db_source.srcfiles:
                self._copy_file(
                    f.poolfile, archive, component, allow_tainted=allow_tainted
                )
            return db_source
        ### Now add remaining files and copy them to the archive.
        self._add_dsc_files(
            directory,
            archive,
            component,
            db_source,
            source.files.values(),
            allow_tainted=allow_tainted,
        )
        self._add_dsc_files(
            directory,
            archive,
            component,
            db_source,
            extra_source_files,
            allow_tainted=allow_tainted,
            extra_file=True,
        )
        session.flush()
        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)
        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if "Uploaders" in control:
            from daklib.textutils import split_uploaders
            for u in split_uploaders(control["Uploaders"]):
                db_source.uploaders.append(get_or_set_maintainer(u, session))
        session.flush()
        return db_source 
[docs]    def install_source(
        self,
        directory: str,
        source: daklib.upload.Source,
        suite: Suite,
        component: Component,
        changed_by: Maintainer,
        *,
        allow_tainted: bool = False,
        fingerprint: Optional[Fingerprint] = None,
        authorized_by_fingerprint: Optional[Fingerprint] = None,
        extra_source_files: Iterable[daklib.upload.HashedFile] = [],
    ) -> DBSource:
        """Install a source package
        :param directory: directory the source package is located in
        :param source: source package to install
        :param suite: target suite
        :param component: target component
        :param changed_by: person who prepared this version of the package
        :param allow_tainted: allow to copy additional files from tainted archives
        :param fingerprint: optional fingerprint
        :return: database object for the new source
        """
        db_source = self.install_source_to_archive(
            directory,
            source,
            suite.archive,
            component,
            changed_by,
            allow_tainted=allow_tainted,
            fingerprint=fingerprint,
            authorized_by_fingerprint=authorized_by_fingerprint,
            extra_source_files=extra_source_files,
        )
        if suite in db_source.suites:
            return db_source
        db_source.suites.append(suite)
        self.session.flush()
        return db_source 
[docs]    def _copy_file(
        self,
        db_file: PoolFile,
        archive: Archive,
        component: Component,
        allow_tainted: bool = False,
    ) -> None:
        """Copy a file to the given archive and component
        :param db_file: file to copy
        :param archive: target archive
        :param component: target component
        :param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        session = self.session
        if (
            session.query(ArchiveFile)
            .filter_by(archive=archive, component=component, file=db_file)
            .first()
            is None
        ):
            query = session.query(ArchiveFile).filter_by(file=db_file)
            if not allow_tainted:
                query = query.join(Archive).filter(
                    Archive.tainted == False  # noqa:E712
                )
            source_af = query.first()
            if source_af is None:
                raise ArchiveException(
                    "cp: Could not find {0} in any archive.".format(db_file.filename)
                )
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)
            session.flush()
            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode) 
    def copy_binary(
        self,
        db_binary: DBBinary,
        suite: Suite,
        component: Component,
        allow_tainted: bool = False,
        extra_archives: Optional[Iterable[Archive]] = None,
    ) -> None:
        """Copy a binary package to the given suite and component

        :param db_binary: binary to copy
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy from tainted archives (such as NEW)
        :param extra_archives: extra archives to copy Built-Using sources from
        :raises ArchiveException: the binary's source is not present in the
            target archive
        """
        session = self.session
        archive = suite.archive
        if archive.tainted:
            # Copying into an already tainted archive may pull files from
            # tainted archives as well.
            allow_tainted = True
        filename = db_binary.poolfile.filename
        # make sure source is present in target archive
        db_source = db_binary.source
        if (
            session.query(ArchiveFile)
            .filter_by(archive=archive, file=db_source.poolfile)
            .first()
            is None
        ):
            raise ArchiveException(
                "{0}: cannot copy to {1}: source is not present in target archive".format(
                    filename, suite.suite_name
                )
            )
        # make sure built-using packages are present in target archive
        # (note: `db_source` is rebound to each extra source in turn here)
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(
                filename, db_source, archive, extra_archives=extra_archives
            )
        # copy binary
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        self.session.flush()
[docs]    def copy_source(
        self,
        db_source: DBSource,
        suite: Suite,
        component: Component,
        allow_tainted: bool = False,
    ) -> None:
        """Copy a source package to the given suite and component
        :param db_source: source to copy
        :param suite: target suite
        :param component: target component
        :param allow_tainted: allow to copy from tainted archives (such as NEW)
        """
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True
        for db_dsc_file in db_source.srcfiles:
            self._copy_file(
                db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted
            )
        if suite not in db_source.suites:
            db_source.suites.append(suite)
        self.session.flush() 
[docs]    def remove_file(
        self, db_file: PoolFile, archive: Archive, component: Component
    ) -> None:
        """Remove a file from a given archive and component
        :param db_file: file to remove
        :param archive: archive to remove the file from
        :param component: component to remove the file from
        """
        af = self.session.query(ArchiveFile).filter_by(
            file=db_file, archive=archive, component=component
        )
        self.fs.unlink(af.path)
        self.session.delete(af) 
    def remove_binary(self, binary: DBBinary, suite: Suite) -> None:
        """Remove a binary from a given suite and component

        Only the suite association is removed; the binary itself stays in
        the database.

        :param binary: binary to remove
        :param suite: suite to remove the package from
        """
        binary.suites.remove(suite)
        self.session.flush()
[docs]    def remove_source(self, source: DBSource, suite: Suite) -> None:
        """Remove a source from a given suite and component
        :param source: source to remove
        :param suite: suite to remove the package from
        :raises ArchiveException: source package is still referenced by other
                                 binaries in the suite
        """
        session = self.session
        query = (
            session.query(DBBinary)
            .filter_by(source=source)
            .filter(DBBinary.suites.contains(suite))
        )
        if query.first() is not None:
            raise ArchiveException(
                "src:{0} is still used by binaries in suite {1}".format(
                    source.source, suite.suite_name
                )
            )
        source.suites.remove(suite)
        session.flush() 
    def commit(self) -> None:
        """commit changes

        Commits the database session first, then the staged filesystem
        changes.  The ``finally`` block rolls back whatever was not
        committed (e.g. the filesystem part when the database commit
        raised); after a fully successful commit the rollbacks should have
        nothing left to undo.
        """
        try:
            self.session.commit()
            self.fs.commit()
        finally:
            self.session.rollback()
            self.fs.rollback()
    def rollback(self) -> None:
        """rollback changes

        Rolls back both the database session and the staged filesystem
        changes.
        """
        self.session.rollback()
        self.fs.rollback()
    def flush(self) -> None:
        """flush underlying database session

        Sends pending changes to the database without committing the
        transaction.
        """
        self.session.flush()
    def __enter__(self):
        """Enter the runtime context; returns the transaction itself."""
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        """Commit on clean exit, roll back when an exception occurred.

        The parameters were renamed from ``type``/``traceback`` so they no
        longer shadow the builtin ``type`` and the imported ``traceback``
        module; the context manager protocol passes them positionally, so
        callers are unaffected.  Returns None, so exceptions propagate.
        """
        if exc_type is None:
            self.commit()
        else:
            self.rollback()
        return None
def source_component_from_package_list(
    package_list: "daklib.packagelist.PackageList", suite: Suite
) -> Optional[Component]:
    """Get component for a source package

    Looks at the Package-List field to determine which component the source
    package belongs to: the first component (with respect to the ordering of
    components) the source package provides binaries for.

    If the source package has no Package-List field, None is returned.

    :param package_list: package list of the source to get the override for
    :param suite: suite to consider for binaries produced
    :return: component for the given source or :const:`None`
    """
    if package_list.fallback:
        return None
    session = object_session(suite)
    component_names = {
        entry.component for entry in package_list.packages_for_suite(suite)
    }
    return (
        session.query(Component)
        .order_by(Component.ordering)
        .filter(Component.component_name.in_(component_names))
        .first()
    )
class ArchiveUpload:
    """handle an upload

    This class can be used in a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    Doing so will automatically run any required cleanup and also rollback the
    transaction if it was not committed.
    """

    def __init__(self, directory: str, changes, keyrings):
        self.transaction: ArchiveTransaction = ArchiveTransaction()
        """transaction used to handle the upload"""
        self.session = self.transaction.session
        """database session"""
        self.original_directory: str = directory
        self.original_changes = changes
        self.changes: Optional[daklib.upload.Changes] = None
        """upload to process"""
        self._extra_source_files: list[daklib.upload.HashedFile] = []
        """extra source files"""
        self.directory: str = None
        """directory with temporary copy of files. set by :meth:`prepare`"""
        self.keyrings = keyrings
        self.fingerprint: Fingerprint = (
            self.session.query(Fingerprint)
            .filter_by(fingerprint=changes.primary_fingerprint)
            .one()
        )
        """fingerprint of the key used to sign the upload"""
        self._authorized_by_fingerprint: Optional[Fingerprint] = None
        """fingerprint of the key that authorized the upload"""
        self.reject_reasons: list[str] = []
        """reasons why the upload cannot be accepted"""
        self.warnings: list[str] = []
        """warnings

        .. note::

           Not used yet.
        """
        self.final_suites: Optional[list[Suite]] = None
        self.new: bool = False
        """upload is NEW. set by :meth:`check`"""
        self._checked: bool = False
        """checks passed. set by :meth:`check`"""
        # Policy queue named "new" and its associated suite, used for
        # handling NEW uploads.
        self._new_queue = (
            self.session.query(PolicyQueue).filter_by(queue_name="new").one()
        )
        self._new = self._new_queue.suite
    @property
    def authorized_by_fingerprint(self) -> Fingerprint:
        """
        fingerprint of the key that authorized the upload

        Falls back to :attr:`fingerprint` (the signing key) when no separate
        authorizing key was recorded.
        """
        return (
            self._authorized_by_fingerprint
            if self._authorized_by_fingerprint is not None
            else self.fingerprint
        )

    @authorized_by_fingerprint.setter
    def authorized_by_fingerprint(self, fingerprint: Fingerprint) -> None:
        # Explicitly record the key that authorized the upload.
        self._authorized_by_fingerprint = fingerprint
[docs]    def warn(self, message: str) -> None:
        """add a warning message
        Adds a warning message that can later be seen in :attr:`warnings`
        :param message: warning message
        """
        self.warnings.append(message) 
    def prepare(self):
        """prepare upload for further processing

        This copies the files involved to a temporary directory.  If you use
        this method directly, you have to remove the directory given by the
        :attr:`directory` attribute later on your own.

        Instead of using the method directly, you can also use a with-statement::

           with ArchiveUpload(...) as upload:
              ...

        This will automatically handle any required cleanup.
        """
        assert self.directory is None
        assert self.original_changes.valid_signature
        cnf = Config()
        session = self.transaction.session
        # Temporary directory, group-accessible for the configured
        # unprivileged group (if any).
        group = cnf.get("Dinstall::UnprivGroup") or None
        self.directory = daklib.utils.temp_dirname(
            parent=cnf.get("Dir::TempPath"), mode=0o2750, group=group
        )
        with FilesystemTransaction() as fs:
            # Copy the .changes itself first, then parse it from the copy.
            src = os.path.join(self.original_directory, self.original_changes.filename)
            dst = os.path.join(self.directory, self.original_changes.filename)
            fs.copy(src, dst, mode=0o640)
            self.changes = daklib.upload.Changes(
                self.directory, self.original_changes.filename, self.keyrings
            )
            files = {}
            try:
                files = self.changes.files
            except daklib.upload.InvalidChangesException:
                # Do not raise an exception; upload will be rejected later
                # due to the missing files
                pass
            # Copy the files listed in the .changes; files missing from the
            # upload directory are skipped here (and caught by later checks).
            for f in files.values():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(src):
                    continue
                fs.copy(src, dst, mode=0o640)
            source = None
            try:
                source = self.changes.source
            except Exception:
                # Do not raise an exception here if the .dsc is invalid.
                pass
            if source is not None:
                # Source files referenced by the .dsc but not uploaded
                # (typically an already-archived orig tarball) are fetched
                # from an existing archive copy instead.
                for f in source.files.values():
                    src = os.path.join(self.original_directory, f.filename)
                    dst = os.path.join(self.directory, f.filename)
                    if not os.path.exists(dst):
                        try:
                            db_file = self.transaction.get_file(
                                f, source.dsc["Source"], check_hashes=False
                            )
                            db_archive_file = (
                                session.query(ArchiveFile)
                                .filter_by(file=db_file)
                                .first()
                            )
                            fs.copy(db_archive_file.path, dst, mode=0o640)
                        except KeyError:
                            # Ignore if get_file could not find it. Upload will
                            # probably be rejected later.
                            pass
    def unpacked_source(self) -> Optional[str]:
        """Path to unpacked source

        Get path to the unpacked source. This method does unpack the source
        into a temporary directory under :attr:`directory` if it has not
        been done so already.

        :return: string giving the path to the unpacked source directory
                 or :const:`None` if no source was included in the upload.
        :raises Exception: extraction did not produce a directory
        """
        assert self.directory is not None
        source = self.changes.source
        if source is None:
            return None
        dsc_path = os.path.join(self.directory, source._dsc_file.filename)
        sourcedir = os.path.join(self.directory, "source")
        if not os.path.exists(sourcedir):
            # --no-check skips signature/checksum verification (presumably
            # already done by earlier checks — TODO confirm); --no-copy
            # avoids copying the tarballs into the extraction directory.
            subprocess.check_call(
                ["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir],
                shell=False,
                stdout=subprocess.DEVNULL,
            )
        if not os.path.isdir(sourcedir):
            raise Exception(
                "{0} is not a directory after extracting source package".format(
                    sourcedir
                )
            )
        return sourcedir
[docs]    def _map_suite(self, suite_name):
        suite_names = set((suite_name,))
        for rule in Config().value_list("SuiteMappings"):
            fields = rule.split()
            rtype = fields[0]
            if rtype == "map" or rtype == "silent-map":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.remove(src)
                    suite_names.add(dst)
                    if rtype != "silent-map":
                        self.warnings.append("Mapping {0} to {1}.".format(src, dst))
            elif rtype == "copy" or rtype == "silent-copy":
                (src, dst) = fields[1:3]
                if src in suite_names:
                    suite_names.add(dst)
                    if rtype != "silent-copy":
                        self.warnings.append("Copy {0} to {1}.".format(src, dst))
            elif rtype == "ignore":
                ignored = fields[1]
                if ignored in suite_names:
                    suite_names.remove(ignored)
                    self.warnings.append("Ignoring target suite {0}.".format(ignored))
            elif rtype == "reject":
                rejected = fields[1]
                if rejected in suite_names:
                    raise checks.Reject(
                        "Uploads to {0} are not accepted.".format(rejected)
                    )
            ## XXX: propup-version and map-unreleased not yet implemented
        return suite_names 
[docs]    def _mapped_suites(self) -> list[Suite]:
        """Get target suites after mappings
        :return: list giving the mapped target suites of this upload
        """
        session = self.session
        suite_names = set()
        for dist in self.changes.distributions:
            suite_names.update(self._map_suite(dist))
        suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
        return suites.all() 
[docs]    def _check_new_binary_overrides(self, suite, overridesuite):
        new = False
        source = self.changes.source
        # Check binaries listed in the source package's Package-List field:
        if source is not None and not source.package_list.fallback:
            packages = source.package_list.packages_for_suite(suite)
            binaries = [entry for entry in packages]
            for b in binaries:
                override = self._binary_override(overridesuite, b)
                if override is None:
                    self.warnings.append("binary:{0} is NEW.".format(b.name))
                    new = True
        # Check all uploaded packages.
        # This is necessary to account for packages without a Package-List
        # field, really late binary-only uploads (where an unused override
        # was already removed), and for debug packages uploaded to a suite
        # without a debug suite (which are then considered as NEW).
        binaries = self.changes.binaries
        for b in binaries:
            if (
                daklib.utils.is_in_debug_section(b.control)
                and suite.debug_suite is not None
            ):
                continue
            override = self._binary_override(overridesuite, b)
            if override is None:
                self.warnings.append("binary:{0} is NEW.".format(b.name))
                new = True
        return new 
    def _check_new(self, suite, overridesuite) -> bool:
        """Check if upload is NEW

        An upload is NEW if it has binary or source packages that do not have
        an override in `overridesuite` OR if it references files ONLY in a
        tainted archive (eg. when it references files in NEW).

        Debug packages (*-dbgsym in Section: debug) are not considered as NEW
        if `suite` has a separate debug suite.

        :param suite: target suite
        :param overridesuite: suite whose overrides are consulted
        :return: :const:`True` if the upload is NEW, :const:`False` otherwise
        """
        session = self.session
        new = False

        # Check for missing overrides
        if self._check_new_binary_overrides(suite, overridesuite):
            new = True
        if self.changes.source is not None:
            override = self._source_override(overridesuite, self.changes.source)
            if override is None:
                self.warnings.append(
                    "source:{0} is NEW.".format(self.changes.source.dsc["Source"])
                )
                new = True

        # Check if we reference a file only in a tainted archive
        files = list(self.changes.files.values())
        if self.changes.source is not None:
            files.extend(self.changes.source.files.values())
        for f in files:
            query = (
                session.query(ArchiveFile)
                .join(PoolFile)
                .filter(PoolFile.sha1sum == f.sha1sum)
            )
            # "== False" is a SQLAlchemy column comparison (generates SQL),
            # not a Python boolean test; do not rewrite it as "is False".
            query_untainted = query.join(Archive).filter(
                Archive.tainted == False  # noqa:E712
            )

            in_archive = query.first() is not None
            in_untainted_archive = query_untainted.first() is not None

            if in_archive and not in_untainted_archive:
                self.warnings.append("{0} is only available in NEW.".format(f.filename))
                new = True

        return new
[docs]    def _final_suites(self) -> list[Suite]:
        session = self.session
        mapped_suites = self._mapped_suites()
        final_suites: list[Suite] = []
        for suite in mapped_suites:
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = (
                    session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
                )
            if self._check_new(suite, overridesuite):
                self.new = True
            if suite not in final_suites:
                final_suites.append(suite)
        return final_suites 
[docs]    def _binary_override(
        self,
        suite: Suite,
        binary: "Union[daklib.upload.Binary, daklib.packagelist.PackageListEntry]",
    ) -> Optional[Override]:
        """Get override entry for a binary
        :param suite: suite to get override for
        :param binary: binary to get override for
        :return: override for the given binary or :const:`None`
        """
        if suite.overridesuite is not None:
            suite = (
                self.session.query(Suite)
                .filter_by(suite_name=suite.overridesuite)
                .one()
            )
        mapped_component = get_mapped_component(binary.component)
        if mapped_component is None:
            return None
        query = (
            self.session.query(Override)
            .filter_by(suite=suite, package=binary.name)
            .join(Component)
            .filter(Component.component_name == mapped_component.component_name)
            .join(OverrideType)
            .filter(OverrideType.overridetype == binary.type)
        )
        return query.one_or_none() 
[docs]    def _source_override(
        self, suite: Suite, source: daklib.upload.Source
    ) -> Optional[Override]:
        """Get override entry for a source
        :param suite: suite to get override for
        :param source: source to get override for
        :return: override for the given source or :const:`None`
        """
        if suite.overridesuite is not None:
            suite = (
                self.session.query(Suite)
                .filter_by(suite_name=suite.overridesuite)
                .one()
            )
        query = (
            self.session.query(Override)
            .filter_by(suite=suite, package=source.dsc["Source"])
            .join(OverrideType)
            .filter(OverrideType.overridetype == "dsc")
        )
        component = source_component_from_package_list(source.package_list, suite)
        if component is not None:
            query = query.filter(Override.component == component)
        return query.one_or_none() 
[docs]    def _binary_component(
        self, suite: Suite, binary: daklib.upload.Binary, only_overrides: bool = True
    ) -> Optional[Component]:
        """get component for a binary
        By default this will only look at overrides to get the right component;
        if `only_overrides` is :const:`False` this method will also look at the
        Section field.
        :param only_overrides: only use overrides to get the right component
        """
        override = self._binary_override(suite, binary)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(binary.component, self.session) 
    def _source_component(
        self, suite: Suite, source: daklib.upload.Source, only_overrides: bool = True
    ) -> Optional[Component]:
        """get component for a source

        By default this will only look at overrides to get the right component;
        if `only_overrides` is :const:`False` this method will also look at the
        Section field.

        :param suite: suite to get the component for
        :param source: source package to get the component for
        :param only_overrides: only use overrides to get the right component
        :return: component, or :const:`None` when there is no override and
                 `only_overrides` is :const:`True`
        """
        override = self._source_override(suite, source)
        if override is not None:
            return override.component
        if only_overrides:
            return None
        return get_mapped_component(source.component, self.session)
[docs]    def _run_checks(
        self,
        force: bool,
        simple_checks: Iterable[checks.Check],
        per_suite_checks: Sequence[checks.Check],
        suites: Sequence[Suite],
    ) -> bool:
        try:
            for check in simple_checks:
                check().check(self)
            if per_suite_checks and not suites:
                raise ValueError(
                    "Per-suite checks should be called, but no suites given."
                )
            for check in per_suite_checks:
                for suite in suites:
                    check().per_suite_check(self, suite)
        except checks.Reject as e:
            self.reject_reasons.append(str(e))
            return False
        except Exception as e:
            self.reject_reasons.append(
                "Processing raised an exception: {0}.\n{1}".format(
                    e, traceback.format_exc()
                )
            )
            return False
        if len(self.reject_reasons) != 0:
            return False
        return True 
[docs]    def _run_checks_very_early(self, force: bool) -> bool:
        """
        run very early checks
        These check validate signatures on .changes and hashes.
        """
        return self._run_checks(
            force=force,
            simple_checks=[
                checks.SignatureAndHashesCheck,
                checks.WeakSignatureCheck,
                checks.SignatureTimestampCheck,
            ],
            per_suite_checks=[],
            suites=[],
        ) 
[docs]    def _run_checks_early(self, force: bool) -> bool:
        """
        run early checks
        These are checks that run after checking signatures, but
        before deciding the target suite.
        This should cover archive-wide policies, sanity checks, ...
        """
        return self._run_checks(
            force=force,
            simple_checks=[
                checks.ChangesCheck,
                checks.ExternalHashesCheck,
                checks.SourceCheck,
                checks.BinaryCheck,
                checks.BinaryMembersCheck,
                checks.BinaryTimestampCheck,
                checks.SingleDistributionCheck,
                checks.ArchAllBinNMUCheck,
            ],
            per_suite_checks=[],
            suites=[],
        ) 
[docs]    def _run_checks_late(self, force: bool, suites: Sequence[Suite]) -> bool:
        """
        run late checks
        These are checks that run after the target suites are known.
        This should cover permission checks, suite-specific polices
        (e.g., lintian), version constraints, ...
        """
        return self._run_checks(
            force=force,
            simple_checks=[
                checks.TransitionCheck,
                checks.ACLCheck,
                checks.NewOverrideCheck,
                checks.NoSourceOnlyCheck,
                checks.LintianCheck,
            ],
            per_suite_checks=[
                checks.SuiteCheck,
                checks.ACLCheck,
                checks.SourceFormatCheck,
                checks.SuiteArchitectureCheck,
                checks.VersionCheck,
            ],
            suites=suites,
        ) 
    def _handle_tag2upload(self) -> bool:
        """
        check if upload is via tag2upload

        if so, determine who authorized the upload to notify them of
        rejections and for ACL checks

        :return: :const:`True` if the upload is not via tag2upload or all
                 tag2upload checks passed, :const:`False` otherwise
        """
        # Only uploads signed by a key from a tag2upload keyring are handled here.
        if not self.fingerprint.keyring.tag2upload:
            return True

        source = self.changes.source
        if not source:
            self.reject_reasons.append("tag2upload: upload missing source")
            return False

        try:
            tag2upload_file, info = get_tag2upload_info_for_upload(self)
        except Exception as e:
            self.reject_reasons.append(f"tag2upload: invalid metadata: {e}")
            return False
        self._extra_source_files.append(tag2upload_file)

        # Collect all failures instead of returning at the first one, so the
        # rejection mail lists every problem.
        success = True

        if self.changes.binaries:
            success = False
            self.reject_reasons.append("tag2upload: upload includes binaries")
        if self.changes.byhand_files:
            success = False
            self.reject_reasons.append("tag2upload: upload included by-hand files")

        if not info.signed_file.valid:
            success = False
            self.reject_reasons.append("tag2upload: no valid signature on tag")
        else:
            # Only set with a valid signature, but also when we reject
            # the upload so the signer might get included in the
            # rejection mail.
            self.authorized_by_fingerprint = (
                self.session.query(Fingerprint)
                .filter_by(fingerprint=info.signed_file.primary_fingerprint)
                .one()
            )
        if info.signed_file.weak_signature:
            success = False
            self.reject_reasons.append(
                "tag2upload: tag was signed using a weak algorithm (such as SHA-1)"
            )
        try:
            checks.check_signature_timestamp("tag2upload", info.signed_file)
        except checks.Reject as e:
            success = False
            self.reject_reasons.append(str(e))

        # The tag metadata must match what was actually uploaded.
        if info.metadata.get("distro") != "debian":
            success = False
            self.reject_reasons.append("tag2upload: upload not targeted at Debian.")
        if info.metadata.get("source") != source.dsc["Source"]:
            success = False
            self.reject_reasons.append(
                "tag2upload: source from tag metadata does not match upload"
            )
        if info.metadata.get("version") != source.dsc["Version"]:
            success = False
            self.reject_reasons.append(
                "tag2upload: version from tag metadata does not match upload"
            )

        # The .dsc must carry a Git-Tag-Info field whose fingerprint matches
        # the key that signed the git tag.
        tag_info_field = source.dsc.get("Git-Tag-Info")
        if not tag_info_field:
            success = False
            self.reject_reasons.append("tag2upload: source misses Git-Tag-Info field")
        else:
            try:
                tag_info = parse_git_tag_info(tag_info_field)
            except ValueError:
                success = False
                self.reject_reasons.append("tag2upload: could not parse Git-Tag-Info")
            else:
                if tag_info.fp.upper() != info.signed_file.fingerprint:
                    success = False
                    self.reject_reasons.append(
                        "tag2upload: signing key from Git and Git-Tag-Info differ"
                    )

        return success
[docs]    def check(self, force: bool = False) -> bool:
        """run checks against the upload
        :param force: ignore failing forcable checks
        :return: :const:`True` if all checks passed, :const:`False` otherwise
        """
        # XXX: needs to be better structured.
        assert self.changes.valid_signature
        # Validate signatures and hashes before we do any real work:
        if not self._run_checks_very_early(force):
            return False
        if not self._handle_tag2upload():
            return False
        if not self._run_checks_early(force):
            return False
        try:
            final_suites = self._final_suites()
        except Exception as e:
            self.reject_reasons.append(
                "Processing raised an exception: {0}.\n{1}".format(
                    e, traceback.format_exc()
                )
            )
            return False
        if len(final_suites) == 0:
            self.reject_reasons.append(
                "No target suite found. Please check your target distribution and that you uploaded to the right archive."
            )
            return False
        self.final_suites = final_suites
        if not self._run_checks_late(force, final_suites):
            return False
        if len(self.reject_reasons) != 0:
            return False
        self._checked = True
        return True 
    def _install_to_suite(
        self,
        target_suite: Suite,
        suite: Suite,
        source_component_func: Callable[[daklib.upload.Source], Component],
        binary_component_func: Callable[[daklib.upload.Binary], Component],
        source_suites=None,
        extra_source_archives: Optional[Iterable[Archive]] = None,
        policy_upload: bool = False,
    ) -> tuple[Optional[DBSource], list[DBBinary]]:
        """Install upload to the given suite

        :param target_suite: target suite (before redirection to policy queue or NEW)
        :param suite: suite to install the package into. This is the real suite,
                      ie. after any redirection to NEW or a policy queue
        :param source_component_func: function to get the :class:`daklib.dbconn.Component`
                                      for a :class:`daklib.upload.Source` object
        :param binary_component_func: function to get the :class:`daklib.dbconn.Component`
                                      for a :class:`daklib.upload.Binary` object
        :param source_suites: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
        :param extra_source_archives: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
        :param policy_upload: Boolean indicating upload to policy queue (including NEW)
        :return: tuple with two elements. The first is a :class:`daklib.dbconn.DBSource`
                 object for the install source or :const:`None` if no source was
                 included. The second is a list of :class:`daklib.dbconn.DBBinary`
                 objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?
        control = self.changes.changes

        # Changed-By falls back to Maintainer when absent.
        changed_by = get_or_set_maintainer(
            control.get("Changed-By", control["Maintainer"]), self.session
        )

        if source_suites is None:
            # Default: source may live in the target suite or in any suite
            # that the target suite "Enhances" per the version checks.
            source_suites = (
                self.session.query(Suite)
                .join((VersionCheck, VersionCheck.reference_id == Suite.suite_id))
                .filter(VersionCheck.check == "Enhances")
                .filter(VersionCheck.suite == suite)
                .subquery()
            )

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(
                self.directory,
                source,
                suite,
                component,
                changed_by,
                fingerprint=self.fingerprint,
                authorized_by_fingerprint=self.authorized_by_fingerprint,
                extra_source_files=self._extra_source_files,
            )
        else:
            db_source = None

        db_binaries = []
        # Sort by package name for deterministic installation order.
        for binary in sorted(self.changes.binaries, key=lambda x: x.name):
            copy_to_suite = suite
            # Debug packages are redirected to the suite's debug suite if any.
            if (
                daklib.utils.is_in_debug_section(binary.control)
                and suite.debug_suite is not None
            ):
                copy_to_suite = suite.debug_suite

            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(
                self.directory,
                binary,
                copy_to_suite,
                component,
                fingerprint=self.fingerprint,
                authorized_by_fingerprint=self.authorized_by_fingerprint,
                source_suites=source_suites,
                extra_source_archives=extra_source_archives,
            )
            db_binaries.append(db_binary)

            if not policy_upload:
                check_upload_for_external_signature_request(
                    self.session, target_suite, copy_to_suite, db_binary
                )

        # Some suites keep a copy of the .changes file under dists/.
        if suite.copychanges:
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(
                suite.archive.path, "dists", suite.suite_name, self.changes.filename
            )
            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

        suite.update_last_changed()

        return (db_source, db_binaries)
[docs]    def _install_changes(self) -> DBChange:
        assert self.changes.valid_signature
        control = self.changes.changes
        session = self.transaction.session
        changelog_id = None
        # Only add changelog for sourceful uploads and binNMUs
        if self.changes.sourceful or re_bin_only_nmu.search(control["Version"]):
            query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
            changelog_id = session.execute(
                query, {"changelog": control["Changes"]}
            ).scalar()
            assert changelog_id is not None
        db_changes = DBChange()
        db_changes.changesname = self.changes.filename
        db_changes.source = control["Source"]
        db_changes.binaries = control.get("Binary", None)
        db_changes.architecture = control["Architecture"]
        db_changes.version = control["Version"]
        db_changes.distribution = control["Distribution"]
        db_changes.urgency = control["Urgency"]
        db_changes.maintainer = control["Maintainer"]
        db_changes.changedby = control.get("Changed-By", control["Maintainer"])
        db_changes.date = control["Date"]
        db_changes.fingerprint = self.fingerprint.fingerprint
        db_changes.authorized_by_fingerprint = (
            self.authorized_by_fingerprint.fingerprint
        )
        db_changes.changelog_id = changelog_id
        db_changes.closes = self.changes.closed_bugs
        try:
            self.transaction.session.add(db_changes)
            self.transaction.session.flush()
        except sqlalchemy.exc.IntegrityError:
            raise ArchiveException(
                "{0} is already known.".format(self.changes.filename)
            )
        return db_changes 
[docs]    def _install_policy(
        self, policy_queue, target_suite, db_changes, db_source, db_binaries
    ) -> PolicyQueueUpload:
        """install upload to policy queue"""
        u = PolicyQueueUpload()
        u.policy_queue = policy_queue
        u.target_suite = target_suite
        u.changes = db_changes
        u.source = db_source
        u.binaries = db_binaries
        self.transaction.session.add(u)
        self.transaction.session.flush()
        queue_files = [self.changes.filename]
        queue_files.extend(f.filename for f in self.changes.buildinfo_files)
        for fn in queue_files:
            src = os.path.join(self.changes.directory, fn)
            dst = os.path.join(policy_queue.path, fn)
            self.transaction.fs.copy(src, dst, mode=policy_queue.change_perms)
        return u 
    def try_autobyhand(self) -> bool:
        """Try AUTOBYHAND

        Try to handle byhand packages automatically.

        :return: :const:`True` if no byhand files remain unprocessed,
                 :const:`False` otherwise
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked

        byhand = self.changes.byhand_files
        if len(byhand) == 0:
            return True

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"
        suite = suites[0]

        cnf = Config()
        control = self.changes.changes
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

        # Files that could not be handled automatically.
        remaining = []
        for f in byhand:
            if "_" in f.filename:
                # Expected shape: <package>_<version>_<arch>.<ext>
                parts = f.filename.split("_", 2)
                if len(parts) != 3:
                    print(
                        "W: unexpected byhand filename {0}. No automatic processing.".format(
                            f.filename
                        )
                    )
                    remaining.append(f)
                    continue
                package, _, archext = parts
                arch, ext = archext.split(".", 1)
            else:
                # No underscore: treated as <package>.<ext> for arch "all".
                parts = f.filename.split(".")
                if len(parts) < 2:
                    print(
                        "W: unexpected byhand filename {0}. No automatic processing.".format(
                            f.filename
                        )
                    )
                    remaining.append(f)
                    continue
                package = parts[0]
                arch = "all"
                ext = parts[-1]

            try:
                rule = automatic_byhand_packages.subtree(package)
            except KeyError:
                remaining.append(f)
                continue

            # The configured rule must match source, section and (optionally)
            # the file extension, otherwise the file stays manual.
            if (
                rule["Source"] != self.changes.source_name
                or rule["Section"] != f.section
                or ("Extension" in rule and rule["Extension"] != ext)
            ):
                remaining.append(f)
                continue

            script = rule["Script"]
            # Handler arguments: byhand file, version, arch, .changes path,
            # suite name. Run with shell=False to avoid shell interpretation.
            retcode = subprocess.call(
                [
                    script,
                    os.path.join(self.directory, f.filename),
                    control["Version"],
                    arch,
                    os.path.join(self.directory, self.changes.filename),
                    suite.suite_name,
                ],
                shell=False,
            )
            if retcode != 0:
                print("W: error processing {0}.".format(f.filename))
                remaining.append(f)

        return len(remaining) == 0
[docs]    def _install_byhand(
        self,
        policy_queue_upload: PolicyQueueUpload,
        hashed_file: daklib.upload.HashedFile,
    ) -> PolicyQueueByhandFile:
        """install byhand file"""
        fs = self.transaction.fs
        session = self.transaction.session
        policy_queue = policy_queue_upload.policy_queue
        byhand_file = PolicyQueueByhandFile()
        byhand_file.upload = policy_queue_upload
        byhand_file.filename = hashed_file.filename
        session.add(byhand_file)
        session.flush()
        src = os.path.join(self.directory, hashed_file.filename)
        dst = os.path.join(policy_queue.path, hashed_file.filename)
        fs.copy(src, dst, mode=policy_queue.change_perms)
        return byhand_file 
[docs]    def _do_bts_versiontracking(self) -> None:
        cnf = Config()
        fs = self.transaction.fs
        btsdir = cnf.get("Dir::BTSVersionTrack")
        if btsdir is None or btsdir == "":
            return
        base = os.path.join(btsdir, self.changes.filename[:-8])
        # version history
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            dch_path = os.path.join(sourcedir, "debian", "changelog")
            with open(dch_path, "r") as fh:
                versions = fs.create("{0}.versions".format(base), mode=0o644)
                for line in fh.readlines():
                    if re_changelog_versions.match(line):
                        versions.write(line)
                versions.close()
        # binary -> source mapping
        if self.changes.binaries:
            debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
            for binary in self.changes.binaries:
                control = binary.control
                source_package, source_version = binary.source
                line = " ".join(
                    [
                        control["Package"],
                        control["Version"],
                        control["Architecture"],
                        source_package,
                        source_version,
                    ]
                )
                print(line, file=debinfo)
            debinfo.close() 
[docs]    def _policy_queue(self, suite) -> Optional[PolicyQueue]:
        if suite.policy_queue is not None:
            return suite.policy_queue
        return None 
    def install(self) -> None:
        """install upload

        Install upload to a suite or policy queue.  This method does **not**
        handle uploads to NEW.

        You need to have called the :meth:`check` method before calling this method.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked
        assert not self.new

        db_changes = self._install_changes()

        for suite in self.final_suites:
            # Overrides may live in a separate override suite.
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = (
                    self.session.query(Suite)
                    .filter_by(suite_name=suite.overridesuite)
                    .one()
                )

            policy_queue = self._policy_queue(suite)
            policy_upload = False

            # If the suite has a policy queue, the upload actually lands in
            # the queue's suite instead.
            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite
                policy_upload = True

            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for (enhanced_suite_id,) in (
                self.session.query(VersionCheck.reference_id)
                .filter(VersionCheck.suite_id.in_(source_suite_ids))
                .filter(VersionCheck.check == "Enhances")
            ):
                source_suite_ids.add(enhanced_suite_id)

            source_suites = (
                self.session.query(Suite)
                .filter(Suite.suite_id.in_(source_suite_ids))
                .subquery()
            )

            def source_component_func(source):
                return self._source_component(
                    overridesuite, source, only_overrides=False
                )

            def binary_component_func(binary):
                return self._binary_component(
                    overridesuite, binary, only_overrides=False
                )

            (db_source, db_binaries) = self._install_to_suite(
                suite,
                redirected_suite,
                source_component_func,
                binary_component_func,
                source_suites=source_suites,
                extra_source_archives=[suite.archive],
                policy_upload=policy_upload,
            )

            if policy_queue is not None:
                self._install_policy(
                    policy_queue, suite, db_changes, db_source, db_binaries
                )

            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(
                        suite,
                        build_queue.suite,
                        source_component_func,
                        binary_component_func,
                        source_suites=source_suites,
                        extra_source_archives=[suite.archive],
                    )

        self._do_bts_versiontracking()
[docs]    def install_to_new(self) -> None:
        """install upload to NEW
        Install upload to NEW.  This method does **not** handle regular uploads
        to suites or policy queues.
        You need to have called the :meth:`check` method before calling this method.
        """
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        binaries = self.changes.binaries
        byhand = self.changes.byhand_files
        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"
        suite = suites[0]
        # decide which NEW queue to use
        if suite.new_queue is None:
            new_queue = (
                self.transaction.session.query(PolicyQueue)
                .filter_by(queue_name="new")
                .one()
            )
        else:
            new_queue = suite.new_queue
        if len(byhand) > 0:
            # There is only one global BYHAND queue
            new_queue = (
                self.transaction.session.query(PolicyQueue)
                .filter_by(queue_name="byhand")
                .one()
            )
        new_suite = new_queue.suite
        def binary_component_func(binary):
            return self._binary_component(suite, binary, only_overrides=False)
        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        source_component_name = None
        for c in self.session.query(Component).order_by(Component.component_id):
            guess = c.component_name
            if guess in binary_component_names:
                source_component_name = guess
                break
        if source_component_name is None:
            source_component = (
                self.session.query(Component).order_by(Component.component_id).first()
            )
        else:
            source_component = (
                self.session.query(Component)
                .filter_by(component_name=source_component_name)
                .one()
            )
        def source_component_func(source):
            return source_component
        db_changes = self._install_changes()
        (db_source, db_binaries) = self._install_to_suite(
            suite,
            new_suite,
            source_component_func,
            binary_component_func,
            source_suites=True,
            extra_source_archives=[suite.archive],
            policy_upload=True,
        )
        policy_upload = self._install_policy(
            new_queue, suite, db_changes, db_source, db_binaries
        )
        for f in byhand:
            self._install_byhand(policy_upload, f)
        self._do_bts_versiontracking() 
[docs]    def commit(self) -> None:
        """commit changes"""
        self.transaction.commit() 
[docs]    def rollback(self) -> None:
        """rollback changes"""
        self.transaction.rollback() 
    def __enter__(self):
        """Context-manager entry: prepare the upload and return ``self``."""
        self.prepare()
        return self
    def __exit__(self, exc_type, exc_value, exc_traceback):
        """Context-manager exit: clean up temporary state.

        Removes the temporary directory the upload was extracted into,
        drops the parsed changes, and rolls back any database changes
        that were not explicitly committed via :meth:`commit`.

        Returns ``None``, so exceptions raised inside the ``with`` block
        are never suppressed.
        """
        # Renamed from `type`/`traceback` to avoid shadowing the builtin
        # `type` and the `traceback` module imported at the top of this file.
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None
        self.changes = None
        self.transaction.rollback()
        return None