Source code for daklib.dbconn

"""DB access class

@contact: Debian FTPMaster <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2006  James Troup <james@nocrew.org>
@copyright: 2008-2009  Mark Hymers <mhy@debian.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@copyright: 2009  Mike O'Connor <stew@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

################################################################################

# < mhy> I need a funny comment
# < sgran> two peanuts were walking down a dark street
# < sgran> one was a-salted
#  * mhy looks up the definition of "funny"

################################################################################

import functools
import inspect
import os
import re
import subprocess
import warnings
from collections.abc import Callable, Iterable
from datetime import datetime, timedelta
from os.path import normpath
from tarfile import TarFile
from typing import TYPE_CHECKING, Any, Optional, TypedDict, Union, overload, override

import apt_pkg
import sqlalchemy
import sqlalchemy.types
from debian.deb822 import Deb822
from sqlalchemy import (
    Column,
    ForeignKey,
    Table,
    UniqueConstraint,
    create_engine,
    desc,
    sql,
)
from sqlalchemy.dialects.postgresql import ARRAY

# Don't remove this, we re-export the exceptions to scripts which import us
from sqlalchemy.exc import IntegrityError, OperationalError, SAWarning, SQLAlchemyError
from sqlalchemy.ext.associationproxy import AssociationProxy, association_proxy
from sqlalchemy.orm import (
    DynamicMapped,
    Mapped,
    attribute_keyed_dict,
    mapped_column,
    object_session,
    relationship,
    sessionmaker,
)
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import func
from sqlalchemy.types import CHAR, BigInteger, Boolean, DateTime, Integer, String, Text

import daklib.gpg

from .aptversion import AptVersion

# Only import Config until Queue stuff is changed to store its config
# in the database
from .config import Config
from .textutils import fix_maintainer

# suppress some deprecation warnings in squeeze related to sqlalchemy
warnings.filterwarnings(
    "ignore", "Predicate of partial index .* ignored during reflection", SAWarning
)

# (Debian 12 "bookworm") Silence warning targeted at SQLAlchemy dialect maintainers
warnings.filterwarnings(
    "ignore",
    "Dialect postgresql:psycopg2 will not make use of SQL compilation caching.*",
    SAWarning,
)

from .database.base import Base, BaseTimestamp

if TYPE_CHECKING:
    import sqlalchemy.orm.query
    from sqlalchemy.orm import Session


################################################################################


class DebVersion(sqlalchemy.types.UserDefinedType):
    @override
    def get_col_spec(self, **kw: Any) -> str:
        return "DEBVERSION"

    @override
    def bind_processor(self, dialect):
        return None

    @override
    def result_processor(self, dialect, coltype):
        return None


################################################################################

__all__ = ["IntegrityError", "SQLAlchemyError", "DebVersion"]

################################################################################


def session_wrapper[**P, R](fn: Callable[P, R]) -> Callable[P, R]:
    """
    Wrapper around common ".., session=None):" handling. If the wrapped
    function is called without passing 'session', we create a local one and
    destroy it when the function ends.

    Also attaches a commit_or_flush method to the session; if we created a
    local session, this is a synonym for session.commit(), otherwise it is a
    synonym for session.flush().
    """

    @functools.wraps(fn)
    def wrapped(*args, **kwargs):
        private_transaction = False

        # Find the session object
        session = kwargs.get("session")

        if session is None:
            if len(args) < len(inspect.getfullargspec(fn).args):
                # No session specified as last argument or in kwargs
                private_transaction = True
                kwargs["session"] = session = DBConn().session()
            else:
                # Session is last argument in args
                session = args[-1]
                if session is None:
                    session = DBConn().session()
                    args = (*args[:-1], session)
                    private_transaction = True

        if private_transaction:
            session.commit_or_flush = session.commit  # type: ignore[union-attr]
        else:
            session.commit_or_flush = session.flush  # type: ignore[union-attr]

        try:
            return fn(*args, **kwargs)
        finally:
            if private_transaction:
                # We created a session; close it.
                session.close()

    return wrapped


__all__.append("session_wrapper")

################################################################################
class ORMObject(Base):
    """
    ORMObject is a base class for all ORM classes mapped by SQLAlchemy. All
    derived classes must implement the properties() method.
    """

    __abstract__ = True

    def properties(self) -> list[str]:
        """
        This method should be implemented by all derived classes and returns a
        list of the important properties. The properties 'created' and
        'modified' will be added automatically. A suffix '_count' should be
        added to properties that are lists or query objects. The most
        important property name should be returned as the first element in
        the list because it is used by repr().
        """
        return []

    def classname(self) -> str:
        """
        Returns the name of the class.
        """
        return type(self).__name__

    @override
    def __repr__(self):
        """
        Returns a short string representation of the object using the first
        element from the properties() method.
        """
        primary_property = self.properties()[0]
        value = getattr(self, primary_property)
        return "<%s %s>" % (self.classname(), str(value))

    @override
    def __str__(self):
        """
        Returns a human readable form of the object using the properties()
        method.
        """
        return "<%s(...)>" % (self.classname())

    @classmethod
    @session_wrapper
    def get(cls, primary_key, session: "Session | None" = None):
        """
        This is a support function that allows getting an object by its
        primary key.

        Architecture.get(3[, session])

        instead of the more verbose

        session.query(Architecture).get(3)
        """
        assert session is not None
        return session.query(cls).get(primary_key)

    def session(self) -> "Session | None":
        """
        Returns the current session that is associated with the object. May
        return None if the object is in detached state.
        """
        return object_session(self)


__all__.append("ORMObject")

################################################################################
[docs] class ACL(ORMObject): __tablename__ = "acl" id: Mapped[int] = mapped_column(primary_key=True) name: Mapped[str] = mapped_column() # TODO: add unique=True is_global: Mapped[bool] = mapped_column(default=False) match_fingerprint: Mapped[bool] = mapped_column(default=False) match_keyring_id: Mapped[int | None] = mapped_column(ForeignKey("keyrings.id")) allow_new: Mapped[bool] = mapped_column(default=False) allow_source: Mapped[bool] = mapped_column(default=False) allow_binary: Mapped[bool] = mapped_column(default=False) allow_binary_all: Mapped[bool] = mapped_column(default=False) allow_binary_only: Mapped[bool] = mapped_column(default=False) allow_hijack: Mapped[bool] = mapped_column(default=False) allow_per_source: Mapped[bool] = mapped_column(default=False) deny_per_source: Mapped[bool] = mapped_column(default=False) architectures: Mapped[set["Architecture"]] = relationship( secondary="acl_architecture_map" ) fingerprints: Mapped[set["Fingerprint"]] = relationship( secondary="acl_fingerprint_map" ) match_keyring: Mapped["Keyring"] = relationship(foreign_keys=[match_keyring_id]) per_source: Mapped[set["ACLPerSource"]] = relationship(back_populates="acl") per_suite: Mapped[set["ACLPerSuite"]] = relationship(back_populates="acl") @override def __repr__(self): return "<ACL {0}>".format(self.name)
__all__.append("ACL") class AclArchitectureMap(Base): __tablename__ = "acl_architecture_map" acl_id: Mapped[int] = mapped_column( ForeignKey("acl.id", ondelete="CASCADE"), primary_key=True ) architecture_id: Mapped[int] = mapped_column( ForeignKey("architecture.id", ondelete="CASCADE"), primary_key=True ) class AclFingerprintMap(Base): __tablename__ = "acl_fingerprint_map" acl_id: Mapped[int] = mapped_column( ForeignKey("acl.id", ondelete="CASCADE"), primary_key=True ) fingerprint_id: Mapped[int] = mapped_column( ForeignKey("fingerprint.id", ondelete="CASCADE"), primary_key=True )
[docs] class ACLPerSource(ORMObject): __tablename__ = "acl_per_source" acl_id: Mapped[int] = mapped_column( ForeignKey("acl.id", ondelete="CASCADE"), primary_key=True ) fingerprint_id: Mapped[int] = mapped_column( ForeignKey("fingerprint.id", ondelete="CASCADE"), primary_key=True ) source: Mapped[str] = mapped_column(primary_key=True) reason: Mapped[str | None] = mapped_column(default=None) created_by_id: Mapped[int | None] = mapped_column(ForeignKey("fingerprint.id")) created: Mapped[datetime] = mapped_column( DateTime(timezone=False), server_default=func.now() ) acl: Mapped[ACL] = relationship(back_populates="per_source") fingerprint: Mapped["Fingerprint"] = relationship(foreign_keys=[fingerprint_id]) created_by: Mapped["Fingerprint"] = relationship(foreign_keys=[created_by_id]) @override def __repr__(self): return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format( self.acl.name, self.fingerprint.fingerprint, self.source, self.reason )
__all__.append("ACLPerSource")
[docs] class ACLPerSuite(ORMObject): __tablename__ = "acl_per_suite" acl_id: Mapped[int] = mapped_column( ForeignKey("acl.id", ondelete="CASCADE"), primary_key=True ) fingerprint_id: Mapped[int] = mapped_column( ForeignKey("fingerprint.id", ondelete="CASCADE"), primary_key=True ) suite_id: Mapped[int] = mapped_column( ForeignKey("suite.id", ondelete="CASCADE"), primary_key=True ) reason: Mapped[str | None] = mapped_column(default=None) created_by_id: Mapped[int | None] = mapped_column(ForeignKey("fingerprint.id")) created: Mapped[datetime] = mapped_column( DateTime(timezone=False), server_default=func.now() ) acl: Mapped[ACL] = relationship(back_populates="per_suite") fingerprint: Mapped["Fingerprint"] = relationship(foreign_keys=[fingerprint_id]) suite: Mapped["Suite"] = relationship() created_by: Mapped["Fingerprint"] = relationship(foreign_keys=[created_by_id]) @override def __repr__(self): return "<ACLPerSuite acl={0} fingerprint={1} suite={2} reason={3}>".format( self.acl.name, self.fingerprint.fingerprint, self.suite.suite_name, self.reason, )
__all__.append("ACLPerSuite") ################################################################################
[docs] class Architecture(BaseTimestamp): __tablename__ = "architecture" arch_id: Mapped[int] = mapped_column("id", Integer(), primary_key=True) arch_string: Mapped[str] = mapped_column(Text(), unique=True) description: Mapped[str] suites: Mapped[list["Suite"]] = relationship( secondary="suite_architectures", back_populates="architectures" ) def __init__(self, arch_string=None, description=None): self.arch_string = arch_string self.description = description @override def __str__(self): return self.arch_string @override def __repr__(self): return "<{} {}>".format( self.__class__.__name__, self.arch_string, ) @override def __eq__(self, val): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.arch_string == val # This signals to use the normal comparison operator return NotImplemented @override def __ne__(self, val): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.arch_string != val # This signals to use the normal comparison operator return NotImplemented __hash__ = BaseTimestamp.__hash__
__all__.append("Architecture")
@session_wrapper
def get_architecture(
    architecture: str, session: "Session | None" = None
) -> Optional[Architecture]:
    """
    Returns the Architecture object for the given `architecture` name.

    :param architecture: The name of the architecture
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    :return: Architecture object for the given arch (None if not present)
    """
    assert session is not None
    q = session.query(Architecture).filter_by(arch_string=architecture)
    return q.one_or_none()


__all__.append("get_architecture")

################################################################################
[docs] class Archive(Base): __tablename__ = "archive" archive_id: Mapped[int] = mapped_column("id", primary_key=True) archive_name: Mapped[str] = mapped_column("name", unique=True) description: Mapped[str | None] = mapped_column(default=None) created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) primary_mirror: Mapped[str | None] = mapped_column(default=None) path: Mapped[str] mode: Mapped[str] = mapped_column(CHAR(length=4), default="0644") tainted: Mapped[bool] = mapped_column(default=False) use_morgue: Mapped[bool] = mapped_column(default=True) stayofexecution: Mapped[timedelta] = mapped_column(default=timedelta(hours=36)) changelog: Mapped[str | None] files: Mapped[list["ArchiveFile"]] = relationship(back_populates="archive") suites: Mapped[list["Suite"]] = relationship(back_populates="archive") def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<Archive %s>" % self.archive_name
__all__.append("Archive")
@session_wrapper
def get_archive(archive: str, session: "Session | None" = None) -> Optional[Archive]:
    """
    Returns the Archive object for the given `archive` name.

    :param archive: the name of the archive
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    :return: Archive object for the given name (None if not present)
    """
    assert session is not None
    archive = archive.lower()

    q = session.query(Archive).filter_by(archive_name=archive)
    return q.one_or_none()


__all__.append("get_archive")

################################################################################
[docs] class ArchiveFile(Base): __tablename__ = "files_archive_map" file_id: Mapped[int] = mapped_column(ForeignKey("files.id"), primary_key=True) archive_id: Mapped[int] = mapped_column(ForeignKey("archive.id"), primary_key=True) component_id: Mapped[int] = mapped_column( ForeignKey("component.id"), primary_key=True ) last_used: Mapped[datetime | None] = mapped_column( DateTime(timezone=False), default=None ) created: Mapped[datetime] = mapped_column( DateTime(timezone=False), server_default=func.now() ) archive: Mapped[Archive] = relationship(back_populates="files") component: Mapped["Component"] = relationship() file: Mapped["PoolFile"] = relationship(back_populates="archives") def __init__(self, archive=None, component=None, file=None): self.archive = archive self.component = component self.file = file @property def path(self): return os.path.join( self.archive.path, "pool", self.component.component_name, self.file.filename )
__all__.append("ArchiveFile") ################################################################################
class BinContents(ORMObject):
    __tablename__ = "bin_contents"

    binary_id: Mapped[int] = mapped_column(
        ForeignKey("binaries.id", ondelete="CASCADE"), primary_key=True
    )
    file: Mapped[str] = mapped_column(primary_key=True)

    binary: Mapped["DBBinary"] = relationship(back_populates="contents")

    def __init__(self, file=None, binary=None):
        self.file = file
        self.binary = binary

    @override
    def properties(self) -> list[str]:
        return ["file", "binary"]


__all__.append("BinContents")

################################################################################
[docs] class DBBinary(ORMObject): __tablename__ = "binaries" __table_args = ( UniqueConstraint( "package", "version", "architecture", name="binaries_package_key" ), ) binary_id: Mapped[int] = mapped_column("id", primary_key=True) package: Mapped[str] version: Mapped[str] = mapped_column(DebVersion()) maintainer_id: Mapped[int] = mapped_column( "maintainer", ForeignKey("maintainer.id") ) source_id: Mapped[int] = mapped_column("source", ForeignKey("source.id")) arch_id: Mapped[int] = mapped_column("architecture", ForeignKey("architecture.id")) poolfile_id: Mapped[int] = mapped_column( "file", ForeignKey("files.id"), unique=True ) binarytype: Mapped[str] = mapped_column("type") fingerprint_id: Mapped[int | None] = mapped_column( "sig_fpr", ForeignKey("fingerprint.id") ) install_date: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) stanza: Mapped[str] authorized_by_fingerprint_id: Mapped[int | None] = mapped_column( ForeignKey("fingerprint.id") ) maintainer: Mapped["Maintainer"] = relationship() source: Mapped["DBSource"] = relationship(back_populates="binaries") architecture: Mapped[Architecture] = relationship() poolfile: Mapped["PoolFile"] = relationship() fingerprint: Mapped["Fingerprint"] = relationship(foreign_keys=[fingerprint_id]) authorized_by_fingerprint: Mapped["Fingerprint"] = relationship( foreign_keys=[authorized_by_fingerprint_id] ) suites: Mapped[list["Suite"]] = relationship( secondary="bin_associations", back_populates="binaries" ) extra_sources: Mapped[list["DBSource"]] = relationship( secondary="extra_src_references", back_populates="extra_binary_references" ) key: Mapped[dict[str, "BinaryMetadata"]] = relationship( collection_class=attribute_keyed_dict("key"), cascade="all" ) contents: DynamicMapped["BinContents"] = relationship( cascade="all", back_populates="binary" ) metadata_proxy: AssociationProxy[dict["MetadataKey", str]] = association_proxy( "key", "value" ) def __init__( self, package=None, source=None, version=None, maintainer=None, architecture=None, poolfile=None, binarytype="deb", fingerprint=None, ): self.package = package self.source = source self.version = version self.maintainer = maintainer self.architecture = architecture self.poolfile = poolfile self.binarytype = binarytype self.fingerprint = fingerprint @property def pkid(self) -> int: return self.binary_id @property def name(self) -> str: return self.package @property def arch_string(self) -> str: return "%s" % self.architecture
[docs] @override def properties(self) -> list[str]: return [ "package", "version", "maintainer", "source", "architecture", "poolfile", "binarytype", "fingerprint", "install_date", "suites_count", "binary_id", "contents_count", "extra_sources", ]
[docs] def scan_contents(self) -> Iterable[str]: """ Yields the contents of the package. Only regular files are yielded and the path names are normalized after converting them from either utf-8 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the package does not contain any regular file. """ fullpath = self.poolfile.fullpath dpkg_cmd = ("dpkg-deb", "--fsys-tarfile", fullpath) dpkg = subprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE) tar = TarFile.open(fileobj=dpkg.stdout, mode="r|") for member in tar.getmembers(): if not member.isdir(): name = normpath(member.name) yield name tar.close() assert dpkg.stdout is not None dpkg.stdout.close() dpkg.wait()
[docs] def read_control(self) -> bytes: """ Reads the control information from a binary. :return: stanza text of the control section. """ from . import utils fullpath = self.poolfile.fullpath return utils.deb_extract_control(fullpath)
[docs] def read_control_fields(self) -> apt_pkg.TagSection: """ Reads the control information from a binary and return as a dictionary. :return: fields of the control section as a dictionary. """ stanza = self.read_control() return apt_pkg.TagSection(stanza)
@property def proxy(self) -> "MetadataProxy": session = object_session(self) assert session is not None query = session.query(BinaryMetadata).filter_by(binary=self) return MetadataProxy(session, query)
__all__.append("DBBinary")
@session_wrapper
def get_suites_binary_in(
    package: str, session: "Session | None" = None
) -> "list[Suite]":
    """
    Returns the list of Suite objects which the given `package` name is in.

    :param package: DBBinary package name to search for
    :return: list of Suite objects for the given package
    """
    assert session is not None
    return (
        session.query(Suite)
        .filter(Suite.binaries.any(DBBinary.package == package))
        .all()
    )


__all__.append("get_suites_binary_in")
[docs] @session_wrapper def get_component_by_package_suite( package: str, suite_list: list[str], arch_list: Optional[str] = None, session: "Session | None" = None, ) -> Optional[str]: """ Returns the component name of the newest binary package in suite_list or None if no package is found. The result can be optionally filtered by a list of architecture names. :param package: DBBinary package name to search for :param suite_list: list of suite_name items :param arch_list: optional list of arch_string items that defaults to [] :return: name of component or None """ assert session is not None q = ( session.query(DBBinary) .filter_by(package=package) .join(DBBinary.suites) .filter(Suite.suite_name.in_(suite_list)) ) if arch_list: q = q.join(DBBinary.architecture).filter( Architecture.arch_string.in_(arch_list) ) binary = q.order_by(desc(DBBinary.version)).first() if binary is None: return None else: return binary.poolfile.component.component_name
__all__.append("get_component_by_package_suite") class BinAssociations(Base): __tablename__ = "bin_associations" __table_args = ( UniqueConstraint("suite", "bin", name="bin_associations_suite_key"), ) id: Mapped[int] = mapped_column(primary_key=True) suite: Mapped[int] = mapped_column(ForeignKey("suite.id")) bin: Mapped[int] = mapped_column(ForeignKey("binaries.id")) created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) class ExtraSrcReferences(Base): __tablename__ = "extra_src_references" bin_id: Mapped[int] = mapped_column( ForeignKey("binaries.id", ondelete="CASCADE"), primary_key=True ) src_id: Mapped[int] = mapped_column(ForeignKey("source.id"), primary_key=True) ################################################################################
[docs] class BuildQueue(Base): __tablename__ = "build_queue" queue_id: Mapped[int] = mapped_column("id", primary_key=True) queue_name: Mapped[str] = mapped_column(unique=True) generate_metadata: Mapped[bool] = mapped_column(default=False) stay_of_execution: Mapped[int] = mapped_column(default=86400) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) suite_id: Mapped[int] = mapped_column(ForeignKey("suite.id")) suite: Mapped["Suite"] = relationship() def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<BuildQueue %s>" % self.queue_name
__all__.append("BuildQueue") ################################################################################
[docs] class Component(ORMObject): __tablename__ = "component" component_id: Mapped[int] = mapped_column("id", primary_key=True) component_name: Mapped[str] = mapped_column("name", unique=True) description: Mapped[str | None] meets_dfsg: Mapped[bool | None] created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) ordering: Mapped[int] = mapped_column(unique=True) suites: Mapped[list["Suite"]] = relationship( secondary="component_suite", back_populates="components" ) overrides: DynamicMapped["Override"] = relationship(back_populates="component") def __init__(self, component_name=None): self.component_name = component_name @override def __eq__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.component_name == val # This signals to use the normal comparison operator return NotImplemented @override def __ne__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.component_name != val # This signals to use the normal comparison operator return NotImplemented __hash__ = ORMObject.__hash__
[docs] @override def properties(self) -> list[str]: return [ "component_name", "component_id", "description", "meets_dfsg", "overrides_count", ]
__all__.append("Component")
@session_wrapper
def get_component(
    component: str, session: "Session | None" = None
) -> Optional[Component]:
    """
    Returns the Component object for the given `component` name.

    :param component: The name of the component
    :return: the Component object for the given name (None if not present)
    """
    assert session is not None
    component = component.lower()

    q = session.query(Component).filter_by(component_name=component)
    return q.one_or_none()


__all__.append("get_component")


def get_mapped_component_name(component_name: str) -> str:
    cnf = Config()
    for m in cnf.value_list("ComponentMappings"):
        (src, dst) = m.split()
        if component_name == src:
            component_name = dst
    return component_name


__all__.append("get_mapped_component_name")
@session_wrapper
def get_mapped_component(
    component_name: str, session: "Session | None" = None
) -> Optional[Component]:
    """get component after mappings

    Evaluate component mappings from ComponentMappings in dak.conf for the
    given component name.

    .. todo:: ansgar wants to get rid of this. It's currently only used for
       the security archive

    :param component_name: component name
    :param session: database session
    :return: component after applying maps or :const:`None`
    """
    assert session is not None
    component_name = get_mapped_component_name(component_name)
    component = (
        session.query(Component).filter_by(component_name=component_name).first()
    )
    return component


__all__.append("get_mapped_component")


@session_wrapper
def get_component_names(session: "Session | None" = None) -> list[str]:
    """
    Returns list of strings of component names.

    :return: list of strings of component names
    """
    assert session is not None
    return [x.component_name for x in session.query(Component).all()]


__all__.append("get_component_names")


class ComponentSuite(Base):
    __tablename__ = "component_suite"

    component_id: Mapped[int] = mapped_column(
        ForeignKey("component.id", ondelete="CASCADE"), primary_key=True
    )
    suite_id: Mapped[int] = mapped_column(
        ForeignKey("suite.id", ondelete="CASCADE"), primary_key=True
    )


################################################################################
[docs] class DBConfig(Base): __tablename__ = "config" config_id: Mapped[int] = mapped_column("id", primary_key=True) name: Mapped[str] = mapped_column(unique=True) value: Mapped[str | None] created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<DBConfig %s>" % self.name
__all__.append("DBConfig") ################################################################################
[docs] class DSCFile(Base): __tablename__ = "dsc_files" __table_args = (UniqueConstraint("source", "file", name="dsc_files_source_key"),) dscfile_id: Mapped[int] = mapped_column("id", primary_key=True) source_id: Mapped[int] = mapped_column("source", ForeignKey("source.id")) poolfile_id: Mapped[int] = mapped_column("file", ForeignKey("files.id")) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) extra_file: Mapped[bool] = mapped_column(default=True) source: Mapped["DBSource"] = relationship(back_populates="srcfiles") poolfile: Mapped["PoolFile"] = relationship() def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<DSCFile %s>" % self.dscfile_id
__all__.append("DSCFile")
@session_wrapper
def get_dscfiles(
    dscfile_id: Optional[int] = None,
    source_id: Optional[int] = None,
    poolfile_id: Optional[int] = None,
    session: "Session | None" = None,
) -> list[DSCFile]:
    """
    Returns a list of DSCFiles which may be empty

    :param dscfile_id: the dscfile_id of the DSCFiles to find
    :param source_id: the source id related to the DSCFiles to find
    :param poolfile_id: the poolfile id related to the DSCFiles to find
    :return: Possibly empty list of DSCFiles
    """
    assert session is not None
    q = session.query(DSCFile)

    if dscfile_id is not None:
        q = q.filter_by(dscfile_id=dscfile_id)

    if source_id is not None:
        q = q.filter_by(source_id=source_id)

    if poolfile_id is not None:
        q = q.filter_by(poolfile_id=poolfile_id)

    return q.all()


__all__.append("get_dscfiles")

################################################################################
[docs] class ExternalOverride(ORMObject): __tablename__ = "external_overrides" suite_id: Mapped[int] = mapped_column( "suite", ForeignKey("suite.id"), primary_key=True ) component_id: Mapped[int] = mapped_column( "component", ForeignKey("component.id"), primary_key=True ) package: Mapped[str] = mapped_column(primary_key=True) key: Mapped[str] = mapped_column(primary_key=True) value: Mapped[str] suite: Mapped["Suite"] = relationship() component: Mapped["Component"] = relationship() def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<ExternalOverride %s = %s: %s>" % (self.package, self.key, self.value)
__all__.append("ExternalOverride") ################################################################################
[docs] class PoolFile(ORMObject): __tablename__ = "files" file_id: Mapped[int] = mapped_column("id", primary_key=True) filename: Mapped[str] # TODO: add unique=True? filesize: Mapped[int] = mapped_column("size", BigInteger()) md5sum: Mapped[str] last_used: Mapped[datetime | None] sha1sum: Mapped[str] sha256sum: Mapped[str] created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) archives: Mapped[list["ArchiveFile"]] = relationship(back_populates="file") def __init__(self, filename=None, filesize=-1, md5sum=None): self.filename = filename self.filesize = filesize self.md5sum = md5sum @property def fullpath(self) -> str: session = object_session(self) assert session is not None af = ( session.query(ArchiveFile) .join(Archive) .filter(ArchiveFile.file == self) .order_by(Archive.tainted.desc()) .first() ) assert af is not None return af.path @property def component(self) -> Component: session = object_session(self) assert session is not None component_id = ( session.query(ArchiveFile.component_id) .filter(ArchiveFile.file == self) .group_by(ArchiveFile.component_id) .one() ) return session.get_one(Component, component_id) @property def basename(self) -> str: return os.path.basename(self.filename)
[docs] @override def properties(self) -> list[str]: return [ "filename", "file_id", "filesize", "md5sum", "sha1sum", "sha256sum", "source", "binary", "last_used", ]
__all__.append("PoolFile") ################################################################################
[docs] class Fingerprint(ORMObject): __tablename__ = "fingerprint" fingerprint_id: Mapped[int] = mapped_column("id", primary_key=True) fingerprint: Mapped[str] = mapped_column(unique=True) uid_id: Mapped[int | None] = mapped_column("uid", ForeignKey("uid.id")) keyring_id: Mapped[int | None] = mapped_column("keyring", ForeignKey("keyrings.id")) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) acl_id: Mapped[int | None] = mapped_column(ForeignKey("acl.id")) uid: Mapped["Uid | None"] = relationship(back_populates="fingerprint") keyring: Mapped["Keyring | None"] = relationship() acl: Mapped["ACL | None"] = relationship() def __init__(self, fingerprint=None): self.fingerprint = fingerprint
[docs] @override def properties(self) -> list[str]: return ["fingerprint", "fingerprint_id", "keyring", "uid", "binary_reject"]
__all__.append("Fingerprint")
@session_wrapper
def get_fingerprint(
    fpr: str, session: "Session | None" = None
) -> Optional[Fingerprint]:
    """
    Returns Fingerprint object for given fpr.

    :param fpr: The fpr to find / add
    :param session: Optional SQL session object (a temporary one will be
                    generated if not supplied).
    :return: the Fingerprint object for the given fpr or None
    """
    assert session is not None
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)
    return q.one_or_none()


__all__.append("get_fingerprint")


@session_wrapper
def get_or_set_fingerprint(fpr: str, session: "Session | None" = None) -> Fingerprint:
    """
    Returns Fingerprint object for given fpr.

    If no matching fpr is found, a row is inserted.

    :param fpr: The fpr to find / add
    :param session: Optional SQL session object (a temporary one will be
                    generated if not supplied). If not passed, a commit will be
                    performed at the end of the function, otherwise the caller
                    is responsible for committing. A flush will be performed
                    either way.
    :return: the Fingerprint object for the given fpr
    """
    assert session is not None
    q = session.query(Fingerprint).filter_by(fingerprint=fpr)

    try:
        ret = q.one()
    except NoResultFound:
        fingerprint = Fingerprint()
        fingerprint.fingerprint = fpr
        session.add(fingerprint)
        session.commit_or_flush()  # type: ignore[attr-defined]
        ret = fingerprint

    return ret


__all__.append("get_or_set_fingerprint")

################################################################################


# Helper routine for Keyring class
def get_ldap_name(entry) -> str:
    name = []
    for k in ["cn", "mn", "sn"]:
        ret = entry.get(k)
        if not ret:
            continue
        value = ret[0].decode()
        if value and value[0] != "-":
            name.append(value)
    return " ".join(name)


################################################################################


class _Key(TypedDict, total=False):
    email: str
    name: str
    fingerprints: list[str]
    uid: str
[docs] class Keyring(Base): __tablename__ = "keyrings" keyring_id: Mapped[int] = mapped_column("id", primary_key=True) keyring_name: Mapped[str] = mapped_column("name", unique=True) priority: Mapped[int] = mapped_column(default=100) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) active: Mapped[bool | None] = mapped_column(default=True) acl_id: Mapped[int | None] = mapped_column(ForeignKey("acl.id")) tag2upload: Mapped[bool] = mapped_column(default=False) acl: Mapped["ACL | None"] = relationship(foreign_keys=[acl_id]) keys: dict[str, _Key] = {} fpr_lookup: dict[str, str] = {} def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<Keyring %s>" % self.keyring_name
[docs] def de_escape_gpg_str(self, txt: str) -> str: esclist = re.split(r"(\\x..)", txt) for x in range(1, len(esclist), 2): esclist[x] = "%c" % (int(esclist[x][2:], 16)) return "".join(esclist)
[docs] def parse_address(self, uid: str) -> tuple[str, str]: """parses uid and returns a tuple of real name and email address""" import email.utils (name, address) = email.utils.parseaddr(uid) name = re.sub(r"\s*[(].*[)]", "", name) name = self.de_escape_gpg_str(name) if name == "": name = uid return (name, address)
[docs] def load_keys(self, keyring: str) -> None: if not self.keyring_id: raise Exception("Must be initialized with database information") cmd = [ "gpg", "--no-default-keyring", "--keyring", keyring, "--with-colons", "--fingerprint", "--fingerprint", ] p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) key = None need_fingerprint = False assert p.stdout is not None for line_raw in p.stdout: try: line = line_raw.decode() except UnicodeDecodeError: # Some old UIDs might not use UTF-8 encoding. We assume they # use latin1. line = line_raw.decode("latin1") field = line.split(":") if field[0] == "pub": key = field[4] self.keys[key] = {} (name, addr) = self.parse_address(field[9]) if "@" in addr: self.keys[key]["email"] = addr self.keys[key]["name"] = name need_fingerprint = True elif key and field[0] == "uid": assert key is not None (name, addr) = self.parse_address(field[9]) if "email" not in self.keys[key] and "@" in addr: self.keys[key]["email"] = addr self.keys[key]["name"] = name elif need_fingerprint and field[0] == "fpr": assert key is not None self.keys[key]["fingerprints"] = [field[9]] self.fpr_lookup[field[9]] = key need_fingerprint = False (out, err) = p.communicate() r = p.returncode if r != 0: raise daklib.gpg.GpgException( "command failed: %s\nstdout: %r\nstderr: %r\n" % (cmd, out, err) )
[docs] def import_users_from_ldap( self, session: "Session" ) -> tuple[dict[str, tuple[int, str]], dict[int, tuple[str, str]]]: import ldap # type: ignore from .utils import open_ldap_connection conn = open_ldap_connection() cnf = Config() LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"] Attrs = conn.search_s( LDAPDn, ldap.SCOPE_ONELEVEL, "(&(keyfingerprint=*)(supplementaryGid=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]), ["uid", "keyfingerprint", "cn", "mn", "sn"], ) byuid: dict[int, tuple[str, str]] = {} byname: dict[str, tuple[int, str]] = {} for i in Attrs: entry = i[1] uid = entry["uid"][0].decode() name = get_ldap_name(entry) fingerprints = entry["keyFingerPrint"] keyid = None for f_raw in fingerprints: f = f_raw.decode() key = self.fpr_lookup.get(f, None) if key not in self.keys: continue self.keys[key]["uid"] = uid if keyid is not None: continue keyid = get_or_set_uid(uid, session).uid_id byuid[keyid] = (uid, name) byname[uid] = (keyid, name) return (byname, byuid)
[docs] def generate_users_from_keyring( self, format: str, session: "Session" ) -> tuple[dict[str, tuple[int, str]], dict[int, tuple[str, str]]]: byuid: dict[int, tuple[str, str]] = {} byname: dict[str, tuple[int, str]] = {} any_invalid = False for x in list(self.keys.keys()): if "email" not in self.keys[x]: any_invalid = True self.keys[x]["uid"] = format % "invalid-uid" else: uid = format % self.keys[x]["email"] keyid = get_or_set_uid(uid, session).uid_id byuid[keyid] = (uid, self.keys[x]["name"]) byname[uid] = (keyid, self.keys[x]["name"]) self.keys[x]["uid"] = uid if any_invalid: uid = format % "invalid-uid" keyid = get_or_set_uid(uid, session).uid_id byuid[keyid] = (uid, "ungeneratable user id") byname[uid] = (keyid, "ungeneratable user id") return (byname, byuid)
__all__.append("Keyring")
@session_wrapper
def get_keyring(keyring: str, session: "Session | None" = None) -> Optional[Keyring]:
    """
    If `keyring` does not have an entry in the `keyrings` table yet, return None
    If `keyring` already has an entry, simply return the existing :class:`Keyring`

    :param keyring: the keyring name
    :return: the :class:`Keyring` object for this keyring
    """
    assert session is not None
    q = session.query(Keyring).filter_by(keyring_name=keyring)
    return q.one_or_none()


__all__.append("get_keyring")


@session_wrapper
def get_active_keyring_paths(session: "Session | None" = None) -> list[str]:
    """
    :return: list of active keyring paths
    """
    assert session is not None
    return [
        x.keyring_name
        for x in session.query(Keyring)
        .filter(Keyring.active == True)  # noqa:E712
        .order_by(desc(Keyring.priority))
        .all()
    ]


__all__.append("get_active_keyring_paths")

################################################################################
[docs] class DBChange(Base): __tablename__ = "changes" change_id: Mapped[int] = mapped_column("id", primary_key=True) changesname: Mapped[str] seen: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) source: Mapped[str] binaries: Mapped[str | None] architecture: Mapped[str] version: Mapped[str] distribution: Mapped[str] urgency: Mapped[str] maintainer: Mapped[str] fingerprint: Mapped[str] changedby: Mapped[str] date: Mapped[str] created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) changelog_id: Mapped[int | None] closes: Mapped[list[str] | None] = mapped_column(ARRAY(String())) authorized_by_fingerprint: Mapped[str | None] def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<DBChange %s>" % self.changesname
__all__.append("DBChange")
@session_wrapper
def get_dbchange(filename: str, session: "Session | None" = None) -> Optional[DBChange]:
    """
    returns DBChange object for given `filename`.

    :param filename: the name of the file
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    :return: DBChange object for the given filename (:const:`None` if not present)
    """
    assert session is not None
    q = session.query(DBChange).filter_by(changesname=filename)
    return q.one_or_none()


__all__.append("get_dbchange")

################################################################################
[docs] class DBChangelog(Base): __tablename__ = "changelogs_text" id: Mapped[int] = mapped_column(primary_key=True) changelog: Mapped[str | None] def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<DBChangelog %s>" % self.id
__all__.append("DBChangelog") ################################################################################
[docs] class Maintainer(ORMObject): __tablename__ = "maintainer" maintainer_id: Mapped[int] = mapped_column("id", primary_key=True) name: Mapped[str] = mapped_column(unique=True) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) maintains_sources: Mapped[list["DBSource"]] = relationship( back_populates="maintainer", foreign_keys=lambda: DBSource.maintainer_id ) changed_sources: Mapped[list["DBSource"]] = relationship( back_populates="changedby", foreign_keys=lambda: DBSource.changedby_id ) def __init__(self, name=None): self.name = name
    @override
    def properties(self) -> list[str]:
        return ["name", "maintainer_id"]

    def get_split_maintainer(self) -> tuple[str, str, str, str]:
        if not hasattr(self, "name") or self.name is None:
            return ("", "", "", "")

        return fix_maintainer(self.name.strip())


__all__.append("Maintainer")


@session_wrapper
def get_or_set_maintainer(name: str, session: "Session | None" = None) -> Maintainer:
    """
    Returns Maintainer object for given maintainer name.

    If no matching maintainer name is found, a row is inserted.

    :param name: The maintainer name to add
    :param session: Optional SQL session object (a temporary one will be
                    generated if not supplied). If not passed, a commit will be
                    performed at the end of the function, otherwise the caller
                    is responsible for committing. A flush will be performed
                    either way.
    :return: the Maintainer object for the given maintainer
    """
    assert session is not None
    q = session.query(Maintainer).filter_by(name=name)
    try:
        ret = q.one()
    except NoResultFound:
        maintainer = Maintainer()
        maintainer.name = name
        session.add(maintainer)
        session.commit_or_flush()  # type: ignore[attr-defined]
        ret = maintainer

    return ret


__all__.append("get_or_set_maintainer")


@session_wrapper
def get_maintainer(
    maintainer_id: int, session: "Session | None" = None
) -> Optional[Maintainer]:
    """
    Return the Maintainer behind `maintainer_id` or :const:`None` if that
    `maintainer_id` is invalid.

    :param maintainer_id: the id of the maintainer
    :return: the Maintainer with this `maintainer_id`
    """
    assert session is not None
    return session.get(Maintainer, maintainer_id)


__all__.append("get_maintainer")

################################################################################
[docs] class NewComment(Base): __tablename__ = "new_comments" comment_id: Mapped[int] = mapped_column("id", primary_key=True) package: Mapped[str] version: Mapped[str] comment: Mapped[str] author: Mapped[str] notedate: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) trainee: Mapped[bool] = mapped_column(default=False) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) policy_queue_id: Mapped[int] = mapped_column(ForeignKey("policy_queue.id")) policy_queue: Mapped["PolicyQueue"] = relationship() def __init__(self, *args, **kwargs): pass @override def __repr__(self): return """<NewComment for '%s %s' (%s)>""" % ( self.package, self.version, self.comment_id, )
__all__.append("NewComment")
@session_wrapper
def has_new_comment(
    policy_queue: "PolicyQueue",
    package: str,
    version: str,
    session: "Session | None" = None,
) -> bool:
    """
    Returns :const:`True` if the given combination of `package`, `version` has a comment.

    :param package: name of the package
    :param version: package version
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    """
    assert session is not None
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    q = q.filter_by(package=package)
    q = q.filter_by(version=version)

    return bool(q.count() > 0)


__all__.append("has_new_comment")


@session_wrapper
def get_new_comments(
    policy_queue: "PolicyQueue",
    package: Optional[str] = None,
    version: Optional[str] = None,
    comment_id: Optional[int] = None,
    session: "Session | None" = None,
) -> list[NewComment]:
    """
    Returns (possibly empty) list of NewComment objects for the given
    parameters

    :param package: name of the package
    :param version: package version
    :param comment_id: An id of a comment
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    :return: A (possibly empty) list of NewComment objects will be returned
    """
    assert session is not None
    q = session.query(NewComment).filter_by(policy_queue=policy_queue)
    if package is not None:
        q = q.filter_by(package=package)
    if version is not None:
        q = q.filter_by(version=version)
    if comment_id is not None:
        q = q.filter_by(comment_id=comment_id)

    return q.all()


__all__.append("get_new_comments")

################################################################################
[docs] class Override(ORMObject): __tablename__ = "override" suite_id: Mapped[int] = mapped_column( "suite", ForeignKey("suite.id"), primary_key=True ) component_id: Mapped[int] = mapped_column( "component", ForeignKey("component.id"), primary_key=True ) package: Mapped[str] = mapped_column(primary_key=True) overridetype_id: Mapped[int] = mapped_column( "type", ForeignKey("override_type.id"), primary_key=True ) priority_id: Mapped[int | None] = mapped_column( "priority", ForeignKey("priority.id") ) section_id: Mapped[int] = mapped_column("section", ForeignKey("section.id")) maintainer: Mapped[str | None] created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) suite: Mapped["Suite"] = relationship(back_populates="overrides") component: Mapped["Component"] = relationship(back_populates="overrides") priority: Mapped["Priority"] = relationship(back_populates="overrides") section: Mapped["Section"] = relationship(back_populates="overrides") overridetype: Mapped["OverrideType"] = relationship(back_populates="overrides") def __init__( self, package=None, suite=None, component=None, overridetype=None, section=None, priority=None, ): self.package = package self.suite = suite self.component = component self.overridetype = overridetype self.section = section self.priority = priority
[docs] @override def properties(self) -> list[str]: return ["package", "suite", "component", "overridetype", "section", "priority"]
__all__.append("Override")
[docs] @session_wrapper def get_override( package: str, suite: Union[str, list[str], None] = None, component: Union[str, list[str], None] = None, overridetype: Union[str, list[str], None] = None, session: "Session | None" = None, ) -> list[Override]: """ Returns Override object for the given parameters :param package: The name of the package :param suite: The name of the suite (or suites if a list) to limit to. If None, don't limit. Defaults to None. :param component: The name of the component (or components if a list) to limit to. If None, don't limit. Defaults to None. :param overridetype: The name of the overridetype (or overridetypes if a list) to limit to. If None, don't limit. Defaults to None. :param session: Optional SQLA session object (a temporary one will be generated if not supplied) :return: A (possibly empty) list of Override objects will be returned """ assert session is not None q = session.query(Override) q = q.filter_by(package=package) if suite is not None: if not isinstance(suite, list): suite = [suite] q = q.join(Suite).filter(Suite.suite_name.in_(suite)) if component is not None: if not isinstance(component, list): component = [component] q = q.join(Component).filter(Component.component_name.in_(component)) if overridetype is not None: if not isinstance(overridetype, list): overridetype = [overridetype] q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype)) return q.all()
__all__.append("get_override") ################################################################################
[docs] class OverrideType(ORMObject): __tablename__ = "override_type" overridetype_id: Mapped[int] = mapped_column("id", primary_key=True) overridetype: Mapped[str] = mapped_column("type", unique=True) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) overrides: DynamicMapped["Override"] = relationship(back_populates="overridetype") def __init__(self, overridetype=None): self.overridetype = overridetype
[docs] @override def properties(self) -> list[str]: return ["overridetype", "overridetype_id", "overrides_count"]
__all__.append("OverrideType")
@session_wrapper
def get_override_type(
    override_type: str, session: "Session | None" = None
) -> Optional[OverrideType]:
    """
    Returns OverrideType object for given `override_type`.

    :param override_type: The name of the override type
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    :return: the OverrideType object for the given override type (None if not present)
    """
    assert session is not None
    q = session.query(OverrideType).filter_by(overridetype=override_type)
    return q.one_or_none()


__all__.append("get_override_type")

################################################################################
[docs] class PolicyQueue(Base): __tablename__ = "policy_queue" policy_queue_id: Mapped[int] = mapped_column("id", primary_key=True) queue_name: Mapped[str] = mapped_column(unique=True) path: Mapped[str] change_perms: Mapped[str] = mapped_column(CHAR(length=4)) generate_metadata: Mapped[bool] = mapped_column(default=False) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) send_to_build_queues: Mapped[bool] = mapped_column(default=False) suite_id: Mapped[int] = mapped_column(ForeignKey("suite.id")) suite: Mapped["Suite"] = relationship(foreign_keys=[suite_id]) uploads: Mapped[list["PolicyQueueUpload"]] = relationship( back_populates="policy_queue" ) def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<PolicyQueue %s>" % self.queue_name
__all__.append("PolicyQueue")
@session_wrapper
def get_policy_queue(
    queuename: str, session: "Session | None" = None
) -> Optional[PolicyQueue]:
    """
    Returns PolicyQueue object for given `queuename`

    :param queuename: The name of the queue
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    :return: PolicyQueue object for the given queue
    """
    assert session is not None
    q = session.query(PolicyQueue).filter_by(queue_name=queuename)
    return q.one_or_none()


__all__.append("get_policy_queue")

################################################################################
[docs] @functools.total_ordering class PolicyQueueUpload(Base): __tablename__ = "policy_queue_upload" __table_args = ( UniqueConstraint( "policy_queue_id", "target_suite_id", "changes_id", name="policy_queue_upload_policy_queue_id_target_suite_id_changes_key", ), ) id: Mapped[int] = mapped_column(primary_key=True) policy_queue_id: Mapped[int] = mapped_column(ForeignKey("policy_queue.id")) target_suite_id: Mapped[int] = mapped_column(ForeignKey("suite.id")) changes_id: Mapped[int] = mapped_column(ForeignKey("changes.id")) source_id: Mapped[int | None] = mapped_column(ForeignKey("source.id")) changes: Mapped["DBChange"] = relationship() policy_queue: Mapped["PolicyQueue"] = relationship(back_populates="uploads") target_suite: Mapped["Suite"] = relationship() source: Mapped["DBSource | None"] = relationship() binaries: Mapped[list["DBBinary"]] = relationship( secondary="policy_queue_upload_binaries_map" ) byhand: Mapped[list["PolicyQueueByhandFile"]] = relationship( back_populates="upload" )
[docs] def _key(self): return ( self.changes.source, AptVersion(self.changes.version), self.source is None, self.changes.changesname, )
@override def __eq__(self, other: Any) -> bool: if not isinstance(other, PolicyQueueUpload): return NotImplemented return self._key() == other._key() def __lt__(self, other): return self._key() < other._key()
__all__.append("PolicyQueueUpload") class PolicyQueueUploadBinariesMap(Base): __tablename__ = "policy_queue_upload_binaries_map" policy_queue_upload_id: Mapped[int] = mapped_column( ForeignKey("policy_queue_upload.id", ondelete="CASCADE"), primary_key=True ) binary_id: Mapped[int] = mapped_column(ForeignKey("binaries.id"), primary_key=True) ################################################################################
[docs] class PolicyQueueByhandFile(Base): __tablename__ = "policy_queue_byhand_file" id: Mapped[int] = mapped_column(primary_key=True) upload_id: Mapped[int] = mapped_column(ForeignKey("policy_queue_upload.id")) filename: Mapped[str] processed: Mapped[bool] = mapped_column(default=False) upload: Mapped[PolicyQueueUpload] = relationship(back_populates="byhand")
__all__.append("PolicyQueueByhandFile") ################################################################################
[docs] class Priority(ORMObject): __tablename__ = "priority" priority_id: Mapped[int] = mapped_column("id", primary_key=True) priority: Mapped[str] = mapped_column(unique=True) level: Mapped[int] = mapped_column(unique=True) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) overrides: DynamicMapped["Override"] = relationship(back_populates="priority") def __init__(self, priority=None, level=None): self.priority = priority self.level = level
[docs] @override def properties(self) -> list[str]: return ["priority", "priority_id", "level", "overrides_count"]
@override def __eq__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.priority == val # This signals to use the normal comparison operator return NotImplemented @override def __ne__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.priority != val # This signals to use the normal comparison operator return NotImplemented __hash__ = ORMObject.__hash__
__all__.append("Priority")
[docs] @session_wrapper def get_priority(priority: str, session: "Session | None" = None) -> Optional[Priority]: """ Returns Priority object for given `priority` name. :param priority: The name of the priority :param session: Optional SQLA session object (a temporary one will be generated if not supplied) :return: Priority object for the given priority """ assert session is not None q = session.query(Priority).filter_by(priority=priority) return q.one_or_none()
__all__.append("get_priority")
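# Illustrative usage sketch (not part of dak itself); "optional" is only an
# example priority name and a configured dak database is assumed.
def _example_priority_level(session: "Session") -> Optional[int]:
    prio = get_priority("optional", session=session)
    # get_priority returns None when no priority with that name exists.
    return prio.level if prio is not None else None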
[docs] @session_wrapper def get_priorities(session: "Session | None" = None) -> dict[str, int]: """ Returns dictionary of priority names -> id mappings :param session: Optional SQL session object (a temporary one will be generated if not supplied) :return: dictionary of priority names -> id mappings """ assert session is not None ret = {} q = session.query(Priority) for x in q.all(): ret[x.priority] = x.priority_id return ret
__all__.append("get_priorities") ################################################################################
[docs] class Section(BaseTimestamp): __tablename__ = "section" section_id: Mapped[int] = mapped_column("id", primary_key=True) section: Mapped[str] = mapped_column(unique=True) overrides: DynamicMapped["Override"] = relationship(back_populates="section") def __init__(self, section=None): self.section = section @override def __str__(self): return self.section @override def __repr__(self): return "<{} {}>".format( self.__class__.__name__, self.section, ) @override def __eq__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.section == val # This signals to use the normal comparison operator return NotImplemented @override def __ne__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.section != val # This signals to use the normal comparison operator return NotImplemented __hash__ = BaseTimestamp.__hash__
__all__.append("Section")
[docs] @session_wrapper def get_section(section: str, session: "Session | None" = None) -> Optional[Section]: """ Returns Section object for given `section` name. :param section: The name of the section :param session: Optional SQLA session object (a temporary one will be generated if not supplied) :return: Section object for the given section name """ assert session is not None q = session.query(Section).filter_by(section=section) return q.one_or_none()
__all__.append("get_section")
[docs] @session_wrapper def get_sections(session: "Session | None" = None) -> dict[str, int]: """ Returns dictionary of section names -> id mappings :param session: Optional SQL session object (a temporary one will be generated if not supplied) :return: dictionary of section names -> id mappings """ assert session is not None ret = {} q = session.query(Section) for x in q.all(): ret[x.section] = x.section_id return ret
__all__.append("get_sections") ################################################################################
[docs] class SignatureHistory(ORMObject): __tablename__ = "signature_history" fingerprint: Mapped[str] = mapped_column(primary_key=True) signature_timestamp: Mapped[datetime] = mapped_column( DateTime(timezone=False), primary_key=True ) contents_sha1: Mapped[str] = mapped_column(primary_key=True) seen: Mapped[datetime] = mapped_column( DateTime(timezone=False), server_default=func.now() )
[docs] @classmethod def from_signed_file( cls, signed_file: "daklib.gpg.SignedFile" ) -> "SignatureHistory": """signature history entry from signed file :param signed_file: signed file """ self = cls() self.fingerprint = signed_file.primary_fingerprint self.signature_timestamp = signed_file.signature_timestamp self.contents_sha1 = signed_file.contents_sha1 return self
[docs] def query(self, session): return ( session.query(SignatureHistory) .filter_by( fingerprint=self.fingerprint, signature_timestamp=self.signature_timestamp, contents_sha1=self.contents_sha1, ) .first() )
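# Illustrative usage sketch (not part of dak itself): record a signature and
# detect replayed uploads by checking whether the very same signature (same
# fingerprint, timestamp and content hash) has already been seen.
def _example_seen_before(
    session: "Session", signed_file: "daklib.gpg.SignedFile"
) -> bool:
    entry = SignatureHistory.from_signed_file(signed_file)
    if entry.query(session) is not None:
        return True  # an identical signature is already on record
    session.add(entry)
    return False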
__all__.append("SignatureHistory") ################################################################################
[docs] class SrcContents(ORMObject): __tablename__ = "src_contents" source_id: Mapped[int] = mapped_column( ForeignKey("source.id", ondelete="CASCADE"), primary_key=True ) file: Mapped[str] = mapped_column(primary_key=True) source: Mapped["DBSource"] = relationship(back_populates="contents") def __init__(self, file=None, source=None): self.file = file self.source = source
[docs] @override def properties(self) -> list[str]: return ["file", "source"]
__all__.append("SrcContents") ################################################################################
[docs] class DBSource(ORMObject):
    __tablename__ = "source"
    __table_args__ = (UniqueConstraint("source", "version", name="source_source_key"),)

    source_id: Mapped[int] = mapped_column("id", primary_key=True)
    source: Mapped[str]
    version: Mapped[str] = mapped_column(DebVersion())
    maintainer_id: Mapped[int] = mapped_column(
        "maintainer", ForeignKey("maintainer.id")
    )
    poolfile_id: Mapped[int] = mapped_column(
        "file", ForeignKey("files.id"), unique=True
    )
    fingerprint_id: Mapped[int | None] = mapped_column(
        "sig_fpr", ForeignKey("fingerprint.id")
    )
    install_date: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )
    changedby_id: Mapped[int] = mapped_column("changedby", ForeignKey("maintainer.id"))
    dm_upload_allowed: Mapped[bool] = mapped_column(default=False)
    created: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )
    modified: Mapped[datetime | None] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )
    stanza: Mapped[str]
    authorized_by_fingerprint_id: Mapped[int | None] = mapped_column(
        ForeignKey("fingerprint.id")
    )

    poolfile: Mapped["PoolFile"] = relationship()
    fingerprint: Mapped["Fingerprint | None"] = relationship(
        foreign_keys=[fingerprint_id]
    )
    authorized_by_fingerprint: Mapped["Fingerprint | None"] = relationship(
        foreign_keys=[authorized_by_fingerprint_id]
    )
    srcfiles: Mapped[list["DSCFile"]] = relationship(back_populates="source")
    suites: Mapped[list["Suite"]] = relationship(
        secondary="src_associations", back_populates="sources"
    )
    uploaders: Mapped[list["Maintainer"]] = relationship(secondary="src_uploaders")
    key: Mapped[dict[str, "SourceMetadata"]] = relationship(
        collection_class=attribute_keyed_dict("key"),
        cascade="all",
        back_populates="source",
    )
    contents: DynamicMapped["SrcContents"] = relationship(
        cascade="all",
        back_populates="source",
    )
    binaries: Mapped[list["DBBinary"]] = relationship(back_populates="source")
    extra_binary_references: DynamicMapped["DBBinary"] = relationship(
        secondary="extra_src_references", back_populates="extra_sources"
    )
    maintainer: Mapped["Maintainer"] = relationship(
        back_populates="maintains_sources", foreign_keys=[maintainer_id]
    )
    changedby: Mapped["Maintainer"] = relationship(
        back_populates="changed_sources", foreign_keys=[changedby_id]
    )

    metadata_proxy: AssociationProxy[dict["MetadataKey", str]] = association_proxy(
        "key", "value"
    )

    def __init__(
        self,
        source=None,
        version=None,
        maintainer=None,
        changedby=None,
        poolfile=None,
        install_date=None,
        fingerprint=None,
    ):
        self.source = source
        self.version = version
        self.maintainer = maintainer
        self.changedby = changedby
        self.poolfile = poolfile
        self.install_date = install_date
        self.fingerprint = fingerprint

    @property
    def pkid(self) -> int:
        return self.source_id

    @property
    def name(self) -> str:
        return self.source

    @property
    def arch_string(self) -> str:
        return "source"
[docs] @override def properties(self) -> list[str]: return [ "source", "source_id", "maintainer", "changedby", "fingerprint", "poolfile", "version", "suites_count", "install_date", "binaries_count", "uploaders_count", ]
[docs] def read_control_fields(self) -> Deb822: """ Reads the control information from a dsc :return: fields is the dsc information in a dictionary form """ with open(self.poolfile.fullpath, "r") as fd: fields = Deb822(fd) return fields
[docs] def scan_contents(self) -> set[str]: """ Returns a set of names for non directories. The path names are normalized after converting them from either utf-8 or iso8859-1 encoding. """ fullpath = self.poolfile.fullpath from daklib.contents import UnpackedSource unpacked = UnpackedSource(fullpath) fileset = set() for name in unpacked.get_all_filenames(): fileset.add(name) return fileset
@property def proxy(self) -> "MetadataProxy": session = object_session(self) assert session is not None query = session.query(SourceMetadata).filter_by(source=self) return MetadataProxy(session, query)
__all__.append("DBSource")
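# Illustrative usage sketch (not part of dak itself): metadata imported from
# the .dsc (see import_metadata_into_db below) is reachable through the
# string-keyed `proxy` property; "Build-Depends" is just an example field.
def _example_source_build_depends(source: DBSource) -> str | None:
    # Returns None if the field was never imported for this source.
    return source.proxy.get("Build-Depends")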
[docs] @session_wrapper
def get_suites_source_in(
    source: str, session: "Session | None" = None
) -> "list[Suite]":
    """
    Returns list of Suite objects the given `source` package name is in

    :param source: DBSource package name to search for
    :param session: Optional SQLA session object (a temporary one will be
                    generated if not supplied)
    :return: list of Suite objects for the given source
    """
    assert session is not None

    return session.query(Suite).filter(Suite.sources.any(source=source)).all()
__all__.append("get_suites_source_in") # FIXME: This function fails badly if it finds more than 1 source package and # its implementation is trivial enough to be inlined.
[docs] @session_wrapper
def get_source_in_suite(
    source: str, suite_name: Optional[str], session: "Session | None" = None
) -> Optional[DBSource]:
    """
    Returns a DBSource object for a combination of `source` and `suite_name`.

    :param source: source package name
    :param suite_name: the suite name
    :return: the DBSource object for `source` in the given suite, or None if
             either the suite or the source is not found
    """
    assert session is not None

    if suite_name is None:
        return None
    suite = get_suite(suite_name, session)
    if suite is None:
        return None
    return suite.get_sources(source).one_or_none()
__all__.append("get_source_in_suite")
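# Illustrative usage sketch (not part of dak itself); "hello" and "unstable"
# are example names.
def _example_version_in_suite(session: "Session") -> str | None:
    src = get_source_in_suite("hello", "unstable", session=session)
    # None if either the suite or the source is not present.
    return src.version if src is not None else None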
[docs] @session_wrapper def import_metadata_into_db( obj: Union[DBBinary, DBSource], session: "Session | None" = None ) -> None: """ This routine works on either DBBinary or DBSource objects and imports their metadata into the database """ assert session is not None fields = obj.read_control_fields() for k in fields.keys(): try: # Try raw ASCII val = str(fields[k]) except UnicodeEncodeError: # Fall back to UTF-8 try: val = fields[k].encode("utf-8") except UnicodeEncodeError: # Finally try iso8859-1 val = fields[k].encode("iso8859-1") # Otherwise we allow the exception to percolate up and we cause # a reject as someone is playing silly buggers obj.metadata_proxy[get_or_set_metadatakey(k, session)] = val session.commit_or_flush() # type: ignore[attr-defined]
__all__.append("import_metadata_into_db")


class SrcAssociations(Base):
    __tablename__ = "src_associations"
    __table_args__ = (
        UniqueConstraint("suite", "source", name="src_associations_suite_key"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    suite: Mapped[int] = mapped_column(ForeignKey("suite.id"))
    source: Mapped[int] = mapped_column(ForeignKey("source.id"))
    created: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )
    modified: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )


class SrcUploaders(Base):
    __tablename__ = "src_uploaders"
    __table_args__ = (
        UniqueConstraint("source", "maintainer", name="src_uploaders_source_key"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    source: Mapped[int] = mapped_column(ForeignKey("source.id", ondelete="CASCADE"))
    maintainer: Mapped[int] = mapped_column(
        ForeignKey("maintainer.id", ondelete="CASCADE")
    )
    created: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )
    modified: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), server_default=func.now()
    )


################################################################################
[docs] class SrcFormat(Base): __tablename__ = "src_format" src_format_id: Mapped[int] = mapped_column("id", primary_key=True) format_name: Mapped[str] = mapped_column(unique=True) created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) suites: DynamicMapped["Suite"] = relationship( secondary="suite_src_formats", back_populates="srcformats" ) def __init__(self, *args, **kwargs): pass @override def __repr__(self): return "<SrcFormat %s>" % (self.format_name)
__all__.append("SrcFormat") ################################################################################ SUITE_FIELDS = [ ("SuiteName", "suite_name"), ("SuiteID", "suite_id"), ("Version", "version"), ("Origin", "origin"), ("Label", "label"), ("Description", "description"), ("Untouchable", "untouchable"), ("Announce", "announce"), ("Codename", "codename"), ("OverrideCodename", "overridecodename"), ("ValidTime", "validtime"), ("Priority", "priority"), ("NotAutomatic", "notautomatic"), ("CopyChanges", "copychanges"), ("OverrideSuite", "overridesuite"), ] # Why the heck don't we have any UNIQUE constraints in table suite? # TODO: Add UNIQUE constraints for appropriate columns.
[docs] class Suite(ORMObject): __tablename__ = "suite" suite_id: Mapped[int] = mapped_column("id", primary_key=True) suite_name: Mapped[str] = mapped_column(unique=True) version: Mapped[str | None] origin: Mapped[str | None] label: Mapped[str | None] description: Mapped[str | None] untouchable: Mapped[bool] = mapped_column(default=False) codename: Mapped[str | None] overridecodename: Mapped[str | None] validtime: Mapped[int] = mapped_column(default=604800) priority: Mapped[int] = mapped_column(default=0) notautomatic: Mapped[bool] = mapped_column(default=False) copychanges: Mapped[str | None] overridesuite: Mapped[str | None] policy_queue_id: Mapped[int | None] = mapped_column(ForeignKey("policy_queue.id")) created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) changelog: Mapped[str | None] butautomaticupgrades: Mapped[bool] = mapped_column(default=False) signingkeys: Mapped[list[str] | None] = mapped_column(ARRAY(String())) announce: Mapped[list[str] | None] = mapped_column(ARRAY(String())) include_long_description: Mapped[bool] = mapped_column(default=False) overrideprocess: Mapped[bool] = mapped_column(default=False) overrideorigin: Mapped[str | None] allowcsset: Mapped[bool] = mapped_column(default=False) archive_id: Mapped[int] = mapped_column(ForeignKey("archive.id")) new_queue_id: Mapped[int | None] = mapped_column(ForeignKey("policy_queue.id")) close_bugs: Mapped[bool | None] mail_whitelist: Mapped[str | None] indices_compression: Mapped[list[str] | None] = mapped_column( ARRAY(String()), default=["xz"] ) i18n_compression: Mapped[list[str] | None] = mapped_column( ARRAY(String()), default=["xz"] ) release_suite: Mapped[str | None] debugsuite_id: Mapped[int | None] = mapped_column(ForeignKey("suite.id")) changelog_url: Mapped[str | None] accept_source_uploads: Mapped[bool | None] = mapped_column(default=True) accept_binary_uploads: Mapped[bool | None] = mapped_column(default=True) checksums: Mapped[list[str] | None] = mapped_column( ARRAY(String()), default=["sha256"] ) last_changed: Mapped[datetime] = mapped_column( DateTime(timezone=False), server_default=func.now() ) byhash: Mapped[bool | None] = mapped_column(default=True) separate_contents_architecture_all: Mapped[bool] = mapped_column(default=False) separate_packages_architecture_all: Mapped[bool] = mapped_column(default=False) merged_pdiffs: Mapped[bool] = mapped_column(default=True) stayofexecution: Mapped[timedelta] = mapped_column(default=timedelta(hours=0)) policy_queue: Mapped["PolicyQueue | None"] = relationship( foreign_keys=[policy_queue_id] ) new_queue: Mapped["PolicyQueue | None"] = relationship(foreign_keys=[new_queue_id]) debug_suite: Mapped["Suite | None"] = relationship(remote_side=[suite_id]) copy_queues: Mapped[list["BuildQueue"]] = relationship( secondary="suite_build_queue_copy" ) srcformats: Mapped[list["SrcFormat"]] = relationship( secondary="suite_src_formats", back_populates="suites" ) archive: Mapped[Archive] = relationship(back_populates="suites") acls: Mapped[set[ACL]] = relationship(secondary="suite_acl_map") components: Mapped[list["Component"]] = relationship( secondary="component_suite", back_populates="suites", order_by=lambda: Component.ordering, ) architectures: Mapped[list[Architecture]] = relationship( secondary="suite_architectures", back_populates="suites" ) binaries: DynamicMapped["DBBinary"] = relationship( secondary="bin_associations", 
back_populates="suites", ) sources: DynamicMapped["DBSource"] = relationship( secondary="src_associations", back_populates="suites" ) overrides: DynamicMapped["Override"] = relationship(back_populates="suite") def __init__(self, suite_name=None, version=None): self.suite_name = suite_name self.version = version
[docs] @override def properties(self) -> list[str]: return [ "suite_name", "version", "sources_count", "binaries_count", "overrides_count", ]
@override def __eq__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.suite_name == val # This signals to use the normal comparison operator return NotImplemented @override def __ne__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.suite_name != val # This signals to use the normal comparison operator return NotImplemented __hash__ = ORMObject.__hash__
[docs] def details(self) -> str: ret = [] for disp, field in SUITE_FIELDS: val = getattr(self, field, None) if val is not None: ret.append("%s: %s" % (disp, val)) return "\n".join(ret)
[docs]    def get_architectures(
        self, skipsrc: bool = False, skipall: bool = False
    ) -> list[Architecture]:
        """
        Returns list of Architecture objects

        :param skipsrc: Whether to skip returning the 'source' architecture entry
        :param skipall: Whether to skip returning the 'all' architecture entry
        :return: list of Architecture objects for this suite (may be empty)
        """
        session = object_session(self)
        assert session is not None
        q = session.query(Architecture).with_parent(self)
        if skipsrc:
            q = q.filter(Architecture.arch_string != "source")
        if skipall:
            q = q.filter(Architecture.arch_string != "all")
        return q.order_by(Architecture.arch_string).all()
[docs] def get_sources(self, source: str) -> sqlalchemy.orm.query.Query: """ Returns a query object representing DBSource that is part of this suite. :param source: source package name :return: a query of DBSource """ session = object_session(self) assert session is not None return session.query(DBSource).filter_by(source=source).with_parent(self)
[docs] def get_overridesuite(self) -> "Suite": if self.overridesuite is None: return self session = object_session(self) assert session is not None return session.query(Suite).filter_by(suite_name=self.overridesuite).one()
[docs] def update_last_changed(self) -> None: self.last_changed = sqlalchemy.func.now()
@property def path(self) -> str: return os.path.join(self.archive.path, "dists", self.suite_name) @property def release_suite_output(self) -> str: if self.release_suite is not None: return self.release_suite return self.suite_name
__all__.append("Suite")
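# Illustrative usage sketch (not part of dak itself): a few Suite helpers
# combined, namely the on-disk dists/ path derived from the owning archive
# and the architecture list without the pseudo-architectures "source" and
# "all". The helper name is made up.
def _example_suite_summary(suite: Suite) -> tuple[str, list[str]]:
    arches = [
        a.arch_string for a in suite.get_architectures(skipsrc=True, skipall=True)
    ]
    return suite.path, arches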
[docs] @session_wrapper def get_suite(suite: str, session: "Session | None" = None) -> Optional[Suite]: """ Returns Suite object for given `suite` name. :param suite: The name of the suite :param session: Optional SQLA session object (a temporary one will be generated if not supplied) :return: Suite object for the requested suite name (None if not present) """ assert session is not None # Start by looking for the dak internal name q = session.query(Suite).filter_by(suite_name=suite) try: return q.one() except NoResultFound: pass # Now try codename q = session.query(Suite).filter_by(codename=suite) try: return q.one() except NoResultFound: pass # Finally give release_suite a try q = session.query(Suite).filter_by(release_suite=suite) return q.one_or_none()
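# Illustrative usage sketch (not part of dak itself): get_suite() falls back
# from the dak-internal suite name to the codename and finally to
# release_suite, so any of the three spellings resolves the same row.
def _example_resolve_suite(name: str, session: "Session") -> Optional[Suite]:
    # Returns None only if none of the three lookups matches.
    return get_suite(name, session=session)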
__all__.append("get_suite") class SuiteBuildQueueCopy(Base): __tablename__ = "suite_build_queue_copy" suite: Mapped[int] = mapped_column(ForeignKey("suite.id"), primary_key=True) build_queue_id: Mapped[int] = mapped_column( ForeignKey("build_queue.id"), primary_key=True ) created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) class SuiteSrcFormats(Base): __tablename__ = "suite_src_formats" suite: Mapped[int] = mapped_column(ForeignKey("suite.id"), primary_key=True) src_format: Mapped[int] = mapped_column( ForeignKey("src_format.id"), primary_key=True ) created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) class SuiteAclMap(Base): __tablename__ = "suite_acl_map" suite_id: Mapped[int] = mapped_column( ForeignKey("suite.id", ondelete="CASCADE"), primary_key=True ) acl_id: Mapped[int] = mapped_column(ForeignKey("acl.id"), primary_key=True) class SuiteArchitectures(Base): __tablename__ = "suite_architectures" suite: Mapped[int] = mapped_column(ForeignKey("suite.id"), primary_key=True) architecture: Mapped[int] = mapped_column( ForeignKey("architecture.id"), primary_key=True ) created: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime] = mapped_column( DateTime(timezone=True), server_default=func.now() ) ################################################################################
[docs] @session_wrapper def get_suite_architectures( suite_name: str, skipsrc: bool = False, skipall: bool = False, session: "Session | None" = None, ) -> list[Architecture]: """ Returns list of Architecture objects for given `suite` name. The list is empty if `suite` does not exist. :param suite_name: Suite name to search for :param skipsrc: Whether to skip returning the 'source' architecture entry :param skipall: Whether to skip returning the 'all' architecture entry :param session: Optional SQL session object (a temporary one will be generated if not supplied) :return: list of Architecture objects for the given name (may be empty) """ assert session is not None suite = get_suite(suite_name, session) if suite is None: return [] return suite.get_architectures(skipsrc, skipall)
__all__.append("get_suite_architectures") ################################################################################
[docs] class Uid(ORMObject): __tablename__ = "uid" uid_id: Mapped[int] = mapped_column("id", primary_key=True) uid: Mapped[str] = mapped_column(unique=True) name: Mapped[str | None] created: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) modified: Mapped[datetime | None] = mapped_column( DateTime(timezone=True), server_default=func.now() ) fingerprint: Mapped[list["Fingerprint"]] = relationship(back_populates="uid") def __init__(self, uid=None, name=None): self.uid = uid self.name = name @override def __eq__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.uid == val # This signals to use the normal comparison operator return NotImplemented @override def __ne__(self, val: Any): if isinstance(val, str): warnings.warn( "comparison with a `str` is deprecated", DeprecationWarning, stacklevel=2, ) return self.uid != val # This signals to use the normal comparison operator return NotImplemented __hash__ = ORMObject.__hash__
[docs] @override def properties(self) -> list[str]: return ["uid", "name", "fingerprint"]
__all__.append("Uid")
[docs] @session_wrapper
def get_or_set_uid(uidname: str, session: "Session | None" = None) -> Uid:
    """
    Returns uid object for given uidname.

    If no matching uidname is found, a row is inserted.

    :param uidname: The uid to add
    :param session: Optional SQL session object (a temporary one will be
                    generated if not supplied).  If not passed, a commit will be performed at
                    the end of the function, otherwise the caller is responsible for committing.
    :return: the uid object for the given uidname
    """
    assert session is not None

    q = session.query(Uid).filter_by(uid=uidname)

    try:
        ret = q.one()
    except NoResultFound:
        uid = Uid()
        uid.uid = uidname
        session.add(uid)
        session.commit_or_flush()  # type: ignore[attr-defined]
        ret = uid

    return ret
__all__.append("get_or_set_uid")
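# Illustrative usage sketch (not part of dak itself); "jane@example.org" is a
# made-up uid. get_or_set_uid() inserts the row on first use, so the call is
# idempotent.
def _example_ensure_uid(session: "Session") -> Uid:
    uid = get_or_set_uid("jane@example.org", session=session)
    # With an explicit session the caller is responsible for committing.
    session.commit()
    return uid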
[docs] @session_wrapper def get_uid_from_fingerprint( fpr: str, session: "Session | None" = None ) -> Optional[Uid]: assert session is not None q = session.query(Uid) q = q.join(Fingerprint).filter_by(fingerprint=fpr) return q.one_or_none()
__all__.append("get_uid_from_fingerprint") ################################################################################
[docs] class MetadataKey(ORMObject): __tablename__ = "metadata_keys" key_id: Mapped[int] = mapped_column(primary_key=True) key: Mapped[str] = mapped_column(unique=True) ordering: Mapped[int] = mapped_column(default=0) def __init__(self, key=None): self.key = key
[docs] @override def properties(self) -> list[str]: return ["key"]
__all__.append("MetadataKey")
[docs] @session_wrapper
def get_or_set_metadatakey(
    keyname: str, session: "Session | None" = None
) -> MetadataKey:
    """
    Returns MetadataKey object for given keyname.

    If no matching keyname is found, a row is inserted.

    :param keyname: The keyname to add
    :param session: Optional SQL session object (a temporary one will be
                    generated if not supplied).  If not passed, a commit will be performed at
                    the end of the function, otherwise the caller is responsible for committing.
    :return: the metadatakey object for the given keyname
    """
    assert session is not None

    q = session.query(MetadataKey).filter_by(key=keyname)

    try:
        ret = q.one()
    except NoResultFound:
        ret = MetadataKey(keyname)
        session.add(ret)
        session.commit_or_flush()  # type: ignore[attr-defined]

    return ret
__all__.append("get_or_set_metadatakey") ################################################################################
[docs] class BinaryMetadata(ORMObject): __tablename__ = "binaries_metadata" binary_id: Mapped[int] = mapped_column( "bin_id", ForeignKey("binaries.id", ondelete="CASCADE"), primary_key=True ) key_id: Mapped[int] = mapped_column( ForeignKey("metadata_keys.key_id"), primary_key=True ) value: Mapped[str] binary: Mapped["DBBinary"] = relationship(back_populates="key") key: Mapped[MetadataKey] = relationship() def __init__(self, key=None, value=None, binary=None): self.key = key self.value = value if binary is not None: self.binary = binary
[docs] @override def properties(self) -> list[str]: return ["binary", "key", "value"]
__all__.append("BinaryMetadata") ################################################################################
[docs] class SourceMetadata(ORMObject): __tablename__ = "source_metadata" source_id: Mapped[int] = mapped_column( "src_id", ForeignKey("source.id", ondelete="CASCADE"), primary_key=True ) key_id: Mapped[int] = mapped_column( ForeignKey("metadata_keys.key_id"), primary_key=True ) value: Mapped[str] source: Mapped["DBSource"] = relationship(back_populates="key") key: Mapped[MetadataKey] = relationship() def __init__(self, key=None, value=None, source=None): self.key = key self.value = value if source is not None: self.source = source
[docs] @override def properties(self) -> list[str]: return ["source", "key", "value"]
__all__.append("SourceMetadata") ################################################################################ class MetadataProxy: def __init__(self, session, query): self.session = session self.query = query def _get(self, key): metadata_key = self.session.query(MetadataKey).filter_by(key=key).first() if metadata_key is None: return None metadata = self.query.filter_by(key=metadata_key).first() return metadata def __contains__(self, key: str) -> bool: if self._get(key) is not None: return True return False def __getitem__(self, key: str) -> str: metadata = self._get(key) if metadata is None: raise KeyError return metadata.value @overload def get(self, key: str, default: str) -> str: ... # noqa: E704 @overload def get(self, key: str, default: None = None) -> str | None: ... # noqa: E704 def get(self, key, default=None): try: return self[key] except KeyError: return default ################################################################################
[docs] class VersionCheck(ORMObject): __tablename__ = "version_check" suite_id: Mapped[int] = mapped_column( "suite", ForeignKey("suite.id"), primary_key=True ) check: Mapped[str] = mapped_column(primary_key=True) reference_id: Mapped[int] = mapped_column( "reference", ForeignKey("suite.id"), primary_key=True ) suite: Mapped["Suite"] = relationship(foreign_keys=[suite_id]) reference: Mapped["Suite"] = relationship( foreign_keys=[reference_id], lazy="joined" ) def __init__(self, *args, **kwargs): pass
[docs] @override def properties(self) -> list[str]: return ["check"]
__all__.append("VersionCheck")
[docs] @session_wrapper def get_version_checks( suite_name: str, check: Optional[str] = None, session: "Session | None" = None ) -> list[VersionCheck]: assert session is not None suite = get_suite(suite_name, session) if not suite: # Make sure that what we return is iterable so that list comprehensions # involving this don't cause a traceback return [] q = session.query(VersionCheck).filter_by(suite=suite) if check: q = q.filter_by(check=check) return q.all()
__all__.append("get_version_checks") ################################################################################ class ExternalSignatureRequests(Base): __tablename__ = "external_signature_requests" association_id: Mapped[int] = mapped_column( ForeignKey("bin_associations.id", ondelete="CASCADE"), primary_key=True ) suite_id: Mapped[int] = mapped_column( ForeignKey("suite.id", ondelete="CASCADE"), primary_key=True ) ################################################################################ package_list = Table( "package_list", Base.metadata, Column("package", String()), Column("version", DebVersion()), Column("source", String()), Column("source_version", DebVersion()), Column("suite", String()), Column("codename", String()), Column("archive", String()), Column("component", String()), Column("display_suite", String()), Column("architecture_is_source", Boolean()), Column("architecture", String()), Column("type", String()), ) ################################################################################
[docs] class DBConn: """ database module init. """ __shared_state: dict[str, Any] = {} def __init__(self, *, debug=False) -> None: self.__dict__ = self.__shared_state if not getattr(self, "initialised", False): self.initialised = True self.debug = debug self.__createconn() ## Connection functions def __createconn(self) -> None: from .config import Config cnf = Config() if "DB::Service" in cnf: connstr = "postgresql://service=%s" % cnf["DB::Service"] elif "DB::Host" in cnf: # TCP/IP connstr = "postgresql://%s" % cnf["DB::Host"] if "DB::Port" in cnf and cnf["DB::Port"] != "-1": connstr += ":%s" % cnf["DB::Port"] connstr += "/%s" % cnf["DB::Name"] else: # Unix Socket connstr = "postgresql:///%s" % cnf["DB::Name"] if "DB::Port" in cnf and cnf["DB::Port"] != "-1": connstr += "?port=%s" % cnf["DB::Port"] engine_args: dict[str, object] = {"echo": self.debug} if "DB::PoolSize" in cnf: engine_args["pool_size"] = int(cnf["DB::PoolSize"]) if "DB::MaxOverflow" in cnf: engine_args["max_overflow"] = int(cnf["DB::MaxOverflow"]) # we don't support non-utf-8 connections engine_args["client_encoding"] = "utf-8" # Monkey patch a new dialect in in order to support service= syntax import sqlalchemy.dialects.postgresql from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2 class PGDialect_psycopg2_dak(PGDialect_psycopg2): @override def create_connect_args(self, url): if str(url).startswith("postgresql://service="): # Eww servicename = str(url)[21:] return (["service=%s" % servicename], {}) else: return PGDialect_psycopg2.create_connect_args(self, url) sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak # type: ignore[attr-defined] try: self.db_pg = create_engine(connstr, **engine_args) self.db_smaker = sessionmaker( bind=self.db_pg, autoflush=True, autocommit=False ) except OperationalError as e: from . import utils utils.fubar("Cannot connect to database (%s)" % str(e)) self.pid = os.getpid()
[docs] def session(self, work_mem=0) -> "Session": """ Returns a new session object. If a work_mem parameter is provided a new transaction is started and the work_mem parameter is set for this transaction. The work_mem parameter is measured in MB. A default value will be used if the parameter is not set. """ # reinitialize DBConn in new processes if self.pid != os.getpid(): self.__createconn() session = self.db_smaker() if work_mem > 0: session.execute(sql.text("SET LOCAL work_mem TO '%d MB'" % work_mem)) return session
__all__.append("DBConn")
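# Illustrative usage sketch (not part of dak itself): DBConn shares one engine
# across instances (Borg pattern) and session() hands out ORM sessions bound
# to it; a positive work_mem (in MB) is applied with SET LOCAL.
def _example_open_session(work_mem: int = 0) -> "Session":
    return DBConn().session(work_mem=work_mem)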