1# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
2#
3# This program is free software; you can redistribute it and/or modify
4# it under the terms of the GNU General Public License as published by
5# the Free Software Foundation; either version 2 of the License, or
6# (at your option) any later version.
7#
8# This program is distributed in the hope that it will be useful,
9# but WITHOUT ANY WARRANTY; without even the implied warranty of
10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11# GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program; if not, write to the Free Software Foundation, Inc.,
15# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17"""module to manipulate the archive
19This module provides classes to manipulate the archive.
20"""
22import os
23import shutil
24import subprocess
25import traceback
26from collections.abc import Callable, Iterable, Sequence
27from typing import TYPE_CHECKING, Optional, Union
29import sqlalchemy.exc
30from sqlalchemy.orm import object_session
31from sqlalchemy.orm.exc import NoResultFound
33import daklib.checks as checks
34import daklib.upload
35import daklib.utils
36from daklib.config import Config
37from daklib.dbconn import (
38 Archive,
39 ArchiveFile,
40 Component,
41 DBBinary,
42 DBChange,
43 DBConn,
44 DBSource,
45 DSCFile,
46 Fingerprint,
47 Maintainer,
48 Override,
49 OverrideType,
50 PolicyQueue,
51 PolicyQueueByhandFile,
52 PolicyQueueUpload,
53 PoolFile,
54 Suite,
55 VersionCheck,
56 get_architecture,
57 get_mapped_component,
58 get_or_set_maintainer,
59 import_metadata_into_db,
60)
61from daklib.externalsignature import check_upload_for_external_signature_request
62from daklib.fstransactions import FilesystemTransaction
63from daklib.regexes import re_bin_only_nmu, re_changelog_versions
64from daklib.tag2upload import get_tag2upload_info_for_upload, parse_git_tag_info
66if TYPE_CHECKING:
67 import daklib.packagelist
70class ArchiveException(Exception):
71 pass
74class HashMismatchException(ArchiveException):
75 pass
78class ArchiveTransaction:
79 """manipulate the archive in a transaction"""
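# A minimal usage sketch (illustrative only; assumes db_source, suite and
# component are existing database objects):
#
#     with ArchiveTransaction() as transaction:
#         transaction.copy_source(db_source, suite, component)
#
# Leaving the with-block without an exception commits both the database
# session and the filesystem transaction (see __exit__ below); an exception
# rolls both back.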
81 def __init__(self):
82 self.fs = FilesystemTransaction()
83 self.session = DBConn().session()
85 def get_file(
86 self,
87 hashed_file: daklib.upload.HashedFile,
88 source_name: str,
89 check_hashes: bool = True,
90 ) -> PoolFile:
91 """Look for file `hashed_file` in database
93 :param hashed_file: file to look for in the database
94 :param source_name: source package name
95 :param check_hashes: check size and hashes match
96 :return: database entry for the file
97 :raises KeyError: file was not found in the database
98 :raises HashMismatchException: hash mismatch
99 """
100 poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
101 try:
102 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
103 if check_hashes and (
104 poolfile.filesize != hashed_file.size
105 or poolfile.md5sum != hashed_file.md5sum
106 or poolfile.sha1sum != hashed_file.sha1sum
107 or poolfile.sha256sum != hashed_file.sha256sum
108 ):
109 raise HashMismatchException(
110 "{0}: Does not match file already existing in the pool.".format(
111 hashed_file.filename
112 )
113 )
114 return poolfile
115 except NoResultFound:
116 raise KeyError("{0} not found in database.".format(poolname))
118 def _install_file(
119 self, directory, hashed_file, archive, component, source_name
120 ) -> PoolFile:
121 """Install a file
123 Will not give an error when the file is already present.
125 :return: database object for the new file
126 """
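# Overview of the steps below: reuse or create the PoolFile record, make sure
# an ArchiveFile row links it to the (archive, component) pair, and, when that
# link is new, copy the payload into the pool via the filesystem transaction.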
127 session = self.session
129 poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
130 try:
131 poolfile = self.get_file(hashed_file, source_name)
132 except KeyError:
133 poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
134 poolfile.md5sum = hashed_file.md5sum
135 poolfile.sha1sum = hashed_file.sha1sum
136 poolfile.sha256sum = hashed_file.sha256sum
137 session.add(poolfile)
138 session.flush()
140 try:
141 session.query(ArchiveFile).filter_by(
142 archive=archive, component=component, file=poolfile
143 ).one()
144 except NoResultFound:
145 archive_file = ArchiveFile(archive, component, poolfile)
146 session.add(archive_file)
147 session.flush()
149 path = os.path.join(
150 archive.path, "pool", component.component_name, poolname
151 )
152 hashed_file_path = os.path.join(directory, hashed_file.input_filename)
153 self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
155 return poolfile
157 def install_binary(
158 self,
159 directory: str,
160 binary: daklib.upload.Binary,
161 suite: Suite,
162 component: Component,
163 *,
164 allow_tainted: bool = False,
165 fingerprint: Optional[Fingerprint] = None,
166 authorized_by_fingerprint: Optional[Fingerprint] = None,
167 source_suites=None,
168 extra_source_archives: Optional[Iterable[Archive]] = None,
169 ) -> DBBinary:
170 """Install a binary package
172 :param directory: directory the binary package is located in
173 :param binary: binary package to install
174 :param suite: target suite
175 :param component: target component
176 :param allow_tainted: allow copying additional files from tainted archives
177 :param fingerprint: optional fingerprint
178 :param source_suites: suites to copy the source from if they are not
179 in `suite` or :const:`True` to allow copying from any
180 suite.
181 Can be a SQLAlchemy subquery for :class:`Suite` or :const:`True`.
182 :param extra_source_archives: extra archives to copy Built-Using sources from
183 :return: database object for the new package
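 Example (an illustrative sketch; ``binary`` is a :class:`daklib.upload.Binary`
 and the remaining arguments are existing database rows)::

     transaction.install_binary(
         upload_directory, binary, suite, component,
         fingerprint=fingerprint,
         source_suites=True,
         extra_source_archives=[suite.archive],
     )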
184 """
185 session = self.session
186 control = binary.control
187 maintainer = get_or_set_maintainer(control["Maintainer"], session)
188 architecture = get_architecture(control["Architecture"], session)
190 (source_name, source_version) = binary.source
191 source_query = session.query(DBSource).filter_by(
192 source=source_name, version=source_version
193 )
194 source = source_query.filter(DBSource.suites.contains(suite)).first()
195 if source is None:
196 if source_suites is not True:
197 source_query = source_query.join(DBSource.suites).filter(
198 Suite.suite_id == source_suites.c.id
199 )
200 source = source_query.first()
201 if source is None:
202 raise ArchiveException(
203 "{0}: trying to install to {1}, but could not find source ({2} {3})".format(
204 binary.hashed_file.filename,
205 suite.suite_name,
206 source_name,
207 source_version,
208 )
209 )
210 self.copy_source(source, suite, source.poolfile.component)
212 db_file = self._install_file(
213 directory, binary.hashed_file, suite.archive, component, source_name
214 )
216 unique = dict(
217 package=control["Package"],
218 version=control["Version"],
219 architecture=architecture,
220 )
221 rest = dict(
222 source=source,
223 maintainer=maintainer,
224 poolfile=db_file,
225 binarytype=binary.type,
226 )
227 # Other attributes that are ignored for purposes of equality with
228 # an existing source
229 rest2 = dict(
230 fingerprint=fingerprint,
231 authorized_by_fingerprint=authorized_by_fingerprint,
232 )
234 try:
235 db_binary = session.query(DBBinary).filter_by(**unique).one()
236 for key, value in rest.items():
237 if getattr(db_binary, key) != value:
238 raise ArchiveException(
239 "{0}: Does not match binary in database.".format(
240 binary.hashed_file.filename
241 )
242 )
243 except NoResultFound:
244 db_binary = DBBinary(**unique)
245 for key, value in rest.items():
246 setattr(db_binary, key, value)
247 for key, value in rest2.items():
248 setattr(db_binary, key, value)
249 session.add(db_binary)
250 session.flush()
251 import_metadata_into_db(db_binary, session)
253 self._add_built_using(
254 db_binary,
255 binary.hashed_file.filename,
256 control,
257 suite,
258 extra_archives=extra_source_archives,
259 )
261 if suite not in db_binary.suites:
262 db_binary.suites.append(suite)
264 session.flush()
266 return db_binary
268 def _ensure_extra_source_exists(
269 self,
270 filename: str,
271 source: DBSource,
272 archive: Archive,
273 extra_archives: Optional[Iterable[Archive]] = None,
274 ):
275 """ensure source exists in the given archive
277 This is intended to be used to check that Built-Using sources exist.
279 :param filename: filename to use in error messages
280 :param source: source to look for
281 :param archive: archive to look in
282 :param extra_archives: list of archives to copy the source package from
283 if it is not yet present in `archive`
284 """
285 session = self.session
286 db_file = (
287 session.query(ArchiveFile)
288 .filter_by(file=source.poolfile, archive=archive)
289 .first()
290 )
291 if db_file is not None:
292 return True
294 # Try to copy file from one extra archive
295 if extra_archives is None:
296 extra_archives = []
297 db_file = (
298 session.query(ArchiveFile)
299 .filter_by(file=source.poolfile)
300 .filter(ArchiveFile.archive_id.in_([a.archive_id for a in extra_archives]))
301 .first()
302 )
303 if db_file is None:
304 raise ArchiveException(
305 "{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.".format(
306 filename, source.source, source.version, archive.archive_name
307 )
308 )
310 source_archive = db_file.archive
311 for dsc_file in source.srcfiles:
312 af = (
313 session.query(ArchiveFile)
314 .filter_by(
315 file=dsc_file.poolfile,
316 archive=source_archive,
317 component=db_file.component,
318 )
319 .one()
320 )
321 # We were given an explicit list of archives so it is okay to copy from tainted archives.
322 self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
324 def _add_built_using(
325 self, db_binary, filename, control, suite, extra_archives=None
326 ) -> None:
327 """Add Built-Using sources to ``db_binary.extra_sources``"""
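# Built-Using entries name source packages with exact versions, for example
# (illustrative): "Built-Using: gcc-12 (= 12.2.0-1), linux (= 6.1.27-1)".
# Each referenced source must exist in (or be copyable into) the target
# archive; see _ensure_extra_source_exists above.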
328 session = self.session
330 for bu_source_name, bu_source_version in daklib.utils.parse_built_using(
331 control
332 ):
333 bu_source = (
334 session.query(DBSource)
335 .filter_by(source=bu_source_name, version=bu_source_version)
336 .first()
337 )
338 if bu_source is None:
339 raise ArchiveException(
340 "{0}: Built-Using refers to non-existing source package {1} (= {2})".format(
341 filename, bu_source_name, bu_source_version
342 )
343 )
345 self._ensure_extra_source_exists(
346 filename, bu_source, suite.archive, extra_archives=extra_archives
347 )
349 db_binary.extra_sources.append(bu_source)
351 def _add_dsc_files(
352 self,
353 directory: str,
354 archive: Archive,
355 component: Component,
356 source: DBSource,
357 files: Iterable[daklib.upload.HashedFile],
358 *,
359 allow_tainted: bool,
360 extra_file: bool = False,
361 ) -> None:
362 for hashed_file in files:
363 hashed_file_path = os.path.join(directory, hashed_file.input_filename)
364 if os.path.exists(hashed_file_path):
365 db_file = self._install_file(
366 directory, hashed_file, archive, component, source.source
367 )
368 self.session.add(db_file)
369 else:
370 db_file = self.get_file(hashed_file, source.source)
371 self._copy_file(
372 db_file, archive, component, allow_tainted=allow_tainted
373 )
375 db_dsc_file = DSCFile()
376 db_dsc_file.source = source
377 db_dsc_file.poolfile = db_file
378 db_dsc_file.extra_file = extra_file
379 self.session.add(db_dsc_file)
381 def install_source_to_archive(
382 self,
383 directory: str,
384 source: daklib.upload.Source,
385 archive: Archive,
386 component: Component,
387 changed_by: Maintainer,
388 *,
389 allow_tainted=False,
390 fingerprint: Optional[Fingerprint] = None,
391 authorized_by_fingerprint: Optional[Fingerprint] = None,
392 extra_source_files: Iterable[daklib.upload.HashedFile] = [],
393 ) -> DBSource:
394 """Install source package to archive"""
395 session = self.session
396 control = source.dsc
397 maintainer = get_or_set_maintainer(control["Maintainer"], session)
398 source_name = control["Source"]
400 ### Add source package to database
402 # We need to install the .dsc first as the DBSource object refers to it.
403 db_file_dsc = self._install_file(
404 directory, source._dsc_file, archive, component, source_name
405 )
407 unique = dict(
408 source=source_name,
409 version=control["Version"],
410 )
411 rest = dict(
412 maintainer=maintainer,
413 poolfile=db_file_dsc,
414 dm_upload_allowed=(control.get("DM-Upload-Allowed", "no") == "yes"),
415 )
416 # Other attributes that are ignored for purposes of equality with
417 # an existing source
418 rest2 = dict(
419 changedby=changed_by,
420 fingerprint=fingerprint,
421 authorized_by_fingerprint=authorized_by_fingerprint,
422 )
424 created = False
425 try:
426 db_source = session.query(DBSource).filter_by(**unique).one()
427 for key, value in rest.items():
428 if getattr(db_source, key) != value:
429 raise ArchiveException(
430 "{0}: Does not match source in database.".format(
431 source._dsc_file.filename
432 )
433 )
434 except NoResultFound:
435 created = True
436 db_source = DBSource(**unique)
437 for key, value in rest.items():
438 setattr(db_source, key, value)
439 for key, value in rest2.items():
440 setattr(db_source, key, value)
441 session.add(db_source)
442 session.flush()
444 # Add .dsc file. Other files will be added later.
445 db_dsc_file = DSCFile()
446 db_dsc_file.source = db_source
447 db_dsc_file.poolfile = db_file_dsc
448 session.add(db_dsc_file)
449 session.flush()
451 if not created:
452 for f in db_source.srcfiles:
453 self._copy_file(
454 f.poolfile, archive, component, allow_tainted=allow_tainted
455 )
456 return db_source
458 ### Now add remaining files and copy them to the archive.
459 self._add_dsc_files(
460 directory,
461 archive,
462 component,
463 db_source,
464 source.files.values(),
465 allow_tainted=allow_tainted,
466 )
467 self._add_dsc_files(
468 directory,
469 archive,
470 component,
471 db_source,
472 extra_source_files,
473 allow_tainted=allow_tainted,
474 extra_file=True,
475 )
477 session.flush()
479 # Importing is safe as we only arrive here when we did not find the source already installed earlier.
480 import_metadata_into_db(db_source, session)
482 # Uploaders are the maintainer and co-maintainers from the Uploaders field
483 db_source.uploaders.append(maintainer)
484 if "Uploaders" in control:
485 from daklib.textutils import split_uploaders
487 for u in split_uploaders(control["Uploaders"]):
488 db_source.uploaders.append(get_or_set_maintainer(u, session))
489 session.flush()
491 return db_source
493 def install_source(
494 self,
495 directory: str,
496 source: daklib.upload.Source,
497 suite: Suite,
498 component: Component,
499 changed_by: Maintainer,
500 *,
501 allow_tainted: bool = False,
502 fingerprint: Optional[Fingerprint] = None,
503 authorized_by_fingerprint: Optional[Fingerprint] = None,
504 extra_source_files: Iterable[daklib.upload.HashedFile] = [],
505 ) -> DBSource:
506 """Install a source package
508 :param directory: directory the source package is located in
509 :param source: source package to install
510 :param suite: target suite
511 :param component: target component
512 :param changed_by: person who prepared this version of the package
513 :param allow_tainted: allow copying additional files from tainted archives
514 :param fingerprint: optional fingerprint
515 :return: database object for the new source
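 Example (an illustrative sketch, mirroring :meth:`install_binary`)::

     transaction.install_source(
         upload_directory, source, suite, component, changed_by,
         fingerprint=fingerprint,
     )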
516 """
517 db_source = self.install_source_to_archive(
518 directory,
519 source,
520 suite.archive,
521 component,
522 changed_by,
523 allow_tainted=allow_tainted,
524 fingerprint=fingerprint,
525 authorized_by_fingerprint=authorized_by_fingerprint,
526 extra_source_files=extra_source_files,
527 )
529 if suite in db_source.suites:
530 return db_source
531 db_source.suites.append(suite)
532 self.session.flush()
534 return db_source
536 def _copy_file(
537 self,
538 db_file: PoolFile,
539 archive: Archive,
540 component: Component,
541 allow_tainted: bool = False,
542 ) -> None:
543 """Copy a file to the given archive and component
545 :param db_file: file to copy
546 :param archive: target archive
547 :param component: target component
548 :param allow_tainted: allow copying from tainted archives (such as NEW)
549 """
550 session = self.session
552 if (
553 session.query(ArchiveFile)
554 .filter_by(archive=archive, component=component, file=db_file)
555 .first()
556 is None
557 ):
558 query = session.query(ArchiveFile).filter_by(file=db_file)
559 if not allow_tainted:
560 query = query.join(Archive).filter(
561 Archive.tainted == False # noqa:E712
562 )
564 source_af = query.first()
565 if source_af is None:
566 raise ArchiveException(
567 "cp: Could not find {0} in any archive.".format(db_file.filename)
568 )
569 target_af = ArchiveFile(archive, component, db_file)
570 session.add(target_af)
571 session.flush()
572 self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
574 def copy_binary(
575 self,
576 db_binary: DBBinary,
577 suite: Suite,
578 component: Component,
579 allow_tainted: bool = False,
580 extra_archives: Optional[Iterable[Archive]] = None,
581 ) -> None:
582 """Copy a binary package to the given suite and component
584 :param db_binary: binary to copy
585 :param suite: target suite
586 :param component: target component
587 :param allow_tainted: allow copying from tainted archives (such as NEW)
588 :param extra_archives: extra archives to copy Built-Using sources from
589 """
590 session = self.session
591 archive = suite.archive
592 if archive.tainted:
593 allow_tainted = True
595 filename = db_binary.poolfile.filename
597 # make sure source is present in target archive
598 db_source = db_binary.source
599 if (
600 session.query(ArchiveFile)
601 .filter_by(archive=archive, file=db_source.poolfile)
602 .first()
603 is None
604 ):
605 raise ArchiveException(
606 "{0}: cannot copy to {1}: source is not present in target archive".format(
607 filename, suite.suite_name
608 )
609 )
611 # make sure built-using packages are present in target archive
612 for db_source in db_binary.extra_sources:
613 self._ensure_extra_source_exists(
614 filename, db_source, archive, extra_archives=extra_archives
615 )
617 # copy binary
618 db_file = db_binary.poolfile
619 self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
620 if suite not in db_binary.suites:
621 db_binary.suites.append(suite)
622 self.session.flush()
624 def copy_source(
625 self,
626 db_source: DBSource,
627 suite: Suite,
628 component: Component,
629 allow_tainted: bool = False,
630 ) -> None:
631 """Copy a source package to the given suite and component
633 :param db_source: source to copy
634 :param suite: target suite
635 :param component: target component
636 :param allow_tainted: allow copying from tainted archives (such as NEW)
637 """
638 archive = suite.archive
639 if archive.tainted:
640 allow_tainted = True
641 for db_dsc_file in db_source.srcfiles:
642 self._copy_file(
643 db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted
644 )
645 if suite not in db_source.suites:
646 db_source.suites.append(suite)
647 self.session.flush()
649 def remove_file(
650 self, db_file: PoolFile, archive: Archive, component: Component
651 ) -> None:
652 """Remove a file from a given archive and component
654 :param db_file: file to remove
655 :param archive: archive to remove the file from
656 :param component: component to remove the file from
657 """
658 af = self.session.query(ArchiveFile).filter_by(
659 file=db_file, archive=archive, component=component
660 ).one()
661 self.fs.unlink(af.path)
662 self.session.delete(af)
664 def remove_binary(self, binary: DBBinary, suite: Suite) -> None:
665 """Remove a binary from a given suite
667 :param binary: binary to remove
668 :param suite: suite to remove the package from
669 """
670 binary.suites.remove(suite)
671 self.session.flush()
673 def remove_source(self, source: DBSource, suite: Suite) -> None:
674 """Remove a source from a given suite
676 :param source: source to remove
677 :param suite: suite to remove the package from
679 :raises ArchiveException: source package is still referenced by other
680 binaries in the suite
681 """
682 session = self.session
684 query = (
685 session.query(DBBinary)
686 .filter_by(source=source)
687 .filter(DBBinary.suites.contains(suite))
688 )
689 if query.first() is not None:
690 raise ArchiveException(
691 "src:{0} is still used by binaries in suite {1}".format(
692 source.source, suite.suite_name
693 )
694 )
696 source.suites.remove(suite)
697 session.flush()
699 def commit(self) -> None:
700 """commit changes"""
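# Commit the database session first, then the filesystem changes.  The
# finally block always rolls back afterwards so that anything left
# uncommitted (for example when one of the commit calls raises) is undone.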
701 try:
702 self.session.commit()
703 self.fs.commit()
704 finally:
705 self.session.rollback()
706 self.fs.rollback()
708 def rollback(self) -> None:
709 """rollback changes"""
710 self.session.rollback()
711 self.fs.rollback()
713 def flush(self) -> None:
714 """flush underlying database session"""
715 self.session.flush()
717 def __enter__(self):
718 return self
720 def __exit__(self, type, value, traceback):
721 if type is None:
722 self.commit()
723 else:
724 self.rollback()
725 return None
728def source_component_from_package_list(
729 package_list: "daklib.packagelist.PackageList", suite: Suite
730) -> Optional[Component]:
731 """Get component for a source package
733 This function will look at the Package-List field to determine the
734 component the source package belongs to. This is the first component
735 the source package provides binaries for (first with respect to the
736 ordering of components).
738 If the source package has no Package-List field, None is returned.
740 :param package_list: package list of the source to get the override for
741 :param suite: suite to consider for binaries produced
742 :return: component for the given source or :const:`None`
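 Example (illustrative): for a Package-List that declares binaries in both
 contrib and main, and with main ordered before contrib, the Component row
 for main is returned.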
743 """
744 if package_list.fallback:
745 return None
746 session = object_session(suite)
747 packages = package_list.packages_for_suite(suite)
748 components = set(p.component for p in packages)
749 query = (
750 session.query(Component)
751 .order_by(Component.ordering)
752 .filter(Component.component_name.in_(components))
753 )
754 return query.first()
757class ArchiveUpload:
758 """handle an upload
760 This class can be used in a with-statement::
762 with ArchiveUpload(...) as upload:
763 ...
765 Doing so will automatically run any required cleanup and also rollback the
766 transaction if it was not committed.
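 A typical flow looks like this (a sketch only; BYHAND and policy-queue
 handling are omitted)::

     with ArchiveUpload(directory, changes, keyrings) as upload:
         if upload.check():
             if upload.new:
                 upload.install_to_new()
             else:
                 upload.install()
             upload.commit()
         else:
             print(upload.reject_reasons)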
767 """
769 def __init__(self, directory: str, changes, keyrings):
770 self.transaction: ArchiveTransaction = ArchiveTransaction()
771 """transaction used to handle the upload"""
773 self.session = self.transaction.session
774 """database session"""
776 self.original_directory: str = directory
777 self.original_changes = changes
779 self.changes: Optional[daklib.upload.Changes] = None
780 """upload to process"""
782 self._extra_source_files: list[daklib.upload.HashedFile] = []
783 """extra source files"""
785 self.directory: str = None
786 """directory with temporary copy of files. set by :meth:`prepare`"""
788 self.keyrings = keyrings
790 self.fingerprint: Fingerprint = (
791 self.session.query(Fingerprint)
792 .filter_by(fingerprint=changes.primary_fingerprint)
793 .one()
794 )
795 """fingerprint of the key used to sign the upload"""
797 self._authorized_by_fingerprint: Optional[Fingerprint] = None
798 """fingerprint of the key that authorized the upload"""
800 self.reject_reasons: list[str] = []
801 """reasons why the upload cannot be accepted"""
803 self.warnings: list[str] = []
804 """warnings
806 .. note::
808 Not used yet.
809 """
811 self.final_suites: Optional[list[Suite]] = None
813 self.new: bool = False
814 """upload is NEW. set by :meth:`check`"""
816 self._checked: bool = False
817 """checks passed. set by :meth:`check`"""
819 self._new_queue = (
820 self.session.query(PolicyQueue).filter_by(queue_name="new").one()
821 )
822 self._new = self._new_queue.suite
824 @property
825 def authorized_by_fingerprint(self) -> Fingerprint:
826 """
827 fingerprint of the key that authorized the upload
828 """
830 return (
831 self._authorized_by_fingerprint
832 if self._authorized_by_fingerprint is not None
833 else self.fingerprint
834 )
836 @authorized_by_fingerprint.setter
837 def authorized_by_fingerprint(self, fingerprint: Fingerprint) -> None:
838 self._authorized_by_fingerprint = fingerprint
840 def warn(self, message: str) -> None:
841 """add a warning message
843 Adds a warning message that can later be seen in :attr:`warnings`
845 :param message: warning message
846 """
847 self.warnings.append(message)
849 def prepare(self):
850 """prepare upload for further processing
852 This copies the files involved to a temporary directory. If you use
853 this method directly, you have to remove the directory given by the
854 :attr:`directory` attribute later on your own.
856 Instead of using the method directly, you can also use a with-statement::
858 with ArchiveUpload(...) as upload:
859 ...
861 This will automatically handle any required cleanup.
862 """
863 assert self.directory is None
864 assert self.original_changes.valid_signature
866 cnf = Config()
867 session = self.transaction.session
869 group = cnf.get("Dinstall::UnprivGroup") or None
870 self.directory = daklib.utils.temp_dirname(
871 parent=cnf.get("Dir::TempPath"), mode=0o2750, group=group
872 )
873 with FilesystemTransaction() as fs:
874 src = os.path.join(self.original_directory, self.original_changes.filename)
875 dst = os.path.join(self.directory, self.original_changes.filename)
876 fs.copy(src, dst, mode=0o640)
878 self.changes = daklib.upload.Changes(
879 self.directory, self.original_changes.filename, self.keyrings
880 )
882 files = {}
883 try:
884 files = self.changes.files
885 except daklib.upload.InvalidChangesException:
886 # Do not raise an exception; upload will be rejected later
887 # due to the missing files
888 pass
890 for f in files.values():
891 src = os.path.join(self.original_directory, f.filename)
892 dst = os.path.join(self.directory, f.filename)
893 if not os.path.exists(src):
894 continue
895 fs.copy(src, dst, mode=0o640)
897 source = None
898 try:
899 source = self.changes.source
900 except Exception:
901 # Do not raise an exception here if the .dsc is invalid.
902 pass
904 if source is not None:
905 for f in source.files.values():
906 src = os.path.join(self.original_directory, f.filename)
907 dst = os.path.join(self.directory, f.filename)
908 if not os.path.exists(dst):
909 try:
910 db_file = self.transaction.get_file(
911 f, source.dsc["Source"], check_hashes=False
912 )
913 db_archive_file = (
914 session.query(ArchiveFile)
915 .filter_by(file=db_file)
916 .first()
917 )
918 fs.copy(db_archive_file.path, dst, mode=0o640)
919 except KeyError:
920 # Ignore if get_file could not find it. Upload will
921 # probably be rejected later.
922 pass
924 def unpacked_source(self) -> Optional[str]:
925 """Path to unpacked source
927 Get path to the unpacked source. This method does unpack the source
928 into a temporary directory under :attr:`directory` if it has not
929 been done so already.
931 :return: string giving the path to the unpacked source directory
932 or :const:`None` if no source was included in the upload.
933 """
934 assert self.directory is not None
936 source = self.changes.source
937 if source is None:
938 return None
939 dsc_path = os.path.join(self.directory, source._dsc_file.filename)
941 sourcedir = os.path.join(self.directory, "source")
942 if not os.path.exists(sourcedir):
943 subprocess.check_call(
944 ["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir],
945 shell=False,
946 stdout=subprocess.DEVNULL,
947 )
948 if not os.path.isdir(sourcedir):
949 raise Exception(
950 "{0} is not a directory after extracting source package".format(
951 sourcedir
952 )
953 )
954 return sourcedir
956 def _map_suite(self, suite_name):
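# Rule formats understood below (values are illustrative):
#   "map <from> <to>", "silent-map <from> <to>",
#   "copy <from> <to>", "silent-copy <from> <to>",
#   "ignore <suite>", "reject <suite>"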
957 suite_names = set((suite_name,))
958 for rule in Config().value_list("SuiteMappings"):
959 fields = rule.split()
960 rtype = fields[0]
961 if rtype == "map" or rtype == "silent-map":
962 (src, dst) = fields[1:3]
963 if src in suite_names:
964 suite_names.remove(src)
965 suite_names.add(dst)
966 if rtype != "silent-map":
967 self.warnings.append("Mapping {0} to {1}.".format(src, dst))
968 elif rtype == "copy" or rtype == "silent-copy":
969 (src, dst) = fields[1:3]
970 if src in suite_names:
971 suite_names.add(dst)
972 if rtype != "silent-copy":
973 self.warnings.append("Copy {0} to {1}.".format(src, dst))
974 elif rtype == "ignore":
975 ignored = fields[1]
976 if ignored in suite_names:
977 suite_names.remove(ignored)
978 self.warnings.append("Ignoring target suite {0}.".format(ignored))
979 elif rtype == "reject":
980 rejected = fields[1]
981 if rejected in suite_names:
982 raise checks.Reject(
983 "Uploads to {0} are not accepted.".format(rejected)
984 )
985 ## XXX: propup-version and map-unreleased not yet implemented
986 return suite_names
988 def _mapped_suites(self) -> list[Suite]:
989 """Get target suites after mappings
991 :return: list giving the mapped target suites of this upload
992 """
993 session = self.session
995 suite_names = set()
996 for dist in self.changes.distributions:
997 suite_names.update(self._map_suite(dist))
999 suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
1000 return suites.all()
1002 def _check_new_binary_overrides(self, suite, overridesuite):
1003 new = False
1004 source = self.changes.source
1006 # Check binaries listed in the source package's Package-List field:
1007 if source is not None and not source.package_list.fallback:
1008 packages = source.package_list.packages_for_suite(suite)
1009 binaries = [entry for entry in packages]
1010 for b in binaries:
1011 override = self._binary_override(overridesuite, b)
1012 if override is None:
1013 self.warnings.append("binary:{0} is NEW.".format(b.name))
1014 new = True
1016 # Check all uploaded packages.
1017 # This is necessary to account for packages without a Package-List
1018 # field, really late binary-only uploads (where an unused override
1019 # was already removed), and for debug packages uploaded to a suite
1020 # without a debug suite (which are then considered as NEW).
1021 binaries = self.changes.binaries
1022 for b in binaries:
1023 if (
1024 daklib.utils.is_in_debug_section(b.control)
1025 and suite.debug_suite is not None
1026 ):
1027 continue
1028 override = self._binary_override(overridesuite, b)
1029 if override is None:
1030 self.warnings.append("binary:{0} is NEW.".format(b.name))
1031 new = True
1033 return new
1035 def _check_new(self, suite, overridesuite) -> bool:
1036 """Check if upload is NEW
1038 An upload is NEW if it has binary or source packages that do not have
1039 an override in `overridesuite` OR if it references files ONLY in a
1040 tainted archive (e.g. when it references files in NEW).
1042 Debug packages (*-dbgsym in Section: debug) are not considered as NEW
1043 if `suite` has a separate debug suite.
1045 :return: :const:`True` if the upload is NEW, :const:`False` otherwise
1046 """
1047 session = self.session
1048 new = False
1050 # Check for missing overrides
1051 if self._check_new_binary_overrides(suite, overridesuite):
1052 new = True
1053 if self.changes.source is not None:
1054 override = self._source_override(overridesuite, self.changes.source)
1055 if override is None:
1056 self.warnings.append(
1057 "source:{0} is NEW.".format(self.changes.source.dsc["Source"])
1058 )
1059 new = True
1061 # Check if we reference a file only in a tainted archive
1062 files = list(self.changes.files.values())
1063 if self.changes.source is not None:
1064 files.extend(self.changes.source.files.values())
1065 for f in files:
1066 query = (
1067 session.query(ArchiveFile)
1068 .join(PoolFile)
1069 .filter(PoolFile.sha1sum == f.sha1sum)
1070 )
1071 query_untainted = query.join(Archive).filter(
1072 Archive.tainted == False # noqa:E712
1073 )
1075 in_archive = query.first() is not None
1076 in_untainted_archive = query_untainted.first() is not None
1078 if in_archive and not in_untainted_archive:
1079 self.warnings.append("{0} is only available in NEW.".format(f.filename))
1080 new = True
1082 return new
1084 def _final_suites(self) -> list[Suite]:
1085 session = self.session
1087 mapped_suites = self._mapped_suites()
1088 final_suites: list[Suite] = []
1090 for suite in mapped_suites:
1091 overridesuite = suite
1092 if suite.overridesuite is not None:
1093 overridesuite = (
1094 session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
1095 )
1096 if self._check_new(suite, overridesuite):
1097 self.new = True
1098 if suite not in final_suites:
1099 final_suites.append(suite)
1101 return final_suites
1103 def _binary_override(
1104 self,
1105 suite: Suite,
1106 binary: "Union[daklib.upload.Binary, daklib.packagelist.PackageListEntry]",
1107 ) -> Optional[Override]:
1108 """Get override entry for a binary
1110 :param suite: suite to get override for
1111 :param binary: binary to get override for
1112 :return: override for the given binary or :const:`None`
1113 """
1114 if suite.overridesuite is not None:
1115 suite = (
1116 self.session.query(Suite)
1117 .filter_by(suite_name=suite.overridesuite)
1118 .one()
1119 )
1121 mapped_component = get_mapped_component(binary.component)
1122 if mapped_component is None:
1123 return None
1125 query = (
1126 self.session.query(Override)
1127 .filter_by(suite=suite, package=binary.name)
1128 .join(Component)
1129 .filter(Component.component_name == mapped_component.component_name)
1130 .join(OverrideType)
1131 .filter(OverrideType.overridetype == binary.type)
1132 )
1134 return query.one_or_none()
1136 def _source_override(
1137 self, suite: Suite, source: daklib.upload.Source
1138 ) -> Optional[Override]:
1139 """Get override entry for a source
1141 :param suite: suite to get override for
1142 :param source: source to get override for
1143 :return: override for the given source or :const:`None`
1144 """
1145 if suite.overridesuite is not None:
1146 suite = (
1147 self.session.query(Suite)
1148 .filter_by(suite_name=suite.overridesuite)
1149 .one()
1150 )
1152 query = (
1153 self.session.query(Override)
1154 .filter_by(suite=suite, package=source.dsc["Source"])
1155 .join(OverrideType)
1156 .filter(OverrideType.overridetype == "dsc")
1157 )
1159 component = source_component_from_package_list(source.package_list, suite)
1160 if component is not None:
1161 query = query.filter(Override.component == component)
1163 return query.one_or_none()
1165 def _binary_component(
1166 self, suite: Suite, binary: daklib.upload.Binary, only_overrides: bool = True
1167 ) -> Optional[Component]:
1168 """get component for a binary
1170 By default this will only look at overrides to get the right component;
1171 if `only_overrides` is :const:`False` this method will also look at the
1172 Section field.
1174 :param only_overrides: only use overrides to get the right component
1175 """
1176 override = self._binary_override(suite, binary)
1177 if override is not None:
1178 return override.component
1179 if only_overrides:
1180 return None
1181 return get_mapped_component(binary.component, self.session)
1183 def _source_component(
1184 self, suite: Suite, source: daklib.upload.Binary, only_overrides: bool = True
1185 ) -> Optional[Component]:
1186 """get component for a source
1188 By default this will only look at overrides to get the right component;
1189 if `only_overrides` is :const:`False` this method will also look at the
1190 Section field.
1192 :param only_overrides: only use overrides to get the right component
1193 """
1194 override = self._source_override(suite, source)
1195 if override is not None:
1196 return override.component
1197 if only_overrides:
1198 return None
1199 return get_mapped_component(source.component, self.session)
1201 def _run_checks(
1202 self,
1203 force: bool,
1204 simple_checks: Iterable[checks.Check],
1205 per_suite_checks: Sequence[checks.Check],
1206 suites: Sequence[Suite],
1207 ) -> bool:
1208 try:
1209 for check in simple_checks:
1210 check().check(self)
1212 if per_suite_checks and not suites:
1213 raise ValueError(
1214 "Per-suite checks should be called, but no suites given."
1215 )
1216 for check in per_suite_checks:
1217 for suite in suites:
1218 check().per_suite_check(self, suite)
1219 except checks.Reject as e:
1220 self.reject_reasons.append(str(e))
1221 return False
1222 except Exception as e:
1223 self.reject_reasons.append(
1224 "Processing raised an exception: {0}.\n{1}".format(
1225 e, traceback.format_exc()
1226 )
1227 )
1228 return False
1230 if len(self.reject_reasons) != 0:
1231 return False
1232 return True
1234 def _run_checks_very_early(self, force: bool) -> bool:
1235 """
1236 run very early checks
1238 These checks validate the signatures on .changes files and the file hashes.
1239 """
1240 return self._run_checks(
1241 force=force,
1242 simple_checks=[
1243 checks.SignatureAndHashesCheck,
1244 checks.WeakSignatureCheck,
1245 checks.SignatureTimestampCheck,
1246 ],
1247 per_suite_checks=[],
1248 suites=[],
1249 )
1251 def _run_checks_early(self, force: bool) -> bool:
1252 """
1253 run early checks
1255 These are checks that run after checking signatures, but
1256 before deciding the target suite.
1258 This should cover archive-wide policies, sanity checks, ...
1259 """
1260 return self._run_checks(
1261 force=force,
1262 simple_checks=[
1263 checks.ChangesCheck,
1264 checks.ExternalHashesCheck,
1265 checks.SourceCheck,
1266 checks.BinaryCheck,
1267 checks.BinaryMembersCheck,
1268 checks.BinaryTimestampCheck,
1269 checks.SingleDistributionCheck,
1270 checks.ArchAllBinNMUCheck,
1271 ],
1272 per_suite_checks=[],
1273 suites=[],
1274 )
1276 def _run_checks_late(self, force: bool, suites: Sequence[Suite]) -> bool:
1277 """
1278 run late checks
1280 These are checks that run after the target suites are known.
1282 This should cover permission checks, suite-specific polices
1283 (e.g., lintian), version constraints, ...
1284 """
1285 return self._run_checks(
1286 force=force,
1287 simple_checks=[
1288 checks.TransitionCheck,
1289 checks.ACLCheck,
1290 checks.NewOverrideCheck,
1291 checks.NoSourceOnlyCheck,
1292 checks.LintianCheck,
1293 ],
1294 per_suite_checks=[
1295 checks.SuiteCheck,
1296 checks.ACLCheck,
1297 checks.SourceFormatCheck,
1298 checks.SuiteArchitectureCheck,
1299 checks.VersionCheck,
1300 ],
1301 suites=suites,
1302 )
1304 def _handle_tag2upload(self) -> bool:
1305 """
1306 check if upload is via tag2upload
1308 if so, determine who authorized the upload to notify them of
1309 rejections and for ACL checks
1310 """
1312 if not self.fingerprint.keyring.tag2upload:
1313 return True
1315 source = self.changes.source
1316 if not source:
1317 self.reject_reasons.append("tag2upload: upload missing source")
1318 return False
1320 try:
1321 tag2upload_file, info = get_tag2upload_info_for_upload(self)
1322 except Exception as e:
1323 self.reject_reasons.append(f"tag2upload: invalid metadata: {e}")
1324 return False
1325 self._extra_source_files.append(tag2upload_file)
1327 success = True
1329 if self.changes.binaries:
1330 success = False
1331 self.reject_reasons.append("tag2upload: upload includes binaries")
1332 if self.changes.byhand_files:
1333 success = False
1334 self.reject_reasons.append("tag2upload: upload included by-hand files")
1336 if not info.signed_file.valid:
1337 success = False
1338 self.reject_reasons.append("tag2upload: no valid signature on tag")
1339 else:
1340 # Only set with a valid signature, but also when we reject
1341 # the upload so the signer might get included in the
1342 # rejection mail.
1343 self.authorized_by_fingerprint = (
1344 self.session.query(Fingerprint)
1345 .filter_by(fingerprint=info.signed_file.primary_fingerprint)
1346 .one()
1347 )
1348 if info.signed_file.weak_signature:
1349 success = False
1350 self.reject_reasons.append(
1351 "tag2upload: tag was signed using a weak algorithm (such as SHA-1)"
1352 )
1353 try:
1354 checks.check_signature_timestamp("tag2upload", info.signed_file)
1355 except checks.Reject as e:
1356 success = False
1357 self.reject_reasons.append(str(e))
1359 if info.metadata.get("distro") != "debian":
1360 success = False
1361 self.reject_reasons.append("tag2upload: upload not targeted at Debian.")
1362 if info.metadata.get("source") != source.dsc["Source"]:
1363 success = False
1364 self.reject_reasons.append(
1365 "tag2upload: source from tag metadata does not match upload"
1366 )
1367 if info.metadata.get("version") != source.dsc["Version"]:
1368 success = False
1369 self.reject_reasons.append(
1370 "tag2upload: version from tag metadata does not match upload"
1371 )
1373 tag_info_field = source.dsc.get("Git-Tag-Info")
1374 if not tag_info_field:
1375 success = False
1376 self.reject_reasons.append("tag2upload: source misses Git-Tag-Info field")
1377 else:
1378 try:
1379 tag_info = parse_git_tag_info(tag_info_field)
1380 except ValueError:
1381 success = False
1382 self.reject_reasons.append("tag2upload: could not parse Git-Tag-Info")
1383 else:
1384 if tag_info.fp.upper() != info.signed_file.fingerprint:
1385 success = False
1386 self.reject_reasons.append(
1387 "tag2upload: signing key from Git and Git-Tag-Info differ"
1388 )
1390 return success
1392 def check(self, force: bool = False) -> bool:
1393 """run checks against the upload
1395 :param force: ignore failing forceable checks
1396 :return: :const:`True` if all checks passed, :const:`False` otherwise
1397 """
1398 # XXX: needs to be better structured.
1399 assert self.changes.valid_signature
1401 # Validate signatures and hashes before we do any real work:
1402 if not self._run_checks_very_early(force):
1403 return False
1405 if not self._handle_tag2upload():
1406 return False
1408 if not self._run_checks_early(force):
1409 return False
1411 try:
1412 final_suites = self._final_suites()
1413 except Exception as e:
1414 self.reject_reasons.append(
1415 "Processing raised an exception: {0}.\n{1}".format(
1416 e, traceback.format_exc()
1417 )
1418 )
1419 return False
1420 if len(final_suites) == 0:
1421 self.reject_reasons.append(
1422 "No target suite found. Please check your target distribution and that you uploaded to the right archive."
1423 )
1424 return False
1426 self.final_suites = final_suites
1428 if not self._run_checks_late(force, final_suites):
1429 return False
1431 if len(self.reject_reasons) != 0:
1432 return False
1434 self._checked = True
1435 return True
1437 def _install_to_suite(
1438 self,
1439 target_suite: Suite,
1440 suite: Suite,
1441 source_component_func: Callable[[daklib.upload.Source], Component],
1442 binary_component_func: Callable[[daklib.upload.Binary], Component],
1443 source_suites=None,
1444 extra_source_archives: Optional[Iterable[Archive]] = None,
1445 policy_upload: bool = False,
1446 ) -> tuple[Optional[DBSource], list[DBBinary]]:
1447 """Install upload to the given suite
1449 :param target_suite: target suite (before redirection to policy queue or NEW)
1450 :param suite: suite to install the package into. This is the real suite,
1451 i.e. after any redirection to NEW or a policy queue
1452 :param source_component_func: function to get the :class:`daklib.dbconn.Component`
1453 for a :class:`daklib.upload.Source` object
1454 :param binary_component_func: function to get the :class:`daklib.dbconn.Component`
1455 for a :class:`daklib.upload.Binary` object
1456 :param source_suites: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
1457 :param extra_source_archives: see :meth:`daklib.archive.ArchiveTransaction.install_binary`
1458 :param policy_upload: Boolean indicating upload to policy queue (including NEW)
1459 :return: tuple with two elements. The first is a :class:`daklib.dbconn.DBSource`
1460 object for the install source or :const:`None` if no source was
1461 included. The second is a list of :class:`daklib.dbconn.DBBinary`
1462 objects for the installed binary packages.
1463 """
1464 # XXX: move this function to ArchiveTransaction?
1466 control = self.changes.changes
1467 changed_by = get_or_set_maintainer(
1468 control.get("Changed-By", control["Maintainer"]), self.session
1469 )
1471 if source_suites is None:
1472 source_suites = (
1473 self.session.query(Suite)
1474 .join((VersionCheck, VersionCheck.reference_id == Suite.suite_id))
1475 .filter(VersionCheck.check == "Enhances")
1476 .filter(VersionCheck.suite == suite)
1477 .subquery()
1478 )
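# The default subquery above limits where the binary's source may be copied
# from to suites that the target suite "Enhances" according to the
# VersionCheck table; the install() method builds and passes its own
# subquery instead.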
1480 source = self.changes.source
1481 if source is not None:
1482 component = source_component_func(source)
1483 db_source = self.transaction.install_source(
1484 self.directory,
1485 source,
1486 suite,
1487 component,
1488 changed_by,
1489 fingerprint=self.fingerprint,
1490 authorized_by_fingerprint=self.authorized_by_fingerprint,
1491 extra_source_files=self._extra_source_files,
1492 )
1493 else:
1494 db_source = None
1496 db_binaries = []
1497 for binary in sorted(self.changes.binaries, key=lambda x: x.name):
1498 copy_to_suite = suite
1499 if (
1500 daklib.utils.is_in_debug_section(binary.control)
1501 and suite.debug_suite is not None
1502 ):
1503 copy_to_suite = suite.debug_suite
1505 component = binary_component_func(binary)
1506 db_binary = self.transaction.install_binary(
1507 self.directory,
1508 binary,
1509 copy_to_suite,
1510 component,
1511 fingerprint=self.fingerprint,
1512 authorized_by_fingerprint=self.authorized_by_fingerprint,
1513 source_suites=source_suites,
1514 extra_source_archives=extra_source_archives,
1515 )
1516 db_binaries.append(db_binary)
1518 if not policy_upload:
1519 check_upload_for_external_signature_request(
1520 self.session, target_suite, copy_to_suite, db_binary
1521 )
1523 if suite.copychanges:
1524 src = os.path.join(self.directory, self.changes.filename)
1525 dst = os.path.join(
1526 suite.archive.path, "dists", suite.suite_name, self.changes.filename
1527 )
1528 self.transaction.fs.copy(src, dst, mode=suite.archive.mode)
1530 suite.update_last_changed()
1532 return (db_source, db_binaries)
1534 def _install_changes(self) -> DBChange:
1535 assert self.changes.valid_signature
1536 control = self.changes.changes
1537 session = self.transaction.session
1539 changelog_id = None
1540 # Only add changelog for sourceful uploads and binNMUs
1541 if self.changes.sourceful or re_bin_only_nmu.search(control["Version"]):
1542 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
1543 changelog_id = session.execute(
1544 query, {"changelog": control["Changes"]}
1545 ).scalar()
1546 assert changelog_id is not None
1548 db_changes = DBChange()
1549 db_changes.changesname = self.changes.filename
1550 db_changes.source = control["Source"]
1551 db_changes.binaries = control.get("Binary", None)
1552 db_changes.architecture = control["Architecture"]
1553 db_changes.version = control["Version"]
1554 db_changes.distribution = control["Distribution"]
1555 db_changes.urgency = control["Urgency"]
1556 db_changes.maintainer = control["Maintainer"]
1557 db_changes.changedby = control.get("Changed-By", control["Maintainer"])
1558 db_changes.date = control["Date"]
1559 db_changes.fingerprint = self.fingerprint.fingerprint
1560 db_changes.authorized_by_fingerprint = (
1561 self.authorized_by_fingerprint.fingerprint
1562 )
1563 db_changes.changelog_id = changelog_id
1564 db_changes.closes = self.changes.closed_bugs
1566 try:
1567 self.transaction.session.add(db_changes)
1568 self.transaction.session.flush()
1569 except sqlalchemy.exc.IntegrityError:
1570 raise ArchiveException(
1571 "{0} is already known.".format(self.changes.filename)
1572 )
1574 return db_changes
1576 def _install_policy(
1577 self, policy_queue, target_suite, db_changes, db_source, db_binaries
1578 ) -> PolicyQueueUpload:
1579 """install upload to policy queue"""
1580 u = PolicyQueueUpload()
1581 u.policy_queue = policy_queue
1582 u.target_suite = target_suite
1583 u.changes = db_changes
1584 u.source = db_source
1585 u.binaries = db_binaries
1586 self.transaction.session.add(u)
1587 self.transaction.session.flush()
1589 queue_files = [self.changes.filename]
1590 queue_files.extend(f.filename for f in self.changes.buildinfo_files)
1591 for fn in queue_files:
1592 src = os.path.join(self.changes.directory, fn)
1593 dst = os.path.join(policy_queue.path, fn)
1594 self.transaction.fs.copy(src, dst, mode=policy_queue.change_perms)
1596 return u
1598 def try_autobyhand(self) -> bool:
1599 """Try AUTOBYHAND
1601 Try to handle byhand packages automatically.
1602 """
1603 assert len(self.reject_reasons) == 0
1604 assert self.changes.valid_signature
1605 assert self.final_suites is not None
1606 assert self._checked
1608 byhand = self.changes.byhand_files
1609 if len(byhand) == 0:
1610 return True
1612 suites = list(self.final_suites)
1613 assert len(suites) == 1, "BYHAND uploads must be to a single suite"
1614 suite = suites[0]
1616 cnf = Config()
1617 control = self.changes.changes
1618 automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")
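# Byhand filenames are expected either as "<package>_<version>_<arch>.<ext>"
# (the underscore form handled first below) or as "<package>.<ext>" with the
# architecture defaulting to "all"; anything else is left for manual review.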
1620 remaining = []
1621 for f in byhand:
1622 if "_" in f.filename:
1623 parts = f.filename.split("_", 2)
1624 if len(parts) != 3:
1625 print(
1626 "W: unexpected byhand filename {0}. No automatic processing.".format(
1627 f.filename
1628 )
1629 )
1630 remaining.append(f)
1631 continue
1633 package, _, archext = parts
1634 arch, ext = archext.split(".", 1)
1635 else:
1636 parts = f.filename.split(".")
1637 if len(parts) < 2:
1638 print(
1639 "W: unexpected byhand filename {0}. No automatic processing.".format(
1640 f.filename
1641 )
1642 )
1643 remaining.append(f)
1644 continue
1646 package = parts[0]
1647 arch = "all"
1648 ext = parts[-1]
1650 try:
1651 rule = automatic_byhand_packages.subtree(package)
1652 except KeyError:
1653 remaining.append(f)
1654 continue
1656 if (
1657 rule["Source"] != self.changes.source_name
1658 or rule["Section"] != f.section
1659 or ("Extension" in rule and rule["Extension"] != ext)
1660 ):
1661 remaining.append(f)
1662 continue
1664 script = rule["Script"]
1665 retcode = subprocess.call(
1666 [
1667 script,
1668 os.path.join(self.directory, f.filename),
1669 control["Version"],
1670 arch,
1671 os.path.join(self.directory, self.changes.filename),
1672 suite.suite_name,
1673 ],
1674 shell=False,
1675 )
1676 if retcode != 0:
1677 print("W: error processing {0}.".format(f.filename))
1678 remaining.append(f)
1680 return len(remaining) == 0
1682 def _install_byhand(
1683 self,
1684 policy_queue_upload: PolicyQueueUpload,
1685 hashed_file: daklib.upload.HashedFile,
1686 ) -> PolicyQueueByhandFile:
1687 """install byhand file"""
1688 fs = self.transaction.fs
1689 session = self.transaction.session
1690 policy_queue = policy_queue_upload.policy_queue
1692 byhand_file = PolicyQueueByhandFile()
1693 byhand_file.upload = policy_queue_upload
1694 byhand_file.filename = hashed_file.filename
1695 session.add(byhand_file)
1696 session.flush()
1698 src = os.path.join(self.directory, hashed_file.filename)
1699 dst = os.path.join(policy_queue.path, hashed_file.filename)
1700 fs.copy(src, dst, mode=policy_queue.change_perms)
1702 return byhand_file
1704 def _do_bts_versiontracking(self) -> None:
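# Writes the "<changes-basename>.versions" and "<changes-basename>.debinfo"
# files used by the bug tracking system's version tracking, if
# Dir::BTSVersionTrack is configured.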
1705 cnf = Config()
1706 fs = self.transaction.fs
1708 btsdir = cnf.get("Dir::BTSVersionTrack")
1709 if btsdir is None or btsdir == "":
1710 return
1712 base = os.path.join(btsdir, self.changes.filename[:-8])
1714 # version history
1715 sourcedir = self.unpacked_source()
1716 if sourcedir is not None:
1717 dch_path = os.path.join(sourcedir, "debian", "changelog")
1718 with open(dch_path, "r") as fh:
1719 versions = fs.create("{0}.versions".format(base), mode=0o644)
1720 for line in fh.readlines():
1721 if re_changelog_versions.match(line):
1722 versions.write(line)
1723 versions.close()
1725 # binary -> source mapping
1726 if self.changes.binaries:
1727 debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
1728 for binary in self.changes.binaries:
1729 control = binary.control
1730 source_package, source_version = binary.source
1731 line = " ".join(
1732 [
1733 control["Package"],
1734 control["Version"],
1735 control["Architecture"],
1736 source_package,
1737 source_version,
1738 ]
1739 )
1740 print(line, file=debinfo)
1741 debinfo.close()
1743 def _policy_queue(self, suite) -> Optional[PolicyQueue]:
1744 if suite.policy_queue is not None:
1745 return suite.policy_queue
1746 return None
1748 def install(self) -> None:
1749 """install upload
1751 Install upload to a suite or policy queue. This method does **not**
1752 handle uploads to NEW.
1754 You need to have called the :meth:`check` method before calling this method.
1755 """
1756 assert len(self.reject_reasons) == 0
1757 assert self.changes.valid_signature
1758 assert self.final_suites is not None
1759 assert self._checked
1760 assert not self.new
1762 db_changes = self._install_changes()
1764 for suite in self.final_suites:
1765 overridesuite = suite
1766 if suite.overridesuite is not None:
1767 overridesuite = (
1768 self.session.query(Suite)
1769 .filter_by(suite_name=suite.overridesuite)
1770 .one()
1771 )
1773 policy_queue = self._policy_queue(suite)
1774 policy_upload = False
1776 redirected_suite = suite
1777 if policy_queue is not None:
1778 redirected_suite = policy_queue.suite
1779 policy_upload = True
1781 # source can be in the suite we install to or any suite we enhance
1782 source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
1783 for (enhanced_suite_id,) in (
1784 self.session.query(VersionCheck.reference_id)
1785 .filter(VersionCheck.suite_id.in_(source_suite_ids))
1786 .filter(VersionCheck.check == "Enhances")
1787 ):
1788 source_suite_ids.add(enhanced_suite_id)
1790 source_suites = (
1791 self.session.query(Suite)
1792 .filter(Suite.suite_id.in_(source_suite_ids))
1793 .subquery()
1794 )
1796 def source_component_func(source):
1797 return self._source_component(
1798 overridesuite, source, only_overrides=False
1799 )
1801 def binary_component_func(binary):
1802 return self._binary_component(
1803 overridesuite, binary, only_overrides=False
1804 )
1806 (db_source, db_binaries) = self._install_to_suite(
1807 suite,
1808 redirected_suite,
1809 source_component_func,
1810 binary_component_func,
1811 source_suites=source_suites,
1812 extra_source_archives=[suite.archive],
1813 policy_upload=policy_upload,
1814 )
1816 if policy_queue is not None:
1817 self._install_policy(
1818 policy_queue, suite, db_changes, db_source, db_binaries
1819 )
1821 # copy to build queues
1822 if policy_queue is None or policy_queue.send_to_build_queues:
1823 for build_queue in suite.copy_queues:
1824 self._install_to_suite(
1825 suite,
1826 build_queue.suite,
1827 source_component_func,
1828 binary_component_func,
1829 source_suites=source_suites,
1830 extra_source_archives=[suite.archive],
1831 )
1833 self._do_bts_versiontracking()
1835 def install_to_new(self) -> None:
1836 """install upload to NEW
1838 Install upload to NEW. This method does **not** handle regular uploads
1839 to suites or policy queues.
1841 You need to have called the :meth:`check` method before calling this method.
1842 """
1843 # Uploads to NEW are special as we don't have overrides.
1844 assert len(self.reject_reasons) == 0
1845 assert self.changes.valid_signature
1846 assert self.final_suites is not None
1848 binaries = self.changes.binaries
1849 byhand = self.changes.byhand_files
1851 # we need a suite to guess components
1852 suites = list(self.final_suites)
1853 assert len(suites) == 1, "NEW uploads must be to a single suite"
1854 suite = suites[0]
1856 # decide which NEW queue to use
1857 if suite.new_queue is None:
1858 new_queue = (
1859 self.transaction.session.query(PolicyQueue)
1860 .filter_by(queue_name="new")
1861 .one()
1862 )
1863 else:
1864 new_queue = suite.new_queue
1865 if len(byhand) > 0:
1866 # There is only one global BYHAND queue
1867 new_queue = (
1868 self.transaction.session.query(PolicyQueue)
1869 .filter_by(queue_name="byhand")
1870 .one()
1871 )
1872 new_suite = new_queue.suite
1874 def binary_component_func(binary):
1875 return self._binary_component(suite, binary, only_overrides=False)
1877 # guess source component
1878 # XXX: should be moved into an extra method
1879 binary_component_names = set()
1880 for binary in binaries:
1881 component = binary_component_func(binary)
1882 binary_component_names.add(component.component_name)
1883 source_component_name = None
1884 for c in self.session.query(Component).order_by(Component.component_id):
1885 guess = c.component_name
1886 if guess in binary_component_names:
1887 source_component_name = guess
1888 break
1889 if source_component_name is None:
1890 source_component = (
1891 self.session.query(Component).order_by(Component.component_id).first()
1892 )
1893 else:
1894 source_component = (
1895 self.session.query(Component)
1896 .filter_by(component_name=source_component_name)
1897 .one()
1898 )
1900 def source_component_func(source):
1901 return source_component
1903 db_changes = self._install_changes()
1904 (db_source, db_binaries) = self._install_to_suite(
1905 suite,
1906 new_suite,
1907 source_component_func,
1908 binary_component_func,
1909 source_suites=True,
1910 extra_source_archives=[suite.archive],
1911 policy_upload=True,
1912 )
1913 policy_upload = self._install_policy(
1914 new_queue, suite, db_changes, db_source, db_binaries
1915 )
1917 for f in byhand:
1918 self._install_byhand(policy_upload, f)
1920 self._do_bts_versiontracking()
1922 def commit(self) -> None:
1923 """commit changes"""
1924 self.transaction.commit()
1926 def rollback(self) -> None:
1927 """rollback changes"""
1928 self.transaction.rollback()
1930 def __enter__(self):
1931 self.prepare()
1932 return self
1934 def __exit__(self, type, value, traceback):
1935 if self.directory is not None:
1936 shutil.rmtree(self.directory)
1937 self.directory = None
1938 self.changes = None
1939 self.transaction.rollback()
1940 return None