Package daklib :: Module archive
[hide private]
[frames] | [no frames]

Source Code for Module daklib.archive

   1  # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org> 
   2  # 
   3  # This program is free software; you can redistribute it and/or modify 
   4  # it under the terms of the GNU General Public License as published by 
   5  # the Free Software Foundation; either version 2 of the License, or 
   6  # (at your option) any later version. 
   7  # 
   8  # This program is distributed in the hope that it will be useful, 
   9  # but WITHOUT ANY WARRANTY; without even the implied warranty of 
  10  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the 
  11  # GNU General Public License for more details. 
  12  # 
  13  # You should have received a copy of the GNU General Public License along 
  14  # with this program; if not, write to the Free Software Foundation, Inc., 
  15  # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
  16   
  17  """module to manipulate the archive 
  18   
  19  This module provides classes to manipulate the archive. 
  20  """ 
  21   
  22  from daklib.dbconn import * 
  23  import daklib.checks as checks 
  24  from daklib.config import Config 
  25  from daklib.externalsignature import check_upload_for_external_signature_request 
  26  import daklib.upload as upload 
  27  import daklib.utils 
  28  from daklib.fstransactions import FilesystemTransaction 
  29  from daklib.regexes import re_changelog_versions, re_bin_only_nmu 
  30   
  31  import os 
  32  import shutil 
  33  from sqlalchemy.orm.exc import NoResultFound 
  34  from sqlalchemy.orm import object_session 
  35  import sqlalchemy.exc 
  36  import subprocess 
  37  import traceback 
  38   
  39   
class ArchiveException(Exception):
    """base class for errors raised while manipulating the archive"""
    pass
class HashMismatchException(ArchiveException):
    """raised when a file's size or checksums do not match the values
    already recorded in the database"""
    pass
48 -class ArchiveTransaction:
49 """manipulate the archive in a transaction 50 """ 51
    def __init__(self):
        # Filesystem operations are staged in a FilesystemTransaction so
        # they can be committed or rolled back together with the database
        # session (see commit()/rollback()).
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()
56 - def get_file(self, hashed_file, source_name, check_hashes=True):
57 """Look for file C{hashed_file} in database 58 59 @type hashed_file: L{daklib.upload.HashedFile} 60 @param hashed_file: file to look for in the database 61 62 @type source_name: str 63 @param source_name: source package name 64 65 @type check_hashes: bool 66 @param check_hashes: check size and hashes match 67 68 @raise KeyError: file was not found in the database 69 @raise HashMismatchException: hash mismatch 70 71 @rtype: L{daklib.dbconn.PoolFile} 72 @return: database entry for the file 73 """ 74 poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename) 75 try: 76 poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one() 77 if check_hashes and (poolfile.filesize != hashed_file.size 78 or poolfile.md5sum != hashed_file.md5sum 79 or poolfile.sha1sum != hashed_file.sha1sum 80 or poolfile.sha256sum != hashed_file.sha256sum): 81 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename)) 82 return poolfile 83 except NoResultFound: 84 raise KeyError('{0} not found in database.'.format(poolname))
85
    def _install_file(self, directory, hashed_file, archive, component, source_name):
        """Install a file

        Will not give an error when the file is already present.

        @type directory: str
        @param directory: directory the file is located in

        @type hashed_file: L{daklib.upload.HashedFile}
        @param hashed_file: file to install

        @type archive: L{daklib.dbconn.Archive}
        @param archive: target archive

        @type component: L{daklib.dbconn.Component}
        @param component: target component

        @type source_name: str
        @param source_name: source package name (used to derive the pool path)

        @rtype: L{daklib.dbconn.PoolFile}
        @return: database object for the new file
        """
        session = self.session

        poolname = os.path.join(daklib.utils.poolify(source_name), hashed_file.filename)
        try:
            # Reuse the existing pool entry; raises HashMismatchException
            # if the file differs from what is already in the pool.
            poolfile = self.get_file(hashed_file, source_name)
        except KeyError:
            poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
            poolfile.md5sum = hashed_file.md5sum
            poolfile.sha1sum = hashed_file.sha1sum
            poolfile.sha256sum = hashed_file.sha256sum
            session.add(poolfile)
            session.flush()

        try:
            session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
        except NoResultFound:
            # Not yet present in this archive/component: record it and
            # copy the actual file into the pool.
            archive_file = ArchiveFile(archive, component, poolfile)
            session.add(archive_file)
            session.flush()

            path = os.path.join(archive.path, 'pool', component.component_name, poolname)
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)

        return poolfile
    def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
        """Install a binary package

        @type directory: str
        @param directory: directory the binary package is located in

        @type binary: L{daklib.upload.Binary}
        @param binary: binary package to install

        @type suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type component: L{daklib.dbconn.Component}
        @param component: target component

        @type allow_tainted: bool
        @param allow_tainted: allow to copy additional files from tainted archives

        @type fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint

        @type source_suites: SQLAlchemy subquery for C{daklib.dbconn.Suite} or C{True}
        @param source_suites: suites to copy the source from if they are not
                              in C{suite} or C{True} to allow copying from any
                              suite.

        @type extra_source_archives: list of L{daklib.dbconn.Archive}
        @param extra_source_archives: extra archives to copy Built-Using sources from

        @rtype: L{daklib.dbconn.DBBinary}
        @return: database object for the new package
        """
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        architecture = get_architecture(control['Architecture'], session)

        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
        # Prefer the source already present in the target suite; otherwise
        # look in the allowed source suites and copy it over.
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        if source is None:
            if source_suites is not True:
                source_query = source_query.join(DBSource.suites) \
                    .filter(Suite.suite_id == source_suites.c.id)
            source = source_query.first()
            if source is None:
                raise ArchiveException('{0}: trying to install to {1}, but could not find source ({2} {3})'.
                    format(binary.hashed_file.filename, suite.suite_name, source_name, source_version))
            self.copy_source(source, suite, source.poolfile.component)

        db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

        # Attributes that identify the binary package.
        unique = dict(
            package=control['Package'],
            version=control['Version'],
            architecture=architecture,
            )
        # Attributes that must match an already-installed binary.
        rest = dict(
            source=source,
            maintainer=maintainer,
            poolfile=db_file,
            binarytype=binary.type,
            )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            fingerprint=fingerprint,
            )

        try:
            db_binary = session.query(DBBinary).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_binary, key) != value:
                    raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.items():
                setattr(db_binary, key, value)
            for key, value in rest2.items():
                setattr(db_binary, key, value)
            session.add(db_binary)
            session.flush()
            import_metadata_into_db(db_binary, session)

        self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)

        session.flush()

        return db_binary
    def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        @type filename: str
        @param filename: filename to use in error messages

        @type source: L{daklib.dbconn.DBSource}
        @param source: source to look for

        @type archive: L{daklib.dbconn.Archive}
        @param archive: archive to look in

        @type extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: list of archives to copy the source package from
                               if it is not yet present in C{archive}

        @raise ArchiveException: source is not present in the target archive
                                 and could not be found in any extra archive
        """
        session = self.session
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
        if db_file is not None:
            return True

        # Try to copy file from one extra archive
        if extra_archives is None:
            extra_archives = []
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([a.archive_id for a in extra_archives])).first()
        if db_file is None:
            raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
249 - def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
250 """Add Built-Using sources to C{db_binary.extra_sources} 251 """ 252 session = self.session 253 254 for bu_source_name, bu_source_version in daklib.utils.parse_built_using(control): 255 bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first() 256 if bu_source is None: 257 raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version)) 258 259 self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives) 260 261 db_binary.extra_sources.append(bu_source)
262
    def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None):
        """Install a source package into the given archive

        @type directory: str
        @param directory: directory the source package is located in

        @type source: L{daklib.upload.Source}
        @param source: source package to install

        @type archive: L{daklib.dbconn.Archive}
        @param archive: target archive

        @type component: L{daklib.dbconn.Component}
        @param component: target component

        @type changed_by: L{daklib.dbconn.Maintainer}
        @param changed_by: person who prepared this version of the package

        @type allow_tainted: bool
        @param allow_tainted: allow to copy additional files from tainted archives

        @type fingerprint: L{daklib.dbconn.Fingerprint}
        @param fingerprint: optional fingerprint

        @rtype: L{daklib.dbconn.DBSource}
        @return: database object for the new source
        """
        session = self.session
        control = source.dsc
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        source_name = control['Source']

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

        # Attributes that identify the source package.
        unique = dict(
            source=source_name,
            version=control['Version'],
            )
        # Attributes that must match an already-installed source.
        rest = dict(
            maintainer=maintainer,
            poolfile=db_file_dsc,
            dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
            )
        # Other attributes that are ignored for purposes of equality with
        # an existing source
        rest2 = dict(
            changedby=changed_by,
            fingerprint=fingerprint,
            )

        created = False
        try:
            db_source = session.query(DBSource).filter_by(**unique).one()
            for key, value in rest.items():
                if getattr(db_source, key) != value:
                    raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
        except NoResultFound:
            created = True
            db_source = DBSource(**unique)
            for key, value in rest.items():
                setattr(db_source, key, value)
            for key, value in rest2.items():
                setattr(db_source, key, value)
            session.add(db_source)
            session.flush()

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)
            session.flush()

        if not created:
            # Source is already known: only make sure its files are
            # present in the target archive/component.
            for f in db_source.srcfiles:
                self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
            return db_source

        ### Now add remaining files and copy them to the archive.

        for hashed_file in source.files.values():
            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
            if os.path.exists(hashed_file_path):
                db_file = self._install_file(directory, hashed_file, archive, component, source_name)
                session.add(db_file)
            else:
                # File was not uploaded; it must already exist in the pool.
                db_file = self.get_file(hashed_file, source_name)
                self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file
            session.add(db_dsc_file)

        session.flush()

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if 'Uploaders' in control:
            from daklib.textutils import split_uploaders
            for u in split_uploaders(control['Uploaders']):
                db_source.uploaders.append(get_or_set_maintainer(u, session))
        session.flush()

        return db_source
349 - def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
350 """Install a source package 351 352 @type directory: str 353 @param directory: directory the source package is located in 354 355 @type source: L{daklib.upload.Source} 356 @param source: source package to install 357 358 @type suite: L{daklib.dbconn.Suite} 359 @param suite: target suite 360 361 @type component: L{daklib.dbconn.Component} 362 @param component: target component 363 364 @type changed_by: L{daklib.dbconn.Maintainer} 365 @param changed_by: person who prepared this version of the package 366 367 @type allow_tainted: bool 368 @param allow_tainted: allow to copy additional files from tainted archives 369 370 @type fingerprint: L{daklib.dbconn.Fingerprint} 371 @param fingerprint: optional fingerprint 372 373 @rtype: L{daklib.dbconn.DBSource} 374 @return: database object for the new source 375 """ 376 db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint) 377 378 if suite in db_source.suites: 379 return db_source 380 db_source.suites.append(suite) 381 self.session.flush() 382 383 return db_source
384
385 - def _copy_file(self, db_file, archive, component, allow_tainted=False):
386 """Copy a file to the given archive and component 387 388 @type db_file: L{daklib.dbconn.PoolFile} 389 @param db_file: file to copy 390 391 @type archive: L{daklib.dbconn.Archive} 392 @param archive: target archive 393 394 @type component: L{daklib.dbconn.Archive} 395 @param component: target component 396 397 @type allow_tainted: bool 398 @param allow_tainted: allow to copy from tainted archives (such as NEW) 399 """ 400 session = self.session 401 402 if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None: 403 query = session.query(ArchiveFile).filter_by(file=db_file) 404 if not allow_tainted: 405 query = query.join(Archive).filter(Archive.tainted == False) # noqa:E712 406 407 source_af = query.first() 408 if source_af is None: 409 raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename)) 410 target_af = ArchiveFile(archive, component, db_file) 411 session.add(target_af) 412 session.flush() 413 self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
414
    def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
        """Copy a binary package to the given suite and component

        @type db_binary: L{daklib.dbconn.DBBinary}
        @param db_binary: binary to copy

        @type suite: L{daklib.dbconn.Suite}
        @param suite: target suite

        @type component: L{daklib.dbconn.Component}
        @param component: target component

        @type allow_tainted: bool
        @param allow_tainted: allow to copy from tainted archives (such as NEW)

        @type extra_archives: list of L{daklib.dbconn.Archive}
        @param extra_archives: extra archives to copy Built-Using sources from

        @raise ArchiveException: source or a Built-Using source is not
                                 present in the target archive
        """
        session = self.session
        archive = suite.archive
        # Copying into a tainted archive may always use tainted sources.
        if archive.tainted:
            allow_tainted = True

        filename = db_binary.poolfile.filename

        # make sure source is present in target archive
        db_source = db_binary.source
        if session.query(ArchiveFile).filter_by(archive=archive, file=db_source.poolfile).first() is None:
            raise ArchiveException('{0}: cannot copy to {1}: source is not present in target archive'.format(filename, suite.suite_name))

        # make sure built-using packages are present in target archive
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

        # copy binary
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        self.session.flush()
456 - def copy_source(self, db_source, suite, component, allow_tainted=False):
457 """Copy a source package to the given suite and component 458 459 @type db_source: L{daklib.dbconn.DBSource} 460 @param db_source: source to copy 461 462 @type suite: L{daklib.dbconn.Suite} 463 @param suite: target suite 464 465 @type component: L{daklib.dbconn.Component} 466 @param component: target component 467 468 @type allow_tainted: bool 469 @param allow_tainted: allow to copy from tainted archives (such as NEW) 470 """ 471 archive = suite.archive 472 if archive.tainted: 473 allow_tainted = True 474 for db_dsc_file in db_source.srcfiles: 475 self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted) 476 if suite not in db_source.suites: 477 db_source.suites.append(suite) 478 self.session.flush()
479
480 - def remove_file(self, db_file, archive, component):
481 """Remove a file from a given archive and component 482 483 @type db_file: L{daklib.dbconn.PoolFile} 484 @param db_file: file to remove 485 486 @type archive: L{daklib.dbconn.Archive} 487 @param archive: archive to remove the file from 488 489 @type component: L{daklib.dbconn.Component} 490 @param component: component to remove the file from 491 """ 492 af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component) 493 self.fs.unlink(af.path) 494 self.session.delete(af)
495
    def remove_binary(self, binary, suite):
        """Remove a binary from a given suite and component

        The pool file itself is not removed; only the suite association.

        @type binary: L{daklib.dbconn.DBBinary}
        @param binary: binary to remove

        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to remove the package from
        """
        binary.suites.remove(suite)
        self.session.flush()
508 - def remove_source(self, source, suite):
509 """Remove a source from a given suite and component 510 511 @type source: L{daklib.dbconn.DBSource} 512 @param source: source to remove 513 514 @type suite: L{daklib.dbconn.Suite} 515 @param suite: suite to remove the package from 516 517 @raise ArchiveException: source package is still referenced by other 518 binaries in the suite 519 """ 520 session = self.session 521 522 query = session.query(DBBinary).filter_by(source=source) \ 523 .filter(DBBinary.suites.contains(suite)) 524 if query.first() is not None: 525 raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name)) 526 527 source.suites.remove(suite) 528 session.flush()
529
    def commit(self):
        """commit changes"""
        try:
            self.session.commit()
            self.fs.commit()
        finally:
            # Rolling back after a successful commit is a harmless no-op;
            # this guarantees both the database session and the filesystem
            # transaction are rolled back if either commit raised.
            self.session.rollback()
            self.fs.rollback()
    def rollback(self):
        """rollback changes

        Reverts both the database session and any staged filesystem
        operations.
        """
        self.session.rollback()
        self.fs.rollback()
    def flush(self):
        """flush pending database changes to the session (no commit)"""
        self.session.flush()
    def __enter__(self):
        # Allows use as a context manager; see __exit__ for the
        # commit/rollback semantics.
        return self
    def __exit__(self, type, value, traceback):
        # Commit on a clean exit, roll back if an exception escaped the
        # with-block. Returning None never suppresses the exception.
        if type is None:
            self.commit()
        else:
            self.rollback()
        return None
def source_component_from_package_list(package_list, suite):
    """Get component for a source package

    This function will look at the Package-List field to determine the
    component the source package belongs to. This is the first component
    the source package provides binaries for (first with respect to the
    ordering of components).

    If the source package has no Package-List field, None is returned.

    @type package_list: L{daklib.packagelist.PackageList}
    @param package_list: package list of the source to get the override for

    @type suite: L{daklib.dbconn.Suite}
    @param suite: suite to consider for binaries produced

    @rtype: L{daklib.dbconn.Component} or C{None}
    @return: component for the given source or C{None}
    """
    if package_list.fallback:
        return None

    session = object_session(suite)
    component_names = {entry.component for entry in package_list.packages_for_suite(suite)}
    return session.query(Component) \
        .order_by(Component.ordering) \
        .filter(Component.component_name.in_(component_names)) \
        .first()
587 -class ArchiveUpload:
588 """handle an upload 589 590 This class can be used in a with-statement:: 591 592 with ArchiveUpload(...) as upload: 593 ... 594 595 Doing so will automatically run any required cleanup and also rollback the 596 transaction if it was not committed. 597 """ 598
    def __init__(self, directory, changes, keyrings):
        """
        @type directory: str
        @param directory: directory the upload is located in

        @type changes: L{daklib.upload.Changes}
        @param changes: upload to process

        @param keyrings: keyrings used to verify signatures
        """
        self.transaction = ArchiveTransaction()
        """transaction used to handle the upload
        @type: L{daklib.archive.ArchiveTransaction}
        """

        self.session = self.transaction.session
        """database session"""

        self.original_directory = directory
        self.original_changes = changes

        self.changes = None
        """upload to process
        @type: L{daklib.upload.Changes}
        """

        self.directory = None
        """directory with temporary copy of files. set by C{prepare}
        @type: str
        """

        self.keyrings = keyrings

        self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
        """fingerprint of the key used to sign the upload
        @type: L{daklib.dbconn.Fingerprint}
        """

        self.reject_reasons = []
        """reasons why the upload cannot be accepted
        @type: list of str
        """

        self.warnings = []
        """warnings
        @note: Not used yet.
        @type: list of str
        """

        self.final_suites = None

        self.new = False
        """upload is NEW. set by C{check}
        @type: bool
        """

        self._checked = False
        """checks passed. set by C{check}
        @type: bool
        """

        self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
        self._new = self._new_queue.suite
    def warn(self, message):
        """add a warning message

        Adds a warning message that can later be seen in C{self.warnings}

        @type message: string
        @param message: warning message
        """
        self.warnings.append(message)
    def prepare(self):
        """prepare upload for further processing

        This copies the files involved to a temporary directory.  If you use
        this method directly, you have to remove the directory given by the
        C{directory} attribute later on your own.

        Instead of using the method directly, you can also use a with-statement::

           with ArchiveUpload(...) as upload:
              ...

        This will automatically handle any required cleanup.
        """
        assert self.directory is None
        assert self.original_changes.valid_signature

        cnf = Config()
        session = self.transaction.session

        # Temp dir is group-accessible so unprivileged helpers can read it.
        group = cnf.get('Dinstall::UnprivGroup') or None
        self.directory = daklib.utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
                                                   mode=0o2750, group=group)
        with FilesystemTransaction() as fs:
            src = os.path.join(self.original_directory, self.original_changes.filename)
            dst = os.path.join(self.directory, self.original_changes.filename)
            fs.copy(src, dst, mode=0o640)

            self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)

            files = {}
            try:
                files = self.changes.files
            except upload.InvalidChangesException:
                # Do not raise an exception; upload will be rejected later
                # due to the missing files
                pass

            for f in files.values():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(src):
                    continue
                fs.copy(src, dst, mode=0o640)

            source = None
            try:
                source = self.changes.source
            except Exception:
                # Do not raise an exception here if the .dsc is invalid.
                pass

            if source is not None:
                for f in source.files.values():
                    src = os.path.join(self.original_directory, f.filename)
                    dst = os.path.join(self.directory, f.filename)
                    if not os.path.exists(dst):
                        # File referenced by the .dsc was not uploaded;
                        # try to fetch it from the pool instead.
                        try:
                            db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                            db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                            fs.copy(db_archive_file.path, dst, mode=0o640)
                        except KeyError:
                            # Ignore if get_file could not find it. Upload will
                            # probably be rejected later.
                            pass
730 - def unpacked_source(self):
731 """Path to unpacked source 732 733 Get path to the unpacked source. This method does unpack the source 734 into a temporary directory under C{self.directory} if it has not 735 been done so already. 736 737 @rtype: str or C{None} 738 @return: string giving the path to the unpacked source directory 739 or C{None} if no source was included in the upload. 740 """ 741 assert self.directory is not None 742 743 source = self.changes.source 744 if source is None: 745 return None 746 dsc_path = os.path.join(self.directory, source._dsc_file.filename) 747 748 sourcedir = os.path.join(self.directory, 'source') 749 if not os.path.exists(sourcedir): 750 subprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=subprocess.DEVNULL) 751 if not os.path.isdir(sourcedir): 752 raise Exception("{0} is not a directory after extracting source package".format(sourcedir)) 753 return sourcedir
754
755 - def _map_suite(self, suite_name):
756 suite_names = set((suite_name, )) 757 for rule in Config().value_list("SuiteMappings"): 758 fields = rule.split() 759 rtype = fields[0] 760 if rtype == "map" or rtype == "silent-map": 761 (src, dst) = fields[1:3] 762 if src in suite_names: 763 suite_names.remove(src) 764 suite_names.add(dst) 765 if rtype != "silent-map": 766 self.warnings.append('Mapping {0} to {1}.'.format(src, dst)) 767 elif rtype == "copy" or rtype == "silent-copy": 768 (src, dst) = fields[1:3] 769 if src in suite_names: 770 suite_names.add(dst) 771 if rtype != "silent-copy": 772 self.warnings.append('Copy {0} to {1}.'.format(src, dst)) 773 elif rtype == "ignore": 774 ignored = fields[1] 775 if ignored in suite_names: 776 suite_names.remove(ignored) 777 self.warnings.append('Ignoring target suite {0}.'.format(ignored)) 778 elif rtype == "reject": 779 rejected = fields[1] 780 if rejected in suite_names: 781 raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected)) 782 ## XXX: propup-version and map-unreleased not yet implemented 783 return suite_names
784
785 - def _mapped_suites(self):
786 """Get target suites after mappings 787 788 @rtype: list of L{daklib.dbconn.Suite} 789 @return: list giving the mapped target suites of this upload 790 """ 791 session = self.session 792 793 suite_names = set() 794 for dist in self.changes.distributions: 795 suite_names.update(self._map_suite(dist)) 796 797 suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names)) 798 return suites
799
800 - def _check_new_binary_overrides(self, suite, overridesuite):
801 new = False 802 source = self.changes.source 803 804 # Check binaries listed in the source package's Package-List field: 805 if source is not None and not source.package_list.fallback: 806 packages = source.package_list.packages_for_suite(suite) 807 binaries = [entry for entry in packages] 808 for b in binaries: 809 override = self._binary_override(overridesuite, b) 810 if override is None: 811 self.warnings.append('binary:{0} is NEW.'.format(b.name)) 812 new = True 813 814 # Check all uploaded packages. 815 # This is necessary to account for packages without a Package-List 816 # field, really late binary-only uploads (where an unused override 817 # was already removed), and for debug packages uploaded to a suite 818 # without a debug suite (which are then considered as NEW). 819 binaries = self.changes.binaries 820 for b in binaries: 821 if daklib.utils.is_in_debug_section(b.control) and suite.debug_suite is not None: 822 continue 823 override = self._binary_override(overridesuite, b) 824 if override is None: 825 self.warnings.append('binary:{0} is NEW.'.format(b.name)) 826 new = True 827 828 return new
829
    def _check_new(self, suite, overridesuite):
        """Check if upload is NEW

        An upload is NEW if it has binary or source packages that do not have
        an override in C{overridesuite} OR if it references files ONLY in a
        tainted archive (eg. when it references files in NEW).

        Debug packages (*-dbgsym in Section: debug) are not considered as NEW
        if C{suite} has a separate debug suite.

        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite the upload is targeted at

        @type overridesuite: L{daklib.dbconn.Suite}
        @param overridesuite: suite to check the overrides in

        @rtype: bool
        @return: C{True} if the upload is NEW, C{False} otherwise
        """
        session = self.session
        new = False

        # Check for missing overrides
        if self._check_new_binary_overrides(suite, overridesuite):
            new = True
        if self.changes.source is not None:
            override = self._source_override(overridesuite, self.changes.source)
            if override is None:
                self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
                new = True

        # Check if we reference a file only in a tainted archive
        files = list(self.changes.files.values())
        if self.changes.source is not None:
            files.extend(self.changes.source.files.values())
        for f in files:
            query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
            query_untainted = query.join(Archive).filter(Archive.tainted == False)  # noqa:E712

            in_archive = (query.first() is not None)
            in_untainted_archive = (query_untainted.first() is not None)

            if in_archive and not in_untainted_archive:
                self.warnings.append('{0} is only available in NEW.'.format(f.filename))
                new = True

        return new
872 - def _final_suites(self):
873 session = self.session 874 875 mapped_suites = self._mapped_suites() 876 final_suites = list() 877 878 for suite in mapped_suites: 879 overridesuite = suite 880 if suite.overridesuite is not None: 881 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one() 882 if self._check_new(suite, overridesuite): 883 self.new = True 884 if suite not in final_suites: 885 final_suites.append(suite) 886 887 return final_suites
888
889 - def _binary_override(self, suite, binary):
890 """Get override entry for a binary 891 892 @type suite: L{daklib.dbconn.Suite} 893 @param suite: suite to get override for 894 895 @type binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry} 896 @param binary: binary to get override for 897 898 @rtype: L{daklib.dbconn.Override} or C{None} 899 @return: override for the given binary or C{None} 900 """ 901 if suite.overridesuite is not None: 902 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one() 903 904 mapped_component = get_mapped_component(binary.component) 905 if mapped_component is None: 906 return None 907 908 query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \ 909 .join(Component).filter(Component.component_name == mapped_component.component_name) \ 910 .join(OverrideType).filter(OverrideType.overridetype == binary.type) 911 912 return query.one_or_none()
913
914 - def _source_override(self, suite, source):
915 """Get override entry for a source 916 917 @type suite: L{daklib.dbconn.Suite} 918 @param suite: suite to get override for 919 920 @type source: L{daklib.upload.Source} 921 @param source: source to get override for 922 923 @rtype: L{daklib.dbconn.Override} or C{None} 924 @return: override for the given source or C{None} 925 """ 926 if suite.overridesuite is not None: 927 suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one() 928 929 query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \ 930 .join(OverrideType).filter(OverrideType.overridetype == 'dsc') 931 932 component = source_component_from_package_list(source.package_list, suite) 933 if component is not None: 934 query = query.filter(Override.component == component) 935 936 return query.one_or_none()
937
938 - def _binary_component(self, suite, binary, only_overrides=True):
939 """get component for a binary 940 941 By default this will only look at overrides to get the right component; 942 if C{only_overrides} is C{False} this method will also look at the 943 Section field. 944 945 @type suite: L{daklib.dbconn.Suite} 946 947 @type binary: L{daklib.upload.Binary} 948 949 @type only_overrides: bool 950 @param only_overrides: only use overrides to get the right component 951 952 @rtype: L{daklib.dbconn.Component} or C{None} 953 """ 954 override = self._binary_override(suite, binary) 955 if override is not None: 956 return override.component 957 if only_overrides: 958 return None 959 return get_mapped_component(binary.component, self.session)
960
961 - def _source_component(self, suite, source, only_overrides=True):
962 """get component for a source 963 964 By default this will only look at overrides to get the right component; 965 if C{only_overrides} is C{False} this method will also look at the 966 Section field. 967 968 @type suite: L{daklib.dbconn.Suite} 969 970 @type binary: L{daklib.upload.Binary} 971 972 @type only_overrides: bool 973 @param only_overrides: only use overrides to get the right component 974 975 @rtype: L{daklib.dbconn.Component} or C{None} 976 """ 977 override = self._source_override(suite, source) 978 if override is not None: 979 return override.component 980 if only_overrides: 981 return None 982 return get_mapped_component(source.component, self.session)
983
984 - def check(self, force=False):
985 """run checks against the upload 986 987 @type force: bool 988 @param force: ignore failing forcable checks 989 990 @rtype: bool 991 @return: C{True} if all checks passed, C{False} otherwise 992 """ 993 # XXX: needs to be better structured. 994 assert self.changes.valid_signature 995 996 try: 997 # Validate signatures and hashes before we do any real work: 998 for chk in ( 999 checks.SignatureAndHashesCheck, 1000 checks.WeakSignatureCheck, 1001 checks.SignatureTimestampCheck, 1002 checks.ChangesCheck, 1003 checks.ExternalHashesCheck, 1004 checks.SourceCheck, 1005 checks.BinaryCheck, 1006 checks.BinaryTimestampCheck, 1007 checks.SingleDistributionCheck, 1008 checks.ArchAllBinNMUCheck, 1009 ): 1010 chk().check(self) 1011 1012 final_suites = self._final_suites() 1013 if len(final_suites) == 0: 1014 self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.') 1015 return False 1016 1017 self.final_suites = final_suites 1018 1019 for chk in ( 1020 checks.TransitionCheck, 1021 checks.ACLCheck, 1022 checks.NewOverrideCheck, 1023 checks.NoSourceOnlyCheck, 1024 checks.LintianCheck, 1025 ): 1026 chk().check(self) 1027 1028 for chk in ( 1029 checks.SuiteCheck, 1030 checks.ACLCheck, 1031 checks.SourceFormatCheck, 1032 checks.SuiteArchitectureCheck, 1033 checks.VersionCheck, 1034 ): 1035 for suite in final_suites: 1036 chk().per_suite_check(self, suite) 1037 1038 if len(self.reject_reasons) != 0: 1039 return False 1040 1041 self._checked = True 1042 return True 1043 except checks.Reject as e: 1044 self.reject_reasons.append(str(e)) 1045 except Exception as e: 1046 self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc())) 1047 return False
1048
    def _install_to_suite(self, target_suite, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None, policy_upload=False):
        """Install upload to the given suite

        @type target_suite: L{daklib.dbconn.Suite}
        @param target_suite: target suite (before redirection to policy queue or NEW)

        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to install the package into. This is the real suite,
                      ie. after any redirection to NEW or a policy queue

        @param source_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Source} object

        @param binary_component_func: function to get the L{daklib.dbconn.Component}
                                      for a L{daklib.upload.Binary} object

        @param source_suites: see L{daklib.archive.ArchiveTransaction.install_binary}

        @param extra_source_archives: see L{daklib.archive.ArchiveTransaction.install_binary}

        @param policy_upload: Boolean indicating upload to policy queue (including NEW)

        @return: tuple with two elements. The first is a L{daklib.dbconn.DBSource}
                 object for the install source or C{None} if no source was
                 included. The second is a list of L{daklib.dbconn.DBBinary}
                 objects for the installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?

        control = self.changes.changes
        # Changed-By falls back to Maintainer when the field is absent.
        changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

        # Default source-suite set: the target suite plus every suite it
        # "Enhances" according to the version checks.
        if source_suites is None:
            source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.check == 'Enhances').filter(VersionCheck.suite == suite).subquery()

        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(
                self.directory,
                source,
                suite,
                component,
                changed_by,
                fingerprint=self.fingerprint
            )
        else:
            db_source = None

        db_binaries = []
        # Sorted by package name for deterministic installation order.
        for binary in sorted(self.changes.binaries, key=lambda x: x.name):
            # Debug packages are diverted to the suite's debug suite when
            # one is configured.
            copy_to_suite = suite
            if daklib.utils.is_in_debug_section(binary.control) and suite.debug_suite is not None:
                copy_to_suite = suite.debug_suite

            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(
                self.directory,
                binary,
                copy_to_suite,
                component,
                fingerprint=self.fingerprint,
                source_suites=source_suites,
                extra_source_archives=extra_source_archives
            )
            db_binaries.append(db_binary)

            # Only regular (non policy-queue) installs may trigger an
            # external signature request for this binary.
            if not policy_upload:
                check_upload_for_external_signature_request(self.session, target_suite, copy_to_suite, db_binary)

        # Some suites keep a copy of the .changes file under dists/.
        if suite.copychanges:
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)

        suite.update_last_changed()

        return (db_source, db_binaries)
1127
1128 - def _install_changes(self):
1129 assert self.changes.valid_signature 1130 control = self.changes.changes 1131 session = self.transaction.session 1132 config = Config() 1133 1134 changelog_id = None 1135 # Only add changelog for sourceful uploads and binNMUs 1136 if self.changes.sourceful or re_bin_only_nmu.search(control['Version']): 1137 query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id' 1138 changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar() 1139 assert changelog_id is not None 1140 1141 db_changes = DBChange() 1142 db_changes.changesname = self.changes.filename 1143 db_changes.source = control['Source'] 1144 db_changes.binaries = control.get('Binary', None) 1145 db_changes.architecture = control['Architecture'] 1146 db_changes.version = control['Version'] 1147 db_changes.distribution = control['Distribution'] 1148 db_changes.urgency = control['Urgency'] 1149 db_changes.maintainer = control['Maintainer'] 1150 db_changes.changedby = control.get('Changed-By', control['Maintainer']) 1151 db_changes.date = control['Date'] 1152 db_changes.fingerprint = self.fingerprint.fingerprint 1153 db_changes.changelog_id = changelog_id 1154 db_changes.closes = self.changes.closed_bugs 1155 1156 try: 1157 self.transaction.session.add(db_changes) 1158 self.transaction.session.flush() 1159 except sqlalchemy.exc.IntegrityError: 1160 raise ArchiveException('{0} is already known.'.format(self.changes.filename)) 1161 1162 return db_changes
1163
1164 - def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
1165 u = PolicyQueueUpload() 1166 u.policy_queue = policy_queue 1167 u.target_suite = target_suite 1168 u.changes = db_changes 1169 u.source = db_source 1170 u.binaries = db_binaries 1171 self.transaction.session.add(u) 1172 self.transaction.session.flush() 1173 1174 queue_files = [self.changes.filename] 1175 queue_files.extend(f.filename for f in self.changes.buildinfo_files) 1176 for fn in queue_files: 1177 src = os.path.join(self.changes.directory, fn) 1178 dst = os.path.join(policy_queue.path, fn) 1179 self.transaction.fs.copy(src, dst, mode=policy_queue.change_perms) 1180 1181 return u
1182
1183 - def try_autobyhand(self):
1184 """Try AUTOBYHAND 1185 1186 Try to handle byhand packages automatically. 1187 1188 @rtype: list of L{daklib.upload.HashedFile} 1189 @return: list of remaining byhand files 1190 """ 1191 assert len(self.reject_reasons) == 0 1192 assert self.changes.valid_signature 1193 assert self.final_suites is not None 1194 assert self._checked 1195 1196 byhand = self.changes.byhand_files 1197 if len(byhand) == 0: 1198 return True 1199 1200 suites = list(self.final_suites) 1201 assert len(suites) == 1, "BYHAND uploads must be to a single suite" 1202 suite = suites[0] 1203 1204 cnf = Config() 1205 control = self.changes.changes 1206 automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages") 1207 1208 remaining = [] 1209 for f in byhand: 1210 if '_' in f.filename: 1211 parts = f.filename.split('_', 2) 1212 if len(parts) != 3: 1213 print("W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)) 1214 remaining.append(f) 1215 continue 1216 1217 package, version, archext = parts 1218 arch, ext = archext.split('.', 1) 1219 else: 1220 parts = f.filename.split('.') 1221 if len(parts) < 2: 1222 print("W: unexpected byhand filename {0}. 
No automatic processing.".format(f.filename)) 1223 remaining.append(f) 1224 continue 1225 1226 package = parts[0] 1227 version = '0' 1228 arch = 'all' 1229 ext = parts[-1] 1230 1231 try: 1232 rule = automatic_byhand_packages.subtree(package) 1233 except KeyError: 1234 remaining.append(f) 1235 continue 1236 1237 if rule['Source'] != self.changes.source_name \ 1238 or rule['Section'] != f.section \ 1239 or ('Extension' in rule and rule['Extension'] != ext): 1240 remaining.append(f) 1241 continue 1242 1243 script = rule['Script'] 1244 retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename), suite.suite_name], shell=False) 1245 if retcode != 0: 1246 print("W: error processing {0}.".format(f.filename)) 1247 remaining.append(f) 1248 1249 return len(remaining) == 0
1250
1251 - def _install_byhand(self, policy_queue_upload, hashed_file):
1252 """install byhand file 1253 1254 @type policy_queue_upload: L{daklib.dbconn.PolicyQueueUpload} 1255 1256 @type hashed_file: L{daklib.upload.HashedFile} 1257 """ 1258 fs = self.transaction.fs 1259 session = self.transaction.session 1260 policy_queue = policy_queue_upload.policy_queue 1261 1262 byhand_file = PolicyQueueByhandFile() 1263 byhand_file.upload = policy_queue_upload 1264 byhand_file.filename = hashed_file.filename 1265 session.add(byhand_file) 1266 session.flush() 1267 1268 src = os.path.join(self.directory, hashed_file.filename) 1269 dst = os.path.join(policy_queue.path, hashed_file.filename) 1270 fs.copy(src, dst, mode=policy_queue.change_perms) 1271 1272 return byhand_file
1273
1274 - def _do_bts_versiontracking(self):
1275 cnf = Config() 1276 fs = self.transaction.fs 1277 1278 btsdir = cnf.get('Dir::BTSVersionTrack') 1279 if btsdir is None or btsdir == '': 1280 return 1281 1282 base = os.path.join(btsdir, self.changes.filename[:-8]) 1283 1284 # version history 1285 sourcedir = self.unpacked_source() 1286 if sourcedir is not None: 1287 dch_path = os.path.join(sourcedir, 'debian', 'changelog') 1288 with open(dch_path, 'r') as fh: 1289 versions = fs.create("{0}.versions".format(base), mode=0o644) 1290 for line in fh.readlines(): 1291 if re_changelog_versions.match(line): 1292 versions.write(line) 1293 versions.close() 1294 1295 # binary -> source mapping 1296 if self.changes.binaries: 1297 debinfo = fs.create("{0}.debinfo".format(base), mode=0o644) 1298 for binary in self.changes.binaries: 1299 control = binary.control 1300 source_package, source_version = binary.source 1301 line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version]) 1302 print(line, file=debinfo) 1303 debinfo.close()
1304
1305 - def _policy_queue(self, suite):
1306 if suite.policy_queue is not None: 1307 return suite.policy_queue 1308 return None
1309
    def install(self):
        """install upload

        Install upload to a suite or policy queue. This method does B{not}
        handle uploads to NEW.

        You need to have called the C{check} method before calling this method.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert self._checked
        assert not self.new

        db_changes = self._install_changes()

        for suite in self.final_suites:
            # Overrides may be delegated to another suite.
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

            policy_queue = self._policy_queue(suite)
            policy_upload = False

            # Uploads bound for a policy queue are installed into the
            # queue's suite instead of the real target suite.
            redirected_suite = suite
            if policy_queue is not None:
                redirected_suite = policy_queue.suite
                policy_upload = True

            # source can be in the suite we install to or any suite we enhance
            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
            for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
                    .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
                    .filter(VersionCheck.check == 'Enhances'):
                source_suite_ids.add(enhanced_suite_id)

            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()

            # Closures capture this iteration's overridesuite; the Section
            # field is used as fallback (only_overrides=False).
            def source_component_func(source):
                return self._source_component(overridesuite, source, only_overrides=False)

            def binary_component_func(binary):
                return self._binary_component(overridesuite, binary, only_overrides=False)

            (db_source, db_binaries) = self._install_to_suite(suite, redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive], policy_upload=policy_upload)

            if policy_queue is not None:
                self._install_policy(policy_queue, suite, db_changes, db_source, db_binaries)

            # copy to build queues
            if policy_queue is None or policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(suite, build_queue.suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])

        self._do_bts_versiontracking()
1366 - def install_to_new(self):
1367 """install upload to NEW 1368 1369 Install upload to NEW. This method does B{not} handle regular uploads 1370 to suites or policy queues. 1371 1372 You need to have called the C{check} method before calling this method. 1373 """ 1374 # Uploads to NEW are special as we don't have overrides. 1375 assert len(self.reject_reasons) == 0 1376 assert self.changes.valid_signature 1377 assert self.final_suites is not None 1378 1379 source = self.changes.source 1380 binaries = self.changes.binaries 1381 byhand = self.changes.byhand_files 1382 1383 # we need a suite to guess components 1384 suites = list(self.final_suites) 1385 assert len(suites) == 1, "NEW uploads must be to a single suite" 1386 suite = suites[0] 1387 1388 # decide which NEW queue to use 1389 if suite.new_queue is None: 1390 new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one() 1391 else: 1392 new_queue = suite.new_queue 1393 if len(byhand) > 0: 1394 # There is only one global BYHAND queue 1395 new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one() 1396 new_suite = new_queue.suite 1397 1398 def binary_component_func(binary): 1399 return self._binary_component(suite, binary, only_overrides=False)
1400 1401 # guess source component 1402 # XXX: should be moved into an extra method 1403 binary_component_names = set() 1404 for binary in binaries: 1405 component = binary_component_func(binary) 1406 binary_component_names.add(component.component_name) 1407 source_component_name = None 1408 for c in self.session.query(Component).order_by(Component.component_id): 1409 guess = c.component_name 1410 if guess in binary_component_names: 1411 source_component_name = guess 1412 break 1413 if source_component_name is None: 1414 source_component = self.session.query(Component).order_by(Component.component_id).first() 1415 else: 1416 source_component = self.session.query(Component).filter_by(component_name=source_component_name).one() 1417 1418 def source_component_func(source): 1419 return source_component 1420 1421 db_changes = self._install_changes() 1422 (db_source, db_binaries) = self._install_to_suite(suite, new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive], policy_upload=True) 1423 policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries) 1424 1425 for f in byhand: 1426 self._install_byhand(policy_upload, f) 1427 1428 self._do_bts_versiontracking() 1429
1430 - def commit(self):
1431 """commit changes""" 1432 self.transaction.commit()
1433
1434 - def rollback(self):
1435 """rollback changes""" 1436 self.transaction.rollback()
1437
    def __enter__(self):
        """Context-manager entry: prepare the upload and return C{self}."""
        self.prepare()
        return self
1441
    def __exit__(self, type, value, traceback):
        """Context-manager exit: remove the working directory and roll back.

        Returns C{None} (falsy), so exceptions raised inside the C{with}
        block are never suppressed.
        """
        # NOTE(review): the parameter names shadow the builtin `type` and the
        # module-level `traceback` import within this method body.
        if self.directory is not None:
            # Discard the temporary extraction directory.
            shutil.rmtree(self.directory)
            self.directory = None
        self.changes = None
        # Any changes not explicitly committed via commit() are undone here.
        self.transaction.rollback()
        return None
1449