Coverage for daklib/dbconn.py: 81%

983 statements  

coverage.py v6.5.0, created at 2025-08-26 22:11 +0000

1"""DB access class 

2 

3@contact: Debian FTPMaster <ftpmaster@debian.org> 

4@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org> 

5@copyright: 2008-2009 Mark Hymers <mhy@debian.org> 

6@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org> 

7@copyright: 2009 Mike O'Connor <stew@debian.org> 

8@license: GNU General Public License version 2 or later 

9""" 

10 

11# This program is free software; you can redistribute it and/or modify 

12# it under the terms of the GNU General Public License as published by 

13# the Free Software Foundation; either version 2 of the License, or 

14# (at your option) any later version. 

15 

16# This program is distributed in the hope that it will be useful, 

17# but WITHOUT ANY WARRANTY; without even the implied warranty of 

18# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

19# GNU General Public License for more details. 

20 

21# You should have received a copy of the GNU General Public License 

22# along with this program; if not, write to the Free Software 

23# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA 

24 

25################################################################################ 

26 

27# < mhy> I need a funny comment 

28# < sgran> two peanuts were walking down a dark street 

29# < sgran> one was a-salted 

30# * mhy looks up the definition of "funny" 

31 

32################################################################################ 

33 

34import functools 

35import inspect 

36import os 

37import re 

38import subprocess 

39import warnings 

40from collections.abc import Iterable 

41from os.path import normpath 

42from tarfile import TarFile 

43from typing import TYPE_CHECKING, Optional, Union 

44 

45import apt_pkg 

46import sqlalchemy 

47import sqlalchemy.types 

48from debian.deb822 import Deb822 

49from sqlalchemy import Table, create_engine, desc 

50 

51# Don't remove this, we re-export the exceptions to scripts which import us 

52from sqlalchemy.exc import IntegrityError, OperationalError, SAWarning, SQLAlchemyError 

53from sqlalchemy.ext.associationproxy import association_proxy 

54from sqlalchemy.orm import ( 

55 backref, 

56 mapper, 

57 object_session, 

58 relation, 

59 sessionmaker, 

60) 

61from sqlalchemy.orm.collections import attribute_mapped_collection 

62from sqlalchemy.orm.exc import NoResultFound 

63 

64import daklib.gpg 

65 

66from .aptversion import AptVersion 

67 

68# Only import Config until Queue stuff is changed to store its config 

69# in the database 

70from .config import Config 

71from .textutils import fix_maintainer 

72 

73# suppress some deprecation warnings in squeeze related to sqlalchemy 

74warnings.filterwarnings( 

75 "ignore", "Predicate of partial index .* ignored during reflection", SAWarning 

76) 

77 

78# (Debian 12 "bookworm") Silence warning targeted at SQLAlchemy dialect maintainers 

79warnings.filterwarnings( 

80 "ignore", 

81 "Dialect postgresql:psycopg2 will not make use of SQL compilation caching.*", 

82 SAWarning, 

83) 

84 

85from .database.base import Base 

86 

87 if TYPE_CHECKING:    87 ↛ 88: line 87 didn't jump to line 88, because the condition on line 87 was never true

88 import sqlalchemy.orm.query 

89 

90 

91################################################################################ 

92 

93# Patch in support for the debversion field type so that it works during 

94# reflection 

95 

96 

97class DebVersion(sqlalchemy.types.UserDefinedType): 

98 def get_col_spec(self): 

99 return "DEBVERSION" 

100 

101 def bind_processor(self, dialect): 

102 return None 

103 

104 def result_processor(self, dialect, coltype): 

105 return None 

106 

107 

108from sqlalchemy.databases import postgresql 

109 

110postgresql.ischema_names["debversion"] = DebVersion 
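
With the reflection patch above, columns of the PostgreSQL `debversion` type can be declared or reflected like any other column type. A minimal sketch (illustrative only, not part of dbconn.py; the `example_versions` table is hypothetical):

example_versions = Table(
    "example_versions",
    Base.metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
    sqlalchemy.Column("version", DebVersion),  # emitted as DEBVERSION in DDL
)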

111 

112################################################################################ 

113 

114__all__ = ["IntegrityError", "SQLAlchemyError", "DebVersion"] 

115 

116################################################################################ 

117 

118 

119def session_wrapper(fn): 

120 """ 

121 Wrapper around common ".., session=None):" handling. If the wrapped 

122 function is called without passing 'session', we create a local one 

123 and destroy it when the function ends. 

124 

125 Also attaches a commit_or_flush method to the session; if we created a 

126 local session, this is a synonym for session.commit(), otherwise it is a 

127 synonym for session.flush(). 

128 """ 

129 

130 @functools.wraps(fn) 

131 def wrapped(*args, **kwargs): 

132 private_transaction = False 

133 

134 # Find the session object 

135 session = kwargs.get("session") 

136 

137 if session is None: 

138 if len(args) < len(inspect.getfullargspec(fn).args): 

139 # No session specified as last argument or in kwargs 

140 private_transaction = True 

141 session = kwargs["session"] = DBConn().session() 

142 else: 

143 # Session is last argument in args 

144 session = args[-1] 

145 if session is None:    145 ↛ 146: line 145 didn't jump to line 146, because the condition on line 145 was never true

146 args = list(args) 

147 session = args[-1] = DBConn().session() 

148 private_transaction = True 

149 

150 if private_transaction: 

151 session.commit_or_flush = session.commit 

152 else: 

153 session.commit_or_flush = session.flush 

154 

155 try: 

156 return fn(*args, **kwargs) 

157 finally: 

158 if private_transaction: 

159 # We created a session; close it. 

160 session.close() 

161 

162 return wrapped 

163 

164 

165__all__.append("session_wrapper") 

166 

167################################################################################ 

168 

169 

170class ORMObject: 

171 """ 

172 ORMObject is a base class for all ORM classes mapped by SQLalchemy. All 

173 derived classes must implement the properties() method. 

174 """ 

175 

176 def properties(self) -> list[str]: 

177 """ 

178 This method should be implemented by all derived classes and returns a 

179 list of the important properties. The properties 'created' and 

180 'modified' will be added automatically. A suffix '_count' should be 

181 added to properties that are lists or query objects. The most important 

182 property name should be returned as the first element in the list 

183 because it is used by repr(). 

184 """ 

185 return [] 

186 

187 def classname(self) -> str: 

188 """ 

189 Returns the name of the class. 

190 """ 

191 return type(self).__name__ 

192 

193 def __repr__(self): 

194 """ 

195 Returns a short string representation of the object using the first 

196 element from the properties() method. 

197 """ 

198 primary_property = self.properties()[0] 

199 value = getattr(self, primary_property) 

200 return "<%s %s>" % (self.classname(), str(value)) 

201 

202 def __str__(self): 

203 """ 

204 Returns a human readable form of the object using the properties() 

205 method. 

206 """ 

207 return "<%s(...)>" % (self.classname()) 

208 

209 @classmethod 

210 @session_wrapper 

211 def get(cls, primary_key, session=None): 

212 """ 

213 This is a support function that allows getting an object by its primary 

214 key. 

215 

216 Architecture.get(3[, session]) 

217 

218 instead of the more verbose 

219 

220 session.query(Architecture).get(3) 

221 """ 

222 return session.query(cls).get(primary_key) 

223 

224 def session(self): 

225 """ 

226 Returns the current session that is associated with the object. May 

227 return None if the object is in a detached state.

228 """ 

229 

230 return object_session(self) 

231 

232 

233__all__.append("ORMObject") 
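
A sketch of the contract a derived class is expected to follow (illustrative; `Widget` is a hypothetical mapped class):

class Widget(ORMObject):
    def properties(self) -> list[str]:
        # the first entry drives __repr__(); list/query attributes get a '_count' suffix
        return ["widget_name", "widget_id", "overrides_count"]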

234 

235################################################################################ 

236 

237 

238class ACL(ORMObject): 

239 def __repr__(self): 

240 return "<ACL {0}>".format(self.name) 

241 

242 

243__all__.append("ACL") 

244 

245 

246class ACLPerSource(ORMObject): 

247 def __repr__(self): 

248 return "<ACLPerSource acl={0} fingerprint={1} source={2} reason={3}>".format( 

249 self.acl.name, self.fingerprint.fingerprint, self.source, self.reason 

250 ) 

251 

252 

253__all__.append("ACLPerSource") 

254 

255 

256class ACLPerSuite(ORMObject): 

257 def __repr__(self): 

258 return "<ACLPerSuite acl={0} fingerprint={1} suite={2} reason={3}>".format( 

259 self.acl.name, 

260 self.fingerprint.fingerprint, 

261 self.suite.suite_name, 

262 self.reason, 

263 ) 

264 

265 

266__all__.append("ACLPerSuite") 

267 

268################################################################################ 

269 

270 

271from .database.architecture import Architecture 

272 

273__all__.append("Architecture") 

274 

275 

276@session_wrapper 

277def get_architecture(architecture: str, session=None) -> Optional[Architecture]: 

278 """ 

279 Returns the Architecture object for the given `architecture` name.

280 

281 :param architecture: The name of the architecture 

282 :param session: Optional SQLA session object (a temporary one will be 

283 generated if not supplied) 

284 :return: Architecture object for the given arch (None if not present) 

285 """ 

286 

287 q = session.query(Architecture).filter_by(arch_string=architecture) 

288 return q.one_or_none() 

289 

290 

291__all__.append("get_architecture") 

292 

293################################################################################ 

294 

295 

296class Archive: 

297 def __init__(self, *args, **kwargs): 

298 pass 

299 

300 def __repr__(self): 

301 return "<Archive %s>" % self.archive_name 

302 

303 

304__all__.append("Archive") 

305 

306 

307@session_wrapper 

308def get_archive(archive: str, session=None) -> Optional[Archive]: 

309 """ 

310 Returns the Archive object for the given `archive` name.

311 

312 :param archive: the name of the archive

313 :param session: Optional SQLA session object (a temporary one will be 

314 generated if not supplied) 

315 :return: Archive object for the given name (None if not present) 

316 """ 

317 archive = archive.lower() 

318 

319 q = session.query(Archive).filter_by(archive_name=archive) 

320 return q.one_or_none() 

321 

322 

323__all__.append("get_archive") 

324 

325################################################################################ 

326 

327 

328class ArchiveFile: 

329 def __init__(self, archive=None, component=None, file=None): 

330 self.archive = archive 

331 self.component = component 

332 self.file = file 

333 

334 @property 

335 def path(self): 

336 return os.path.join( 

337 self.archive.path, "pool", self.component.component_name, self.file.filename 

338 ) 

339 

340 

341__all__.append("ArchiveFile") 

342 

343################################################################################ 

344 

345 

346class BinContents(ORMObject): 

347 def __init__(self, file=None, binary=None): 

348 self.file = file 

349 self.binary = binary 

350 

351 def properties(self) -> list[str]: 

352 return ["file", "binary"] 

353 

354 

355__all__.append("BinContents") 

356 

357################################################################################ 

358 

359 

360class DBBinary(ORMObject): 

361 def __init__( 

362 self, 

363 package=None, 

364 source=None, 

365 version=None, 

366 maintainer=None, 

367 architecture=None, 

368 poolfile=None, 

369 binarytype="deb", 

370 fingerprint=None, 

371 ): 

372 self.package = package 

373 self.source = source 

374 self.version = version 

375 self.maintainer = maintainer 

376 self.architecture = architecture 

377 self.poolfile = poolfile 

378 self.binarytype = binarytype 

379 self.fingerprint = fingerprint 

380 

381 @property 

382 def pkid(self) -> int: 

383 return self.binary_id 

384 

385 @property 

386 def name(self) -> str: 

387 return self.package 

388 

389 @property 

390 def arch_string(self) -> str: 

391 return "%s" % self.architecture 

392 

393 def properties(self) -> list[str]: 

394 return [ 

395 "package", 

396 "version", 

397 "maintainer", 

398 "source", 

399 "architecture", 

400 "poolfile", 

401 "binarytype", 

402 "fingerprint", 

403 "install_date", 

404 "suites_count", 

405 "binary_id", 

406 "contents_count", 

407 "extra_sources", 

408 ] 

409 

410 metadata = association_proxy("key", "value") 

411 

412 def scan_contents(self) -> Iterable[str]: 

413 """ 

414 Yields the contents of the package. Only regular files are yielded and 

415 the path names are normalized after converting them from either utf-8 

416 or iso8859-1 encoding. It yields the string ' <EMPTY PACKAGE>' if the 

417 package does not contain any regular file. 

418 """ 

419 fullpath = self.poolfile.fullpath 

420 dpkg_cmd = ("dpkg-deb", "--fsys-tarfile", fullpath) 

421 dpkg = subprocess.Popen(dpkg_cmd, stdout=subprocess.PIPE) 

422 tar = TarFile.open(fileobj=dpkg.stdout, mode="r|") 

423 for member in tar.getmembers(): 

424 if not member.isdir(): 

425 name = normpath(member.name) 

426 yield name 

427 tar.close() 

428 dpkg.stdout.close() 

429 dpkg.wait() 

430 

431 def read_control(self) -> bytes: 

432 """ 

433 Reads the control information from a binary. 

434 

435 :return: stanza text of the control section. 

436 """ 

437 from . import utils 

438 

439 fullpath = self.poolfile.fullpath 

440 return utils.deb_extract_control(fullpath) 

441 

442 def read_control_fields(self) -> apt_pkg.TagSection: 

443 """ 

444 Reads the control information from a binary and returns it

445 as a dictionary. 

446 

447 :return: fields of the control section as a dictionary. 

448 """ 

449 stanza = self.read_control() 

450 return apt_pkg.TagSection(stanza) 

451 

452 @property 

453 def proxy(self) -> "MetadataProxy": 

454 session = object_session(self) 

455 query = session.query(BinaryMetadata).filter_by(binary=self) 

456 return MetadataProxy(session, query) 

457 

458 

459__all__.append("DBBinary") 
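
Usage sketch for the helpers above (illustrative; assumes `binary` is a DBBinary instance attached to an active session and that its pool file is accessible on disk):

for path in binary.scan_contents():     # normalized names of regular files in the .deb
    print(path)

fields = binary.read_control_fields()   # apt_pkg.TagSection over the control stanza
print(fields["Package"], fields["Version"])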

460 

461 

462@session_wrapper 

463def get_suites_binary_in(package: str, session=None) -> "list[Suite]": 

464 """ 

465 Returns the list of Suite objects that the given `package` name is in

466 

467 :param package: DBBinary package name to search for 

468 :return: list of Suite objects for the given package 

469 """ 

470 

471 return ( 

472 session.query(Suite) 

473 .filter(Suite.binaries.any(DBBinary.package == package)) 

474 .all() 

475 ) 

476 

477 

478__all__.append("get_suites_binary_in") 

479 

480 

481@session_wrapper 

482def get_component_by_package_suite( 

483 package: str, suite_list: list[str], arch_list: Optional[list[str]] = None, session=None

484) -> Optional[str]: 

485 """ 

486 Returns the component name of the newest binary package in suite_list or 

487 None if no package is found. The result can be optionally filtered by a list 

488 of architecture names. 

489 

490 :param package: DBBinary package name to search for 

491 :param suite_list: list of suite_name items 

492 :param arch_list: optional list of arch_string items; defaults to None (no filtering)

493 :return: name of component or None 

494 """ 

495 

496 q = ( 

497 session.query(DBBinary) 

498 .filter_by(package=package) 

499 .join(DBBinary.suites) 

500 .filter(Suite.suite_name.in_(suite_list)) 

501 ) 

502 if arch_list: 

503 q = q.join(DBBinary.architecture).filter( 

504 Architecture.arch_string.in_(arch_list) 

505 ) 

506 binary = q.order_by(desc(DBBinary.version)).first() 

507 if binary is None: 

508 return None 

509 else: 

510 return binary.poolfile.component.component_name 

511 

512 

513__all__.append("get_component_by_package_suite") 
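
Usage sketch (illustrative package, suite and architecture names):

component = get_component_by_package_suite(
    "dak", ["unstable", "experimental"], arch_list=["amd64", "all"]
)   # e.g. "main", or None if the package is not found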

514 

515################################################################################ 

516 

517 

518class BuildQueue: 

519 def __init__(self, *args, **kwargs): 

520 pass 

521 

522 def __repr__(self): 

523 return "<BuildQueue %s>" % self.queue_name 

524 

525 

526__all__.append("BuildQueue") 

527 

528################################################################################ 

529 

530 

531class Component(ORMObject): 

532 def __init__(self, component_name=None): 

533 self.component_name = component_name 

534 

535 def __eq__(self, val): 

536 if isinstance(val, str):    536 ↛ 537: line 536 didn't jump to line 537, because the condition on line 536 was never true

537 warnings.warn( 

538 "comparison with a `str` is deprecated", 

539 DeprecationWarning, 

540 stacklevel=2, 

541 ) 

542 return self.component_name == val 

543 # This signals to use the normal comparison operator 

544 return NotImplemented 

545 

546 def __ne__(self, val): 

547 if isinstance(val, str): 

548 warnings.warn( 

549 "comparison with a `str` is deprecated", 

550 DeprecationWarning, 

551 stacklevel=2, 

552 ) 

553 return self.component_name != val 

554 # This signals to use the normal comparison operator 

555 return NotImplemented 

556 

557 __hash__ = ORMObject.__hash__ 

558 

559 def properties(self) -> list[str]: 

560 return [ 

561 "component_name", 

562 "component_id", 

563 "description", 

564 "meets_dfsg", 

565 "overrides_count", 

566 ] 

567 

568 

569__all__.append("Component") 
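
Sketch of the deprecation above: comparing a Component against a plain string still works but emits a DeprecationWarning, so comparing names explicitly is preferred (illustrative):

c = Component("main")
c == "main"                  # still works, but emits a DeprecationWarning
c.component_name == "main"   # preferred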

570 

571 

572@session_wrapper 

573def get_component(component: str, session=None) -> Optional[Component]: 

574 """ 

575 Returns the Component object for the given `component` name.

576 

577 :param component: The name of the component

578 :return: the Component object for the given component name (None if not present)

579 """ 

580 component = component.lower() 

581 

582 q = session.query(Component).filter_by(component_name=component) 

583 

584 return q.one_or_none() 

585 

586 

587__all__.append("get_component") 

588 

589 

590def get_mapped_component_name(component_name): 

591 cnf = Config() 

592 for m in cnf.value_list("ComponentMappings"):    592 ↛ 593: line 592 didn't jump to line 593, because the loop on line 592 never started

593 (src, dst) = m.split() 

594 if component_name == src: 

595 component_name = dst 

596 return component_name 

597 

598 

599__all__.append("get_mapped_component_name") 
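
Each ComponentMappings entry is a whitespace-separated "source destination" pair; a sketch of the lookup, assuming a hypothetical entry "oldcomp newcomp" is configured in dak.conf:

get_mapped_component_name("oldcomp")   # -> "newcomp"
get_mapped_component_name("main")      # -> "main" (no mapping applies)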

600 

601 

602@session_wrapper 

603def get_mapped_component(component_name: str, session=None) -> Optional[Component]: 

604 """get component after mappings 

605 

606 Evaluate component mappings from ComponentMappings in dak.conf for the 

607 given component name. 

608 

609 .. todo:: 

610 

611 ansgar wants to get rid of this. It's currently only used for 

612 the security archive 

613 

614 :param component_name: component name 

615 :param session: database session 

616 :return: component after applying maps or :const:`None` 

617 """ 

618 component_name = get_mapped_component_name(component_name) 

619 component = ( 

620 session.query(Component).filter_by(component_name=component_name).first() 

621 ) 

622 return component 

623 

624 

625__all__.append("get_mapped_component") 

626 

627 

628@session_wrapper 

629def get_component_names(session=None) -> list[str]: 

630 """ 

631 Returns list of strings of component names. 

632 

633 :return: list of strings of component names 

634 """ 

635 

636 return [x.component_name for x in session.query(Component).all()] 

637 

638 

639__all__.append("get_component_names") 

640 

641################################################################################ 

642 

643 

644class DBConfig: 

645 def __init__(self, *args, **kwargs): 

646 pass 

647 

648 def __repr__(self): 

649 return "<DBConfig %s>" % self.name 

650 

651 

652__all__.append("DBConfig") 

653 

654################################################################################ 

655 

656 

657class DSCFile: 

658 def __init__(self, *args, **kwargs): 

659 pass 

660 

661 def __repr__(self): 

662 return "<DSCFile %s>" % self.dscfile_id 

663 

664 

665__all__.append("DSCFile") 

666 

667 

668@session_wrapper 

669def get_dscfiles( 

670 dscfile_id: Optional[int] = None, 

671 source_id: Optional[int] = None, 

672 poolfile_id: Optional[int] = None, 

673 session=None, 

674) -> list[DSCFile]: 

675 """ 

676 Returns a list of DSCFiles which may be empty 

677 

678 :param dscfile_id: the dscfile_id of the DSCFiles to find 

679 :param source_id: the source id related to the DSCFiles to find 

680 :param poolfile_id: the poolfile id related to the DSCFiles to find 

681 :return: Possibly empty list of DSCFiles 

682 """ 

683 

684 q = session.query(DSCFile) 

685 

686 if dscfile_id is not None: 

687 q = q.filter_by(dscfile_id=dscfile_id) 

688 

689 if source_id is not None: 

690 q = q.filter_by(source_id=source_id) 

691 

692 if poolfile_id is not None: 

693 q = q.filter_by(poolfile_id=poolfile_id) 

694 

695 return q.all() 

696 

697 

698__all__.append("get_dscfiles") 

699 

700################################################################################ 

701 

702 

703class ExternalOverride(ORMObject): 

704 def __init__(self, *args, **kwargs): 

705 pass 

706 

707 def __repr__(self): 

708 return "<ExternalOverride %s = %s: %s>" % (self.package, self.key, self.value) 

709 

710 

711__all__.append("ExternalOverride") 

712 

713################################################################################ 

714 

715 

716class PoolFile(ORMObject): 

717 def __init__(self, filename=None, filesize=-1, md5sum=None): 

718 self.filename = filename 

719 self.filesize = filesize 

720 self.md5sum = md5sum 

721 

722 @property 

723 def fullpath(self) -> str: 

724 session = DBConn().session().object_session(self) 

725 af = ( 

726 session.query(ArchiveFile) 

727 .join(Archive) 

728 .filter(ArchiveFile.file == self) 

729 .order_by(Archive.tainted.desc()) 

730 .first() 

731 ) 

732 return af.path 

733 

734 @property 

735 def component(self) -> Component: 

736 session = DBConn().session().object_session(self) 

737 component_id = ( 

738 session.query(ArchiveFile.component_id) 

739 .filter(ArchiveFile.file == self) 

740 .group_by(ArchiveFile.component_id) 

741 .one() 

742 ) 

743 return session.query(Component).get(component_id) 

744 

745 @property 

746 def basename(self) -> str: 

747 return os.path.basename(self.filename) 

748 

749 def properties(self) -> list[str]: 

750 return [ 

751 "filename", 

752 "file_id", 

753 "filesize", 

754 "md5sum", 

755 "sha1sum", 

756 "sha256sum", 

757 "source", 

758 "binary", 

759 "last_used", 

760 ] 

761 

762 

763__all__.append("PoolFile") 

764 

765################################################################################ 

766 

767 

768class Fingerprint(ORMObject): 

769 def __init__(self, fingerprint=None): 

770 self.fingerprint = fingerprint 

771 

772 def properties(self) -> list[str]: 

773 return ["fingerprint", "fingerprint_id", "keyring", "uid", "binary_reject"] 

774 

775 

776__all__.append("Fingerprint") 

777 

778 

779@session_wrapper 

780def get_fingerprint(fpr: str, session=None) -> Optional[Fingerprint]: 

781 """ 

782 Returns Fingerprint object for given fpr. 

783 

784 :param fpr: The fpr to find / add 

785 :param session: Optional SQL session object (a temporary one will be 

786 generated if not supplied). 

787 :return: the Fingerprint object for the given fpr or None 

788 """ 

789 

790 q = session.query(Fingerprint).filter_by(fingerprint=fpr) 

791 return q.one_or_none() 

792 

793 

794__all__.append("get_fingerprint") 

795 

796 

797@session_wrapper 

798def get_or_set_fingerprint(fpr: str, session=None) -> Fingerprint: 

799 """ 

800 Returns Fingerprint object for given fpr. 

801 

802 If no matching fpr is found, a row is inserted. 

803 

804 :param fpr: The fpr to find / add 

805 :param session: Optional SQL session object (a temporary one will be 

806 generated if not supplied). If not passed, a commit will be performed at 

807 the end of the function, otherwise the caller is responsible for committing.

808 A flush will be performed either way. 

809 :return: the Fingerprint object for the given fpr 

810 """ 

811 

812 q = session.query(Fingerprint).filter_by(fingerprint=fpr) 

813 

814 try: 

815 ret = q.one() 

816 except NoResultFound: 

817 fingerprint = Fingerprint() 

818 fingerprint.fingerprint = fpr 

819 session.add(fingerprint) 

820 session.commit_or_flush() 

821 ret = fingerprint 

822 

823 return ret 

824 

825 

826__all__.append("get_or_set_fingerprint") 
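
Usage sketch: without an explicit session the new row is committed by the wrapper's private session; with a caller-supplied session only a flush happens and the caller commits (the fingerprint value is illustrative and `session` is assumed to be an open session):

fpr = get_or_set_fingerprint("0123456789ABCDEF0123456789ABCDEF01234567")
fpr2 = get_or_set_fingerprint("0123456789ABCDEF0123456789ABCDEF01234567", session)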

827 

828################################################################################ 

829 

830# Helper routine for Keyring class 

831 

832 

833def get_ldap_name(entry) -> str: 

834 name = [] 

835 for k in ["cn", "mn", "sn"]: 

836 ret = entry.get(k) 

837 if not ret: 

838 continue 

839 value = ret[0].decode() 

840 if value and value[0] != "-": 

841 name.append(value) 

842 return " ".join(name) 

843 

844 

845################################################################################ 

846 

847 

848class Keyring: 

849 keys = {} 

850 fpr_lookup: dict[str, str] = {} 

851 

852 def __init__(self, *args, **kwargs): 

853 pass 

854 

855 def __repr__(self): 

856 return "<Keyring %s>" % self.keyring_name 

857 

858 def de_escape_gpg_str(self, txt: str) -> str: 

859 esclist = re.split(r"(\\x..)", txt) 

860 for x in range(1, len(esclist), 2):    860 ↛ 861: line 860 didn't jump to line 861, because the loop on line 860 never started

861 esclist[x] = "%c" % (int(esclist[x][2:], 16)) 

862 return "".join(esclist) 

863 

864 def parse_address(self, uid: str) -> tuple[str, str]: 

865 """parses uid and returns a tuple of real name and email address""" 

866 import email.utils 

867 

868 (name, address) = email.utils.parseaddr(uid) 

869 name = re.sub(r"\s*[(].*[)]", "", name) 

870 name = self.de_escape_gpg_str(name) 

871 if name == "": 

872 name = uid 

873 return (name, address) 

874 

875 def load_keys(self, keyring: str) -> None: 

876 if not self.keyring_id:    876 ↛ 877: line 876 didn't jump to line 877, because the condition on line 876 was never true

877 raise Exception("Must be initialized with database information") 

878 

879 cmd = [ 

880 "gpg", 

881 "--no-default-keyring", 

882 "--keyring", 

883 keyring, 

884 "--with-colons", 

885 "--fingerprint", 

886 "--fingerprint", 

887 ] 

888 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 

889 

890 key = None 

891 need_fingerprint = False 

892 

893 for line_raw in p.stdout: 

894 try: 

895 line = line_raw.decode() 

896 except UnicodeDecodeError: 

897 # Some old UIDs might not use UTF-8 encoding. We assume they 

898 # use latin1. 

899 line = line_raw.decode("latin1") 

900 field = line.split(":") 

901 if field[0] == "pub": 

902 key = field[4] 

903 self.keys[key] = {} 

904 (name, addr) = self.parse_address(field[9]) 

905 if "@" in addr: 905 ↛ 906line 905 didn't jump to line 906, because the condition on line 905 was never true

906 self.keys[key]["email"] = addr 

907 self.keys[key]["name"] = name 

908 need_fingerprint = True 

909 elif key and field[0] == "uid": 

910 (name, addr) = self.parse_address(field[9]) 

911 if "email" not in self.keys[key] and "@" in addr: 911 ↛ 893line 911 didn't jump to line 893, because the condition on line 911 was never false

912 self.keys[key]["email"] = addr 

913 self.keys[key]["name"] = name 

914 elif need_fingerprint and field[0] == "fpr": 

915 self.keys[key]["fingerprints"] = [field[9]] 

916 self.fpr_lookup[field[9]] = key 

917 need_fingerprint = False 

918 

919 (out, err) = p.communicate() 

920 r = p.returncode 

921 if r != 0:    921 ↛ 922: line 921 didn't jump to line 922, because the condition on line 921 was never true

922 raise daklib.gpg.GpgException( 

923 "command failed: %s\nstdout: %s\nstderr: %s\n" % (cmd, out, err) 

924 ) 

925 

926 def import_users_from_ldap( 

927 self, session 

928 ) -> tuple[dict[str, tuple[int, str]], dict[int, tuple[str, str]]]: 

929 import ldap # type: ignore 

930 

931 from .utils import open_ldap_connection 

932 

933 conn = open_ldap_connection() 

934 cnf = Config() 

935 LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"] 

936 Attrs = conn.search_s( 

937 LDAPDn, 

938 ldap.SCOPE_ONELEVEL, 

939 "(&(keyfingerprint=*)(supplementaryGid=%s))" 

940 % (cnf["Import-Users-From-Passwd::ValidGID"]), 

941 ["uid", "keyfingerprint", "cn", "mn", "sn"], 

942 ) 

943 

944 byuid: dict[int, tuple[str, str]] = {} 

945 byname: dict[str, tuple[int, str]] = {} 

946 

947 for i in Attrs: 

948 entry = i[1] 

949 uid = entry["uid"][0].decode() 

950 name = get_ldap_name(entry) 

951 fingerprints = entry["keyFingerPrint"] 

952 keyid = None 

953 for f_raw in fingerprints: 

954 f = f_raw.decode() 

955 key = self.fpr_lookup.get(f, None) 

956 if key not in self.keys: 

957 continue 

958 self.keys[key]["uid"] = uid 

959 

960 if keyid is not None: 

961 continue 

962 keyid = get_or_set_uid(uid, session).uid_id 

963 byuid[keyid] = (uid, name) 

964 byname[uid] = (keyid, name) 

965 

966 return (byname, byuid) 

967 

968 def generate_users_from_keyring( 

969 self, format: str, session 

970 ) -> tuple[dict[str, tuple[int, str]], dict[int, tuple[str, str]]]: 

971 byuid: dict[int, tuple[str, str]] = {} 

972 byname: dict[str, tuple[int, str]] = {} 

973 any_invalid = False 

974 for x in list(self.keys.keys()): 

975 if "email" not in self.keys[x]: 975 ↛ 976line 975 didn't jump to line 976, because the condition on line 975 was never true

976 any_invalid = True 

977 self.keys[x]["uid"] = format % "invalid-uid" 

978 else: 

979 uid = format % self.keys[x]["email"] 

980 keyid = get_or_set_uid(uid, session).uid_id 

981 byuid[keyid] = (uid, self.keys[x]["name"]) 

982 byname[uid] = (keyid, self.keys[x]["name"]) 

983 self.keys[x]["uid"] = uid 

984 

985 if any_invalid:    985 ↛ 986: line 985 didn't jump to line 986, because the condition on line 985 was never true

986 uid = format % "invalid-uid" 

987 keyid = get_or_set_uid(uid, session).uid_id 

988 byuid[keyid] = (uid, "ungeneratable user id") 

989 byname[uid] = (keyid, "ungeneratable user id") 

990 

991 return (byname, byuid) 

992 

993 

994__all__.append("Keyring") 
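
Usage sketch for Keyring (illustrative keyring path; assumes `session` is an open session and that the keyring row already exists so that `keyring_id` is set):

k = session.query(Keyring).filter_by(keyring_name="/srv/keyring.gpg").one()
k.load_keys("/srv/keyring.gpg")    # parses the `gpg --with-colons` listing
name, addr = k.parse_address("Jane Doe <jane@example.org>")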

995 

996 

997@session_wrapper 

998def get_keyring(keyring: str, session=None) -> Optional[Keyring]: 

999 """ 

1000 If `keyring` does not have an entry in the `keyrings` table yet, return None 

1001 If `keyring` already has an entry, simply return the existing :class:`Keyring` 

1002 

1003 :param keyring: the keyring name 

1004 :return: the :class:`Keyring` object for this keyring 

1005 """ 

1006 

1007 q = session.query(Keyring).filter_by(keyring_name=keyring) 

1008 return q.one_or_none() 

1009 

1010 

1011__all__.append("get_keyring") 

1012 

1013 

1014@session_wrapper 

1015def get_active_keyring_paths(session=None) -> list[str]: 

1016 """ 

1017 :return: list of active keyring paths 

1018 """ 

1019 return [ 

1020 x.keyring_name 

1021 for x in session.query(Keyring) 

1022 .filter(Keyring.active == True) # noqa:E712 

1023 .order_by(desc(Keyring.priority)) 

1024 .all() 

1025 ] 

1026 

1027 

1028__all__.append("get_active_keyring_paths") 

1029 

1030################################################################################ 

1031 

1032 

1033class DBChange: 

1034 def __init__(self, *args, **kwargs): 

1035 pass 

1036 

1037 def __repr__(self): 

1038 return "<DBChange %s>" % self.changesname 

1039 

1040 

1041__all__.append("DBChange") 

1042 

1043 

1044@session_wrapper 

1045def get_dbchange(filename: str, session=None) -> Optional[DBChange]: 

1046 """ 

1047 Returns the DBChange object for the given `filename`.

1048 

1049 :param filename: the name of the file 

1050 :param session: Optional SQLA session object (a temporary one will be 

1051 generated if not supplied) 

1052 :return: DBChange object for the given filename (:const:`None` if not present) 

1053 """ 

1054 q = session.query(DBChange).filter_by(changesname=filename) 

1055 return q.one_or_none() 

1056 

1057 

1058__all__.append("get_dbchange") 

1059 

1060################################################################################ 

1061 

1062 

1063class DBChangelog: 

1064 def __init__(self, *args, **kwargs): 

1065 pass 

1066 

1067 def __repr__(self): 

1068 return "<DBChangelog %s>" % self.id 

1069 

1070 

1071__all__.append("DBChangelog") 

1072 

1073################################################################################ 

1074 

1075 

1076class Maintainer(ORMObject): 

1077 def __init__(self, name=None): 

1078 self.name = name 

1079 

1080 def properties(self) -> list[str]: 

1081 return ["name", "maintainer_id"] 

1082 

1083 def get_split_maintainer(self) -> tuple[str, str, str, str]: 

1084 if not hasattr(self, "name") or self.name is None: 

1085 return ("", "", "", "") 

1086 

1087 return fix_maintainer(self.name.strip()) 

1088 

1089 

1090__all__.append("Maintainer") 

1091 

1092 

1093@session_wrapper 

1094def get_or_set_maintainer(name: str, session=None) -> Maintainer: 

1095 """ 

1096 Returns Maintainer object for given maintainer name. 

1097 

1098 If no matching maintainer name is found, a row is inserted. 

1099 

1100 :param name: The maintainer name to add 

1101 :param session: Optional SQL session object (a temporary one will be 

1102 generated if not supplied). If not passed, a commit will be performed at 

1103 the end of the function, otherwise the caller is responsible for committing.

1104 A flush will be performed either way. 

1105 :return: the Maintainer object for the given maintainer 

1106 """ 

1107 

1108 q = session.query(Maintainer).filter_by(name=name) 

1109 try: 

1110 ret = q.one() 

1111 except NoResultFound: 

1112 maintainer = Maintainer() 

1113 maintainer.name = name 

1114 session.add(maintainer) 

1115 session.commit_or_flush() 

1116 ret = maintainer 

1117 

1118 return ret 

1119 

1120 

1121__all__.append("get_or_set_maintainer") 

1122 

1123 

1124@session_wrapper 

1125def get_maintainer(maintainer_id: int, session=None) -> Optional[Maintainer]: 

1126 """ 

1127 Return the Maintainer object behind `maintainer_id` or :const:`None`

1128 if that `maintainer_id` is invalid. 

1129 

1130 :param maintainer_id: the id of the maintainer 

1131 :return: the Maintainer with this `maintainer_id` 

1132 """ 

1133 

1134 return session.query(Maintainer).get(maintainer_id) 

1135 

1136 

1137__all__.append("get_maintainer") 

1138 

1139################################################################################ 

1140 

1141 

1142class NewComment: 

1143 def __init__(self, *args, **kwargs): 

1144 pass 

1145 

1146 def __repr__(self): 

1147 return """<NewComment for '%s %s' (%s)>""" % ( 

1148 self.package, 

1149 self.version, 

1150 self.comment_id, 

1151 ) 

1152 

1153 

1154__all__.append("NewComment") 

1155 

1156 

1157@session_wrapper 

1158def has_new_comment( 

1159 policy_queue: "PolicyQueue", package: str, version: str, session=None 

1160) -> bool: 

1161 """ 

1162 Returns :const:`True` if the given combination of `package`, `version` has a comment. 

1163 

1164 :param package: name of the package 

1165 :param version: package version 

1166 :param session: Optional SQLA session object (a temporary one will be 

1167 generated if not supplied) 

1168 """ 

1169 

1170 q = session.query(NewComment).filter_by(policy_queue=policy_queue) 

1171 q = q.filter_by(package=package) 

1172 q = q.filter_by(version=version) 

1173 

1174 return bool(q.count() > 0) 

1175 

1176 

1177__all__.append("has_new_comment") 

1178 

1179 

1180@session_wrapper 

1181def get_new_comments( 

1182 policy_queue: "PolicyQueue", 

1183 package: Optional[str] = None, 

1184 version: Optional[str] = None, 

1185 comment_id: Optional[int] = None, 

1186 session=None, 

1187) -> list[NewComment]: 

1188 """ 

1189 Returns (possibly empty) list of NewComment objects for the given 

1190 parameters 

1191 

1192 :param package: name of the package 

1193 :param version: package version 

1194 :param comment_id: An id of a comment 

1195 :param session: Optional SQLA session object (a temporary one will be 

1196 generated if not supplied) 

1197 :return: A (possibly empty) list of NewComment objects will be returned 

1198 """ 

1199 

1200 q = session.query(NewComment).filter_by(policy_queue=policy_queue) 

1201 if package is not None:    1201 ↛ 1203: line 1201 didn't jump to line 1203, because the condition on line 1201 was never false

1202 q = q.filter_by(package=package) 

1203 if version is not None:    1203 ↛ 1204: line 1203 didn't jump to line 1204, because the condition on line 1203 was never true

1204 q = q.filter_by(version=version) 

1205 if comment_id is not None:    1205 ↛ 1206: line 1205 didn't jump to line 1206, because the condition on line 1205 was never true

1206 q = q.filter_by(comment_id=comment_id) 

1207 

1208 return q.all() 

1209 

1210 

1211__all__.append("get_new_comments") 

1212 

1213################################################################################ 

1214 

1215 

1216class Override(ORMObject): 

1217 def __init__( 

1218 self, 

1219 package=None, 

1220 suite=None, 

1221 component=None, 

1222 overridetype=None, 

1223 section=None, 

1224 priority=None, 

1225 ): 

1226 self.package = package 

1227 self.suite = suite 

1228 self.component = component 

1229 self.overridetype = overridetype 

1230 self.section = section 

1231 self.priority = priority 

1232 

1233 def properties(self) -> list[str]: 

1234 return ["package", "suite", "component", "overridetype", "section", "priority"] 

1235 

1236 

1237__all__.append("Override") 

1238 

1239 

1240@session_wrapper 

1241def get_override( 

1242 package: str, 

1243 suite: Union[str, list[str], None] = None, 

1244 component: Union[str, list[str], None] = None, 

1245 overridetype: Union[str, list[str], None] = None, 

1246 session=None, 

1247) -> list[Override]: 

1248 """ 

1249 Returns a (possibly empty) list of Override objects for the given parameters

1250 

1251 :param package: The name of the package 

1252 :param suite: The name of the suite (or suites if a list) to limit to. If 

1253 None, don't limit. Defaults to None. 

1254 :param component: The name of the component (or components if a list) to 

1255 limit to. If None, don't limit. Defaults to None. 

1256 :param overridetype: The name of the overridetype (or overridetypes if a list) to 

1257 limit to. If None, don't limit. Defaults to None. 

1258 :param session: Optional SQLA session object (a temporary one will be 

1259 generated if not supplied) 

1260 :return: A (possibly empty) list of Override objects will be returned 

1261 """ 

1262 

1263 q = session.query(Override) 

1264 q = q.filter_by(package=package) 

1265 

1266 if suite is not None: 

1267 if not isinstance(suite, list): 

1268 suite = [suite] 

1269 q = q.join(Suite).filter(Suite.suite_name.in_(suite)) 

1270 

1271 if component is not None: 

1272 if not isinstance(component, list): 

1273 component = [component] 

1274 q = q.join(Component).filter(Component.component_name.in_(component)) 

1275 

1276 if overridetype is not None: 

1277 if not isinstance(overridetype, list): 

1278 overridetype = [overridetype] 

1279 q = q.join(OverrideType).filter(OverrideType.overridetype.in_(overridetype)) 

1280 

1281 return q.all() 

1282 

1283 

1284__all__.append("get_override") 
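
Usage sketch: `suite`, `component` and `overridetype` each accept a single name or a list of names (illustrative values):

overrides = get_override(
    "dak", suite="unstable", component=["main"], overridetype="deb"
)   # possibly empty list of Override objects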

1285 

1286 

1287################################################################################ 

1288 

1289 

1290class OverrideType(ORMObject): 

1291 def __init__(self, overridetype=None): 

1292 self.overridetype = overridetype 

1293 

1294 def properties(self) -> list[str]: 

1295 return ["overridetype", "overridetype_id", "overrides_count"] 

1296 

1297 

1298__all__.append("OverrideType") 

1299 

1300 

1301@session_wrapper 

1302def get_override_type(override_type: str, session=None) -> Optional[OverrideType]: 

1303 """ 

1304 Returns OverrideType object for given `override_type`. 

1305 

1306 :param override_type: The name of the override type 

1307 :param session: Optional SQLA session object (a temporary one will be 

1308 generated if not supplied) 

1309 :return: the OverrideType object for the given override type (None if not present)

1310 """ 

1311 

1312 q = session.query(OverrideType).filter_by(overridetype=override_type) 

1313 return q.one_or_none() 

1314 

1315 

1316__all__.append("get_override_type") 

1317 

1318################################################################################ 

1319 

1320 

1321class PolicyQueue: 

1322 def __init__(self, *args, **kwargs): 

1323 pass 

1324 

1325 def __repr__(self): 

1326 return "<PolicyQueue %s>" % self.queue_name 

1327 

1328 

1329__all__.append("PolicyQueue") 

1330 

1331 

1332@session_wrapper 

1333def get_policy_queue(queuename: str, session=None) -> Optional[PolicyQueue]: 

1334 """ 

1335 Returns PolicyQueue object for given `queuename` 

1336 

1337 :param queuename: The name of the queue 

1338 :param session: Optional SQLA session object (a temporary one will be 

1339 generated if not supplied) 

1340 :return: PolicyQueue object for the given queue 

1341 """ 

1342 

1343 q = session.query(PolicyQueue).filter_by(queue_name=queuename) 

1344 return q.one_or_none() 

1345 

1346 

1347__all__.append("get_policy_queue") 

1348 

1349################################################################################ 

1350 

1351 

1352@functools.total_ordering 

1353class PolicyQueueUpload: 

1354 def _key(self): 

1355 return ( 

1356 self.changes.source, 

1357 AptVersion(self.changes.version), 

1358 self.source is None, 

1359 self.changes.changesname, 

1360 ) 

1361 

1362 def __eq__(self, other: object) -> bool: 

1363 if not isinstance(other, PolicyQueueUpload):    1363 ↛ 1364: line 1363 didn't jump to line 1364, because the condition on line 1363 was never true

1364 return NotImplemented 

1365 return self._key() == other._key() 

1366 

1367 def __lt__(self, other): 

1368 return self._key() < other._key() 

1369 

1370 

1371__all__.append("PolicyQueueUpload") 
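
Sketch of the ordering defined above: uploads sort by source name, then by version (via AptVersion, i.e. dpkg-style comparison), with uploads that include source sorting before binary-only uploads of the same version. `queue.uploads` is an assumed relationship used only for illustration:

for upload in sorted(queue.uploads):
    print(upload.changes.changesname)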

1372 

1373################################################################################ 

1374 

1375 

1376class PolicyQueueByhandFile: 

1377 pass 

1378 

1379 

1380__all__.append("PolicyQueueByhandFile") 

1381 

1382################################################################################ 

1383 

1384 

1385class Priority(ORMObject): 

1386 def __init__(self, priority=None, level=None): 

1387 self.priority = priority 

1388 self.level = level 

1389 

1390 def properties(self) -> list[str]: 

1391 return ["priority", "priority_id", "level", "overrides_count"] 

1392 

1393 def __eq__(self, val): 

1394 if isinstance(val, str): 

1395 warnings.warn( 

1396 "comparison with a `str` is deprecated", 

1397 DeprecationWarning, 

1398 stacklevel=2, 

1399 ) 

1400 return self.priority == val 

1401 # This signals to use the normal comparison operator 

1402 return NotImplemented 

1403 

1404 def __ne__(self, val): 

1405 if isinstance(val, str):    1405 ↛ 1413: line 1405 didn't jump to line 1413, because the condition on line 1405 was never false

1406 warnings.warn( 

1407 "comparison with a `str` is deprecated", 

1408 DeprecationWarning, 

1409 stacklevel=2, 

1410 ) 

1411 return self.priority != val 

1412 # This signals to use the normal comparison operator 

1413 return NotImplemented 

1414 

1415 __hash__ = ORMObject.__hash__ 

1416 

1417 

1418__all__.append("Priority") 

1419 

1420 

1421@session_wrapper 

1422def get_priority(priority: str, session=None) -> Optional[Priority]: 

1423 """ 

1424 Returns Priority object for given `priority` name. 

1425 

1426 :param priority: The name of the priority 

1427 :param session: Optional SQLA session object (a temporary one will be 

1428 generated if not supplied) 

1429 :return: Priority object for the given priority 

1430 """ 

1431 

1432 q = session.query(Priority).filter_by(priority=priority) 

1433 return q.one_or_none() 

1434 

1435 

1436__all__.append("get_priority") 

1437 

1438 

1439@session_wrapper 

1440def get_priorities(session=None) -> dict[str, int]: 

1441 """ 

1442 Returns dictionary of priority names -> id mappings 

1443 

1444 :param session: Optional SQL session object (a temporary one will be 

1445 generated if not supplied) 

1446 :return: dictionary of priority names -> id mappings 

1447 """ 

1448 

1449 ret = {} 

1450 q = session.query(Priority) 

1451 for x in q.all(): 

1452 ret[x.priority] = x.priority_id 

1453 

1454 return ret 

1455 

1456 

1457__all__.append("get_priorities") 

1458 

1459################################################################################ 

1460 

1461 

1462from .database.section import Section 

1463 

1464__all__.append("Section") 

1465 

1466 

1467@session_wrapper 

1468def get_section(section: str, session=None) -> Optional[Section]: 

1469 """ 

1470 Returns Section object for given `section` name. 

1471 

1472 :param section: The name of the section 

1473 :param session: Optional SQLA session object (a temporary one will be 

1474 generated if not supplied) 

1475 :return: Section object for the given section name 

1476 """ 

1477 

1478 q = session.query(Section).filter_by(section=section) 

1479 return q.one_or_none() 

1480 

1481 

1482__all__.append("get_section") 

1483 

1484 

1485@session_wrapper 

1486def get_sections(session=None) -> dict[str, int]: 

1487 """ 

1488 Returns dictionary of section names -> id mappings 

1489 

1490 :param session: Optional SQL session object (a temporary one will be 

1491 generated if not supplied) 

1492 :return: dictionary of section names -> id mappings 

1493 """ 

1494 

1495 ret = {} 

1496 q = session.query(Section) 

1497 for x in q.all(): 

1498 ret[x.section] = x.section_id 

1499 

1500 return ret 

1501 

1502 

1503__all__.append("get_sections") 

1504 

1505################################################################################ 

1506 

1507 

1508class SignatureHistory(ORMObject): 

1509 @classmethod 

1510 def from_signed_file( 

1511 cls, signed_file: "daklib.gpg.SignedFile" 

1512 ) -> "SignatureHistory": 

1513 """signature history entry from signed file 

1514 

1515 :param signed_file: signed file 

1516 """ 

1517 self = cls() 

1518 self.fingerprint = signed_file.primary_fingerprint 

1519 self.signature_timestamp = signed_file.signature_timestamp 

1520 self.contents_sha1 = signed_file.contents_sha1 

1521 return self 

1522 

1523 def query(self, session): 

1524 return ( 

1525 session.query(SignatureHistory) 

1526 .filter_by( 

1527 fingerprint=self.fingerprint, 

1528 signature_timestamp=self.signature_timestamp, 

1529 contents_sha1=self.contents_sha1, 

1530 ) 

1531 .first() 

1532 ) 

1533 

1534 

1535__all__.append("SignatureHistory") 
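
Usage sketch: record a signature once, skipping duplicates (assumes `signed_file` is a daklib.gpg.SignedFile and `session` an open session):

entry = SignatureHistory.from_signed_file(signed_file)
if entry.query(session) is None:
    session.add(entry)
    session.commit()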

1536 

1537################################################################################ 

1538 

1539 

1540class SrcContents(ORMObject): 

1541 def __init__(self, file=None, source=None): 

1542 self.file = file 

1543 self.source = source 

1544 

1545 def properties(self) -> list[str]: 

1546 return ["file", "source"] 

1547 

1548 

1549__all__.append("SrcContents") 

1550 

1551################################################################################ 

1552 

1553 

1554class DBSource(ORMObject): 

1555 def __init__( 

1556 self, 

1557 source=None, 

1558 version=None, 

1559 maintainer=None, 

1560 changedby=None, 

1561 poolfile=None, 

1562 install_date=None, 

1563 fingerprint=None, 

1564 ): 

1565 self.source = source 

1566 self.version = version 

1567 self.maintainer = maintainer 

1568 self.changedby = changedby 

1569 self.poolfile = poolfile 

1570 self.install_date = install_date 

1571 self.fingerprint = fingerprint 

1572 

1573 @property 

1574 def pkid(self) -> int: 

1575 return self.source_id 

1576 

1577 @property 

1578 def name(self) -> str: 

1579 return self.source 

1580 

1581 @property 

1582 def arch_string(self) -> str: 

1583 return "source" 

1584 

1585 def properties(self) -> list[str]: 

1586 return [ 

1587 "source", 

1588 "source_id", 

1589 "maintainer", 

1590 "changedby", 

1591 "fingerprint", 

1592 "poolfile", 

1593 "version", 

1594 "suites_count", 

1595 "install_date", 

1596 "binaries_count", 

1597 "uploaders_count", 

1598 ] 

1599 

1600 def read_control_fields(self) -> Deb822: 

1601 """ 

1602 Reads the control information from a dsc 

1603 

1604 :return: the dsc fields as a Deb822 mapping

1605 """ 

1606 with open(self.poolfile.fullpath, "r") as fd: 

1607 fields = Deb822(fd) 

1608 return fields 

1609 

1610 metadata = association_proxy("key", "value") 

1611 

1612 def scan_contents(self) -> set[str]: 

1613 """ 

1614 Returns a set of names for non directories. The path names are 

1615 normalized after converting them from either utf-8 or iso8859-1 

1616 encoding. 

1617 """ 

1618 fullpath = self.poolfile.fullpath 

1619 from daklib.contents import UnpackedSource 

1620 

1621 unpacked = UnpackedSource(fullpath) 

1622 fileset = set() 

1623 for name in unpacked.get_all_filenames(): 

1624 fileset.add(name) 

1625 return fileset 

1626 

1627 @property 

1628 def proxy(self) -> "MetadataProxy": 

1629 session = object_session(self) 

1630 query = session.query(SourceMetadata).filter_by(source=self) 

1631 return MetadataProxy(session, query) 

1632 

1633 

1634__all__.append("DBSource") 

1635 

1636 

1637@session_wrapper 

1638def get_suites_source_in(source: str, session=None) -> "list[Suite]": 

1639 """ 

1640 Returns the list of Suite objects that the given `source` name is in

1641 

1642 :param source: DBSource package name to search for 

1643 :return: list of Suite objects for the given source 

1644 """ 

1645 

1646 return session.query(Suite).filter(Suite.sources.any(source=source)).all() 

1647 

1648 

1649__all__.append("get_suites_source_in") 

1650 

1651# FIXME: This function fails badly if it finds more than 1 source package and 

1652# its implementation is trivial enough to be inlined. 

1653 

1654 

1655@session_wrapper 

1656def get_source_in_suite( 

1657 source: str, suite_name: Optional[str], session=None 

1658) -> Optional[DBSource]: 

1659 """ 

1660 Returns a DBSource object for a combination of `source` and `suite_name`. 

1661 

1662 :param source: source package name 

1663 :param suite_name: the suite name 

1664 :return: the DBSource object for `source` in `suite` (None if not present)

1665 """ 

1666 suite = get_suite(suite_name, session) 

1667 if suite is None:    1667 ↛ 1668: line 1667 didn't jump to line 1668, because the condition on line 1667 was never true

1668 return None 

1669 return suite.get_sources(source).one_or_none() 

1670 

1671 

1672__all__.append("get_source_in_suite") 

1673 

1674 

1675@session_wrapper 

1676def import_metadata_into_db(obj: Union[DBBinary, DBSource], session=None) -> None: 

1677 """ 

1678 This routine works on either DBBinary or DBSource objects and imports 

1679 their metadata into the database 

1680 """ 

1681 fields = obj.read_control_fields() 

1682 for k in fields.keys(): 

1683 try: 

1684 # Try raw ASCII 

1685 val = str(fields[k]) 

1686 except UnicodeEncodeError: 

1687 # Fall back to UTF-8 

1688 try: 

1689 val = fields[k].encode("utf-8") 

1690 except UnicodeEncodeError: 

1691 # Finally try iso8859-1 

1692 val = fields[k].encode("iso8859-1") 

1693 # Otherwise we allow the exception to percolate up and we cause 

1694 # a reject as someone is playing silly buggers 

1695 

1696 obj.metadata[get_or_set_metadatakey(k, session)] = val 

1697 

1698 session.commit_or_flush() 

1699 

1700 

1701__all__.append("import_metadata_into_db") 

1702 

1703################################################################################ 

1704 

1705 

1706class SrcFormat: 

1707 def __init__(self, *args, **kwargs): 

1708 pass 

1709 

1710 def __repr__(self): 

1711 return "<SrcFormat %s>" % (self.format_name) 

1712 

1713 

1714__all__.append("SrcFormat") 

1715 

1716################################################################################ 

1717 

1718SUITE_FIELDS = [ 

1719 ("SuiteName", "suite_name"), 

1720 ("SuiteID", "suite_id"), 

1721 ("Version", "version"), 

1722 ("Origin", "origin"), 

1723 ("Label", "label"), 

1724 ("Description", "description"), 

1725 ("Untouchable", "untouchable"), 

1726 ("Announce", "announce"), 

1727 ("Codename", "codename"), 

1728 ("OverrideCodename", "overridecodename"), 

1729 ("ValidTime", "validtime"), 

1730 ("Priority", "priority"), 

1731 ("NotAutomatic", "notautomatic"), 

1732 ("CopyChanges", "copychanges"), 

1733 ("OverrideSuite", "overridesuite"), 

1734] 

1735 

1736# Why the heck don't we have any UNIQUE constraints in table suite? 

1737# TODO: Add UNIQUE constraints for appropriate columns. 

1738 

1739 

1740class Suite(ORMObject): 

1741 def __init__(self, suite_name=None, version=None): 

1742 self.suite_name = suite_name 

1743 self.version = version 

1744 

1745 def properties(self) -> list[str]: 

1746 return [ 

1747 "suite_name", 

1748 "version", 

1749 "sources_count", 

1750 "binaries_count", 

1751 "overrides_count", 

1752 ] 

1753 

1754 def __eq__(self, val): 

1755 if isinstance(val, str):    1755 ↛ 1756: line 1755 didn't jump to line 1756, because the condition on line 1755 was never true

1756 warnings.warn( 

1757 "comparison with a `str` is deprecated", 

1758 DeprecationWarning, 

1759 stacklevel=2, 

1760 ) 

1761 return self.suite_name == val 

1762 # This signals to use the normal comparison operator 

1763 return NotImplemented 

1764 

1765 def __ne__(self, val): 

1766 if isinstance(val, str):    1766 ↛ 1767: line 1766 didn't jump to line 1767, because the condition on line 1766 was never true

1767 warnings.warn( 

1768 "comparison with a `str` is deprecated", 

1769 DeprecationWarning, 

1770 stacklevel=2, 

1771 ) 

1772 return self.suite_name != val 

1773 # This signals to use the normal comparison operator 

1774 return NotImplemented 

1775 

1776 __hash__ = ORMObject.__hash__ 

1777 

1778 def details(self) -> str: 

1779 ret = [] 

1780 for disp, field in SUITE_FIELDS: 

1781 val = getattr(self, field, None) 

1782 if val is not None: 

1783 ret.append("%s: %s" % (disp, val)) 

1784 

1785 return "\n".join(ret) 

1786 

1787 def get_architectures( 

1788 self, skipsrc: bool = False, skipall: bool = False 

1789 ) -> list[Architecture]: 

1790 """ 

1791 Returns list of Architecture objects 

1792 

1793 :param skipsrc: Whether to skip returning the 'source' architecture entry 

1794 :param skipall: Whether to skip returning the 'all' architecture entry 

1795 :return: list of Architecture objects for the given name (may be empty) 

1796 """ 

1797 

1798 q = object_session(self).query(Architecture).with_parent(self) 

1799 if skipsrc: 

1800 q = q.filter(Architecture.arch_string != "source") 

1801 if skipall: 

1802 q = q.filter(Architecture.arch_string != "all") 

1803 return q.order_by(Architecture.arch_string).all() 

1804 

1805 def get_sources(self, source: str) -> sqlalchemy.orm.query.Query: 

1806 """ 

1807 Returns a query object representing DBSource that is part of this suite. 

1808 

1809 :param source: source package name 

1810 :return: a query of DBSource 

1811 """ 

1812 

1813 session = object_session(self) 

1814 return session.query(DBSource).filter_by(source=source).with_parent(self) 

1815 

1816 def get_overridesuite(self) -> "Suite": 

1817 if self.overridesuite is None: 

1818 return self 

1819 else: 

1820 return ( 

1821 object_session(self) 

1822 .query(Suite) 

1823 .filter_by(suite_name=self.overridesuite) 

1824 .one() 

1825 ) 

1826 

1827 def update_last_changed(self) -> None: 

1828 self.last_changed = sqlalchemy.func.now() 

1829 

1830 @property 

1831 def path(self) -> str: 

1832 return os.path.join(self.archive.path, "dists", self.suite_name) 

1833 

1834 @property 

1835 def release_suite_output(self) -> str: 

1836 if self.release_suite is not None:    1836 ↛ 1837: line 1836 didn't jump to line 1837, because the condition on line 1836 was never true

1837 return self.release_suite 

1838 return self.suite_name 

1839 

1840 

1841__all__.append("Suite") 

1842 

1843 

1844@session_wrapper 

1845def get_suite(suite: str, session=None) -> Optional[Suite]: 

1846 """ 

1847 Returns Suite object for given `suite` name. 

1848 

1849 :param suite: The name of the suite 

1850 :param session: Optional SQLA session object (a temporary one will be 

1851 generated if not supplied) 

1852 :return: Suite object for the requested suite name (None if not present) 

1853 """ 

1854 

1855 # Start by looking for the dak internal name 

1856 q = session.query(Suite).filter_by(suite_name=suite) 

1857 try: 

1858 return q.one() 

1859 except NoResultFound: 

1860 pass 

1861 

1862 # Now try codename 

1863 q = session.query(Suite).filter_by(codename=suite) 

1864 try: 

1865 return q.one() 

1866 except NoResultFound: 

1867 pass 

1868 

1869 # Finally give release_suite a try 

1870 q = session.query(Suite).filter_by(release_suite=suite) 

1871 return q.one_or_none() 

1872 

1873 

1874__all__.append("get_suite") 
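
get_suite() falls back from the dak-internal suite name to the codename and finally to the release suite name. A short sketch of that lookup order (the names "unstable" and "sid" are assumptions about the local archive configuration):

from daklib.dbconn import DBConn, get_suite

session = DBConn().session()
print(get_suite("unstable", session))       # matched on suite_name
print(get_suite("sid", session))            # same suite, matched on codename
print(get_suite("no-such-suite", session))  # no match at all -> None
session.close()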

1875 

1876################################################################################ 

1877 

1878 

1879@session_wrapper 

1880def get_suite_architectures( 

1881 suite: str, skipsrc: bool = False, skipall: bool = False, session=None 

1882) -> list[Architecture]: 

1883 """ 

1884 Returns list of Architecture objects for given `suite` name. The list is 

1885 empty if `suite` does not exist. 

1886 

1887 :param suite: Suite name to search for 

1888 :param skipsrc: Whether to skip returning the 'source' architecture entry 

1889 :param skipall: Whether to skip returning the 'all' architecture entry 

1890 :param session: Optional SQL session object (a temporary one will be 

1891 generated if not supplied) 

1892 :return: list of Architecture objects for the given name (may be empty) 

1893 """ 

1894 

1895 try: 

1896 return get_suite(suite, session).get_architectures(skipsrc, skipall) 

1897 except AttributeError: 

1898 return [] 

1899 

1900 

1901__all__.append("get_suite_architectures") 
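
The same filtering is available without fetching the Suite object first; an unknown suite simply yields an empty list. A small sketch (the suite name is assumed):

from daklib.dbconn import get_suite_architectures

for arch in get_suite_architectures("unstable", skipsrc=True, skipall=True):
    print(arch.arch_string)
print(get_suite_architectures("no-such-suite"))  # -> []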

1902 

1903################################################################################ 

1904 

1905 

1906class Uid(ORMObject): 

1907 def __init__(self, uid=None, name=None): 

1908 self.uid = uid 

1909 self.name = name 

1910 

1911 def __eq__(self, val): 

1912 if isinstance(val, str): 

1913 warnings.warn( 

1914 "comparison with a `str` is deprecated", 

1915 DeprecationWarning, 

1916 stacklevel=2, 

1917 ) 

1918 return self.uid == val 

1919 # This signals to use the normal comparison operator 

1920 return NotImplemented 

1921 

1922 def __ne__(self, val): 

1923 if isinstance(val, str): 

1924 warnings.warn( 

1925 "comparison with a `str` is deprecated", 

1926 DeprecationWarning, 

1927 stacklevel=2, 

1928 ) 

1929 return self.uid != val 

1930 # This signals to use the normal comparison operator 

1931 return NotImplemented 

1932 

1933 __hash__ = ORMObject.__hash__ 

1934 

1935 def properties(self) -> list[str]: 

1936 return ["uid", "name", "fingerprint"] 

1937 

1938 

1939__all__.append("Uid") 

1940 

1941 

1942@session_wrapper 

1943def get_or_set_uid(uidname: str, session=None) -> Uid: 

1944 """ 

1945 Returns uid object for given uidname. 

1946 

1947 If no matching uidname is found, a row is inserted. 

1948 

1949 :param uidname: The uid to add 

1950 :param session: Optional SQL session object (a temporary one will be 

1951 generated if not supplied). If not passed, a commit will be performed at 

1952 the end of the function, otherwise the caller is responsible for committing. 

1953 :return: the uid object for the given uidname 

1954 """ 

1955 

1956 q = session.query(Uid).filter_by(uid=uidname) 

1957 

1958 try: 

1959 ret = q.one() 

1960 except NoResultFound: 

1961 uid = Uid() 

1962 uid.uid = uidname 

1963 session.add(uid) 

1964 session.commit_or_flush() 

1965 ret = uid 

1966 

1967 return ret 

1968 

1969 

1970__all__.append("get_or_set_uid") 
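
A sketch of the get-or-create behaviour described in the docstring: if the caller supplies the session it is also responsible for committing, otherwise the wrapper commits on its own. The uid value below is made up for illustration:

from daklib.dbconn import DBConn, get_or_set_uid

session = DBConn().session()
uid = get_or_set_uid("someone@example.org", session=session)
print(uid.uid, uid.name)
session.commit()  # we passed our own session, so we commit ourselves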

1971 

1972 

1973@session_wrapper 

1974def get_uid_from_fingerprint(fpr: str, session=None) -> Optional[Uid]: 

1975 q = session.query(Uid) 

1976 q = q.join(Fingerprint).filter_by(fingerprint=fpr) 

1977 

1978 return q.one_or_none() 

1979 

1980 

1981__all__.append("get_uid_from_fingerprint") 

1982 

1983################################################################################ 

1984 

1985 

1986class MetadataKey(ORMObject): 

1987 def __init__(self, key=None): 

1988 self.key = key 

1989 

1990 def properties(self) -> list[str]: 

1991 return ["key"] 

1992 

1993 

1994__all__.append("MetadataKey") 

1995 

1996 

1997@session_wrapper 

1998def get_or_set_metadatakey(keyname: str, session=None) -> MetadataKey: 

1999 """ 

2000 Returns MetadataKey object for the given keyname. 

2001 

2002 If no matching keyname is found, a row is inserted. 

2003 

2004 :param keyname: The keyname to add 

2005 :param session: Optional SQL session object (a temporary one will be 

2006 generated if not supplied). If not passed, a commit will be performed at 

2007 the end of the function, otherwise the caller is responsible for committing. 

2008 :return: the metadatakey object for the given keyname 

2009 """ 

2010 

2011 q = session.query(MetadataKey).filter_by(key=keyname) 

2012 

2013 try: 

2014 ret = q.one() 

2015 except NoResultFound: 

2016 ret = MetadataKey(keyname) 

2017 session.add(ret) 

2018 session.commit_or_flush() 

2019 

2020 return ret 

2021 

2022 

2023__all__.append("get_or_set_metadatakey") 
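
Metadata keys follow the same get-or-create pattern; "Homepage" below is only an example of a key name and may or may not already exist in a given database:

from daklib.dbconn import DBConn, get_or_set_metadatakey

session = DBConn().session()
key = get_or_set_metadatakey("Homepage", session=session)
print(key.key)
session.commit()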

2024 

2025################################################################################ 

2026 

2027 

2028class BinaryMetadata(ORMObject): 

2029 def __init__(self, key=None, value=None, binary=None): 

2030 self.key = key 

2031 self.value = value 

2032 if binary is not None: 

2033 self.binary = binary 

2034 

2035 def properties(self) -> list[str]: 

2036 return ["binary", "key", "value"] 

2037 

2038 

2039__all__.append("BinaryMetadata") 

2040 

2041################################################################################ 

2042 

2043 

2044class SourceMetadata(ORMObject): 

2045 def __init__(self, key=None, value=None, source=None): 

2046 self.key = key 

2047 self.value = value 

2048 if source is not None: 

2049 self.source = source 

2050 

2051 def properties(self) -> list[str]: 

2052 return ["source", "key", "value"] 

2053 

2054 

2055__all__.append("SourceMetadata") 

2056 

2057################################################################################ 

2058 

2059 

2060class MetadataProxy: 

2061 def __init__(self, session, query): 

2062 self.session = session 

2063 self.query = query 

2064 

2065 def _get(self, key): 

2066 metadata_key = self.session.query(MetadataKey).filter_by(key=key).first() 

2067 if metadata_key is None: 

2068 return None 

2069 metadata = self.query.filter_by(key=metadata_key).first() 

2070 return metadata 

2071 

2072 def __contains__(self, key: str) -> bool: 

2073 if self._get(key) is not None: 2073 ↛ 2075 (line 2073 didn't jump to line 2075, because the condition on line 2073 was never false)

2074 return True 

2075 return False 

2076 

2077 def __getitem__(self, key: str) -> str: 

2078 metadata = self._get(key) 

2079 if metadata is None: 

2080 raise KeyError 

2081 return metadata.value 

2082 

2083 def get(self, key: str, default: Optional[str] = None) -> Optional[str]: 

2084 try: 

2085 return self[key] 

2086 except KeyError: 

2087 return default 

2088 

2089 
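
MetadataProxy offers dict-like, read-only access over a query of metadata rows. It is normally constructed by higher-level code, but a standalone sketch over binary metadata (the "Description" key is an assumption about what is stored) looks like this:

from daklib.dbconn import BinaryMetadata, DBConn, MetadataProxy

session = DBConn().session()
# A real caller would restrict the query to a single binary package.
proxy = MetadataProxy(session, session.query(BinaryMetadata))
if "Description" in proxy:
    print(proxy["Description"])
print(proxy.get("No-Such-Field", "n/a"))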

2090################################################################################ 

2091 

2092 

2093class VersionCheck(ORMObject): 

2094 def __init__(self, *args, **kwargs): 

2095 pass 

2096 

2097 def properties(self) -> list[str]: 

2098 return ["check"] 

2099 

2100 

2101__all__.append("VersionCheck") 

2102 

2103 

2104@session_wrapper 

2105def get_version_checks( 

2106 suite_name: str, check: Optional[str] = None, session=None 

2107) -> list[VersionCheck]: 

2108 suite = get_suite(suite_name, session) 

2109 if not suite: 2109 ↛ 2112 (line 2109 didn't jump to line 2112, because the condition on line 2109 was never true)

2110 # Make sure that what we return is iterable so that list comprehensions 

2111 # involving this don't cause a traceback 

2112 return [] 

2113 q = session.query(VersionCheck).filter_by(suite=suite) 

2114 if check: 2114 ↛ 2116 (line 2114 didn't jump to line 2116, because the condition on line 2114 was never false)

2115 q = q.filter_by(check=check) 

2116 return q.all() 

2117 

2118 

2119__all__.append("get_version_checks") 
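
Version checks express constraints such as "must be newer than the version in some reference suite". A short sketch (both the suite name and the check name are assumptions about the local configuration):

from daklib.dbconn import get_version_checks

for vc in get_version_checks("unstable", check="MustBeNewerThan"):
    print(vc.check, vc.reference.suite_name)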

2120 

2121################################################################################ 

2122 

2123 

2124class DBConn: 

2125 """ 

2126 Database connection handler: Borg-style shared state, table reflection and ORM mapper setup (see the usage sketch at the end of this module). 

2127 """ 

2128 

2129 __shared_state = {} 

2130 

2131 db_meta = None 

2132 

2133 tbl_architecture = Architecture.__table__ 

2134 

2135 tables = ( 

2136 "acl", 

2137 "acl_architecture_map", 

2138 "acl_fingerprint_map", 

2139 "acl_per_source", 

2140 "acl_per_suite", 

2141 "archive", 

2142 "bin_associations", 

2143 "bin_contents", 

2144 "binaries", 

2145 "binaries_metadata", 

2146 "build_queue", 

2147 "changelogs_text", 

2148 "changes", 

2149 "component", 

2150 "component_suite", 

2151 "config", 

2152 "dsc_files", 

2153 "external_files", 

2154 "external_overrides", 

2155 "external_signature_requests", 

2156 "extra_src_references", 

2157 "files", 

2158 "files_archive_map", 

2159 "fingerprint", 

2160 "hashfile", 

2161 "keyrings", 

2162 "maintainer", 

2163 "metadata_keys", 

2164 "new_comments", 

2165 # TODO: the maintainer column in table override should be removed. 

2166 "override", 

2167 "override_type", 

2168 "policy_queue", 

2169 "policy_queue_upload", 

2170 "policy_queue_upload_binaries_map", 

2171 "policy_queue_byhand_file", 

2172 "priority", 

2173 "signature_history", 

2174 "source", 

2175 "source_metadata", 

2176 "src_associations", 

2177 "src_contents", 

2178 "src_format", 

2179 "src_uploaders", 

2180 "suite", 

2181 "suite_acl_map", 

2182 "suite_architectures", 

2183 "suite_build_queue_copy", 

2184 "suite_permission", 

2185 "suite_src_formats", 

2186 "uid", 

2187 "version_check", 

2188 ) 

2189 

2190 views = ( 

2191 "bin_associations_binaries", 

2192 "changelogs", 

2193 "newest_source", 

2194 "newest_src_association", 

2195 "package_list", 

2196 "source_suite", 

2197 "src_associations_src", 

2198 ) 

2199 

2200 def __init__(self, *args, **kwargs): 

2201 self.__dict__ = self.__shared_state 

2202 

2203 if not getattr(self, "initialised", False): 

2204 self.initialised = True 

2205 self.debug = "debug" in kwargs 

2206 self.__createconn() 

2207 
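
# Reflect every table and view listed above from the connected database;
# each becomes an attribute (self.tbl_<name> / self.view_<name>) that the
# mapper setup below refers to.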

2208 def __setuptables(self): 

2209 for table_name in self.tables: 

2210 table = Table(table_name, self.db_meta, autoload=True, extend_existing=True) 

2211 setattr(self, "tbl_%s" % table_name, table) 

2212 

2213 for view_name in self.views: 

2214 view = Table(view_name, self.db_meta, autoload=True) 

2215 setattr(self, "view_%s" % view_name, view) 

2216 

2217 def __setupmappers(self): 

2218 mapper( 

2219 ACL, 

2220 self.tbl_acl, 

2221 properties=dict( 

2222 architectures=relation( 

2223 Architecture, 

2224 secondary=self.tbl_acl_architecture_map, 

2225 collection_class=set, 

2226 ), 

2227 fingerprints=relation( 

2228 Fingerprint, 

2229 secondary=self.tbl_acl_fingerprint_map, 

2230 collection_class=set, 

2231 ), 

2232 match_keyring=relation( 

2233 Keyring, 

2234 primaryjoin=( 

2235 self.tbl_acl.c.match_keyring_id == self.tbl_keyrings.c.id 

2236 ), 

2237 ), 

2238 per_source=relation( 

2239 ACLPerSource, collection_class=set, back_populates="acl" 

2240 ), 

2241 per_suite=relation( 

2242 ACLPerSuite, collection_class=set, back_populates="acl" 

2243 ), 

2244 ), 

2245 ) 

2246 

2247 mapper( 

2248 ACLPerSource, 

2249 self.tbl_acl_per_source, 

2250 properties=dict( 

2251 acl=relation(ACL, back_populates="per_source"), 

2252 fingerprint=relation( 

2253 Fingerprint, 

2254 primaryjoin=( 

2255 self.tbl_acl_per_source.c.fingerprint_id 

2256 == self.tbl_fingerprint.c.id 

2257 ), 

2258 ), 

2259 created_by=relation( 

2260 Fingerprint, 

2261 primaryjoin=( 

2262 self.tbl_acl_per_source.c.created_by_id 

2263 == self.tbl_fingerprint.c.id 

2264 ), 

2265 ), 

2266 ), 

2267 ) 

2268 

2269 mapper( 

2270 ACLPerSuite, 

2271 self.tbl_acl_per_suite, 

2272 properties=dict( 

2273 acl=relation(ACL, back_populates="per_suite"), 

2274 fingerprint=relation( 

2275 Fingerprint, 

2276 primaryjoin=( 

2277 self.tbl_acl_per_suite.c.fingerprint_id 

2278 == self.tbl_fingerprint.c.id 

2279 ), 

2280 ), 

2281 suite=relation( 

2282 Suite, 

2283 primaryjoin=( 

2284 self.tbl_acl_per_suite.c.suite_id == self.tbl_suite.c.id 

2285 ), 

2286 ), 

2287 created_by=relation( 

2288 Fingerprint, 

2289 primaryjoin=( 

2290 self.tbl_acl_per_suite.c.created_by_id 

2291 == self.tbl_fingerprint.c.id 

2292 ), 

2293 ), 

2294 ), 

2295 ) 

2296 

2297 mapper( 

2298 Archive, 

2299 self.tbl_archive, 

2300 properties=dict( 

2301 archive_id=self.tbl_archive.c.id, archive_name=self.tbl_archive.c.name 

2302 ), 

2303 ) 

2304 

2305 mapper( 

2306 ArchiveFile, 

2307 self.tbl_files_archive_map, 

2308 properties=dict( 

2309 archive=relation(Archive, backref="files"), 

2310 component=relation(Component), 

2311 file=relation(PoolFile, backref="archives"), 

2312 ), 

2313 ) 

2314 

2315 mapper( 

2316 BuildQueue, 

2317 self.tbl_build_queue, 

2318 properties=dict( 

2319 queue_id=self.tbl_build_queue.c.id, 

2320 suite=relation( 

2321 Suite, 

2322 primaryjoin=( 

2323 self.tbl_build_queue.c.suite_id == self.tbl_suite.c.id 

2324 ), 

2325 ), 

2326 ), 

2327 ) 

2328 

2329 mapper( 

2330 DBBinary, 

2331 self.tbl_binaries, 

2332 properties=dict( 

2333 binary_id=self.tbl_binaries.c.id, 

2334 package=self.tbl_binaries.c.package, 

2335 version=self.tbl_binaries.c.version, 

2336 maintainer_id=self.tbl_binaries.c.maintainer, 

2337 maintainer=relation(Maintainer), 

2338 source_id=self.tbl_binaries.c.source, 

2339 source=relation(DBSource, backref="binaries"), 

2340 arch_id=self.tbl_binaries.c.architecture, 

2341 architecture=relation(Architecture), 

2342 poolfile_id=self.tbl_binaries.c.file, 

2343 poolfile=relation(PoolFile), 

2344 binarytype=self.tbl_binaries.c.type, 

2345 fingerprint_id=self.tbl_binaries.c.sig_fpr, 

2346 fingerprint=relation( 

2347 Fingerprint, 

2348 primaryjoin=( 

2349 self.tbl_binaries.c.sig_fpr == self.tbl_fingerprint.c.id 

2350 ), 

2351 ), 

2352 authorized_by_fingerprint=relation( 

2353 Fingerprint, 

2354 primaryjoin=( 

2355 self.tbl_binaries.c.authorized_by_fingerprint_id 

2356 == self.tbl_fingerprint.c.id 

2357 ), 

2358 ), 

2359 install_date=self.tbl_binaries.c.install_date, 

2360 suites=relation( 

2361 Suite, 

2362 secondary=self.tbl_bin_associations, 

2363 backref=backref("binaries", lazy="dynamic"), 

2364 ), 

2365 extra_sources=relation( 

2366 DBSource, 

2367 secondary=self.tbl_extra_src_references, 

2368 backref=backref("extra_binary_references", lazy="dynamic"), 

2369 ), 

2370 key=relation( 

2371 BinaryMetadata, 

2372 cascade="all", 

2373 collection_class=attribute_mapped_collection("key"), 

2374 back_populates="binary", 

2375 ), 

2376 ), 

2377 ) 

2378 

2379 mapper( 

2380 Component, 

2381 self.tbl_component, 

2382 properties=dict( 

2383 component_id=self.tbl_component.c.id, 

2384 component_name=self.tbl_component.c.name, 

2385 ), 

2386 ) 

2387 

2388 mapper( 

2389 DBConfig, self.tbl_config, properties=dict(config_id=self.tbl_config.c.id) 

2390 ) 

2391 

2392 mapper( 

2393 DSCFile, 

2394 self.tbl_dsc_files, 

2395 properties=dict( 

2396 dscfile_id=self.tbl_dsc_files.c.id, 

2397 source_id=self.tbl_dsc_files.c.source, 

2398 source=relation(DBSource, back_populates="srcfiles"), 

2399 poolfile_id=self.tbl_dsc_files.c.file, 

2400 poolfile=relation(PoolFile), 

2401 ), 

2402 ) 

2403 

2404 mapper( 

2405 ExternalOverride, 

2406 self.tbl_external_overrides, 

2407 properties=dict( 

2408 suite_id=self.tbl_external_overrides.c.suite, 

2409 suite=relation(Suite), 

2410 component_id=self.tbl_external_overrides.c.component, 

2411 component=relation(Component), 

2412 ), 

2413 ) 

2414 

2415 mapper( 

2416 PoolFile, 

2417 self.tbl_files, 

2418 properties=dict( 

2419 file_id=self.tbl_files.c.id, filesize=self.tbl_files.c.size 

2420 ), 

2421 ) 

2422 

2423 mapper( 

2424 Fingerprint, 

2425 self.tbl_fingerprint, 

2426 properties=dict( 

2427 fingerprint_id=self.tbl_fingerprint.c.id, 

2428 uid_id=self.tbl_fingerprint.c.uid, 

2429 uid=relation(Uid, back_populates="fingerprint"), 

2430 keyring_id=self.tbl_fingerprint.c.keyring, 

2431 keyring=relation(Keyring), 

2432 acl=relation(ACL), 

2433 ), 

2434 ) 

2435 

2436 mapper( 

2437 Keyring, 

2438 self.tbl_keyrings, 

2439 properties=dict( 

2440 keyring_name=self.tbl_keyrings.c.name, 

2441 keyring_id=self.tbl_keyrings.c.id, 

2442 acl=relation( 

2443 ACL, primaryjoin=(self.tbl_keyrings.c.acl_id == self.tbl_acl.c.id) 

2444 ), 

2445 ), 

2446 ) 

2447 

2448 mapper( 

2449 DBChange, 

2450 self.tbl_changes, 

2451 properties=dict( 

2452 change_id=self.tbl_changes.c.id, 

2453 changelog_id=self.tbl_changes.c.changelog_id, 

2454 seen=self.tbl_changes.c.seen, 

2455 source=self.tbl_changes.c.source, 

2456 binaries=self.tbl_changes.c.binaries, 

2457 architecture=self.tbl_changes.c.architecture, 

2458 distribution=self.tbl_changes.c.distribution, 

2459 urgency=self.tbl_changes.c.urgency, 

2460 maintainer=self.tbl_changes.c.maintainer, 

2461 changedby=self.tbl_changes.c.changedby, 

2462 date=self.tbl_changes.c.date, 

2463 version=self.tbl_changes.c.version, 

2464 ), 

2465 ) 

2466 

2467 mapper( 

2468 DBChangelog, 

2469 self.tbl_changelogs_text, 

2470 properties=dict( 

2471 id=self.tbl_changelogs_text.c.id, 

2472 changelog=self.tbl_changelogs_text.c.changelog, 

2473 ), 

2474 ) 

2475 

2476 mapper( 

2477 Maintainer, 

2478 self.tbl_maintainer, 

2479 properties=dict( 

2480 maintainer_id=self.tbl_maintainer.c.id, 

2481 maintains_sources=relation( 

2482 DBSource, 

2483 backref="maintainer", 

2484 primaryjoin=( 

2485 self.tbl_maintainer.c.id == self.tbl_source.c.maintainer 

2486 ), 

2487 ), 

2488 changed_sources=relation( 

2489 DBSource, 

2490 backref="changedby", 

2491 primaryjoin=( 

2492 self.tbl_maintainer.c.id == self.tbl_source.c.changedby 

2493 ), 

2494 ), 

2495 ), 

2496 ) 

2497 

2498 mapper( 

2499 NewComment, 

2500 self.tbl_new_comments, 

2501 properties=dict( 

2502 comment_id=self.tbl_new_comments.c.id, 

2503 policy_queue=relation(PolicyQueue), 

2504 ), 

2505 ) 

2506 

2507 mapper( 

2508 Override, 

2509 self.tbl_override, 

2510 properties=dict( 

2511 suite_id=self.tbl_override.c.suite, 

2512 suite=relation(Suite, backref=backref("overrides", lazy="dynamic")), 

2513 package=self.tbl_override.c.package, 

2514 component_id=self.tbl_override.c.component, 

2515 component=relation( 

2516 Component, backref=backref("overrides", lazy="dynamic") 

2517 ), 

2518 priority_id=self.tbl_override.c.priority, 

2519 priority=relation( 

2520 Priority, backref=backref("overrides", lazy="dynamic") 

2521 ), 

2522 section_id=self.tbl_override.c.section, 

2523 section=relation(Section, backref=backref("overrides", lazy="dynamic")), 

2524 overridetype_id=self.tbl_override.c.type, 

2525 overridetype=relation( 

2526 OverrideType, backref=backref("overrides", lazy="dynamic") 

2527 ), 

2528 ), 

2529 ) 

2530 

2531 mapper( 

2532 OverrideType, 

2533 self.tbl_override_type, 

2534 properties=dict( 

2535 overridetype=self.tbl_override_type.c.type, 

2536 overridetype_id=self.tbl_override_type.c.id, 

2537 ), 

2538 ) 

2539 

2540 mapper( 

2541 PolicyQueue, 

2542 self.tbl_policy_queue, 

2543 properties=dict( 

2544 policy_queue_id=self.tbl_policy_queue.c.id, 

2545 suite=relation( 

2546 Suite, 

2547 primaryjoin=( 

2548 self.tbl_policy_queue.c.suite_id == self.tbl_suite.c.id 

2549 ), 

2550 ), 

2551 ), 

2552 ) 

2553 

2554 mapper( 

2555 PolicyQueueUpload, 

2556 self.tbl_policy_queue_upload, 

2557 properties=dict( 

2558 changes=relation(DBChange), 

2559 policy_queue=relation(PolicyQueue, backref="uploads"), 

2560 target_suite=relation(Suite), 

2561 source=relation(DBSource), 

2562 binaries=relation( 

2563 DBBinary, secondary=self.tbl_policy_queue_upload_binaries_map 

2564 ), 

2565 ), 

2566 ) 

2567 

2568 mapper( 

2569 PolicyQueueByhandFile, 

2570 self.tbl_policy_queue_byhand_file, 

2571 properties=dict( 

2572 upload=relation(PolicyQueueUpload, backref="byhand"), 

2573 ), 

2574 ) 

2575 

2576 mapper( 

2577 Priority, 

2578 self.tbl_priority, 

2579 properties=dict(priority_id=self.tbl_priority.c.id), 

2580 ) 

2581 

2582 mapper(SignatureHistory, self.tbl_signature_history) 

2583 

2584 mapper( 

2585 DBSource, 

2586 self.tbl_source, 

2587 properties=dict( 

2588 source_id=self.tbl_source.c.id, 

2589 version=self.tbl_source.c.version, 

2590 maintainer_id=self.tbl_source.c.maintainer, 

2591 poolfile_id=self.tbl_source.c.file, 

2592 poolfile=relation(PoolFile), 

2593 fingerprint_id=self.tbl_source.c.sig_fpr, 

2594 fingerprint=relation( 

2595 Fingerprint, 

2596 primaryjoin=( 

2597 self.tbl_source.c.sig_fpr == self.tbl_fingerprint.c.id 

2598 ), 

2599 ), 

2600 authorized_by_fingerprint=relation( 

2601 Fingerprint, 

2602 primaryjoin=( 

2603 self.tbl_source.c.authorized_by_fingerprint_id 

2604 == self.tbl_fingerprint.c.id 

2605 ), 

2606 ), 

2607 changedby_id=self.tbl_source.c.changedby, 

2608 srcfiles=relation( 

2609 DSCFile, 

2610 primaryjoin=(self.tbl_source.c.id == self.tbl_dsc_files.c.source), 

2611 back_populates="source", 

2612 ), 

2613 suites=relation( 

2614 Suite, 

2615 secondary=self.tbl_src_associations, 

2616 backref=backref("sources", lazy="dynamic"), 

2617 ), 

2618 uploaders=relation(Maintainer, secondary=self.tbl_src_uploaders), 

2619 key=relation( 

2620 SourceMetadata, 

2621 cascade="all", 

2622 collection_class=attribute_mapped_collection("key"), 

2623 back_populates="source", 

2624 ), 

2625 ), 

2626 ) 

2627 

2628 mapper( 

2629 SrcFormat, 

2630 self.tbl_src_format, 

2631 properties=dict( 

2632 src_format_id=self.tbl_src_format.c.id, 

2633 format_name=self.tbl_src_format.c.format_name, 

2634 ), 

2635 ) 

2636 

2637 mapper( 

2638 Suite, 

2639 self.tbl_suite, 

2640 properties=dict( 

2641 suite_id=self.tbl_suite.c.id, 

2642 policy_queue=relation( 

2643 PolicyQueue, 

2644 primaryjoin=( 

2645 self.tbl_suite.c.policy_queue_id == self.tbl_policy_queue.c.id 

2646 ), 

2647 ), 

2648 new_queue=relation( 

2649 PolicyQueue, 

2650 primaryjoin=( 

2651 self.tbl_suite.c.new_queue_id == self.tbl_policy_queue.c.id 

2652 ), 

2653 ), 

2654 debug_suite=relation(Suite, remote_side=[self.tbl_suite.c.id]), 

2655 copy_queues=relation( 

2656 BuildQueue, secondary=self.tbl_suite_build_queue_copy 

2657 ), 

2658 srcformats=relation( 

2659 SrcFormat, 

2660 secondary=self.tbl_suite_src_formats, 

2661 backref=backref("suites", lazy="dynamic"), 

2662 ), 

2663 archive=relation(Archive, backref="suites"), 

2664 acls=relation( 

2665 ACL, secondary=self.tbl_suite_acl_map, collection_class=set 

2666 ), 

2667 components=relation( 

2668 Component, 

2669 secondary=self.tbl_component_suite, 

2670 order_by=self.tbl_component.c.ordering, 

2671 backref=backref("suites"), 

2672 ), 

2673 architectures=relation( 

2674 Architecture, 

2675 secondary=self.tbl_suite_architectures, 

2676 backref=backref("suites"), 

2677 ), 

2678 ), 

2679 ) 

2680 

2681 mapper( 

2682 Uid, 

2683 self.tbl_uid, 

2684 properties=dict( 

2685 uid_id=self.tbl_uid.c.id, 

2686 fingerprint=relation(Fingerprint, back_populates="uid"), 

2687 ), 

2688 ) 

2689 

2690 mapper( 

2691 BinContents, 

2692 self.tbl_bin_contents, 

2693 properties=dict( 

2694 binary=relation( 

2695 DBBinary, backref=backref("contents", lazy="dynamic", cascade="all") 

2696 ), 

2697 file=self.tbl_bin_contents.c.file, 

2698 ), 

2699 ) 

2700 

2701 mapper( 

2702 SrcContents, 

2703 self.tbl_src_contents, 

2704 properties=dict( 

2705 source=relation( 

2706 DBSource, backref=backref("contents", lazy="dynamic", cascade="all") 

2707 ), 

2708 file=self.tbl_src_contents.c.file, 

2709 ), 

2710 ) 

2711 

2712 mapper( 

2713 MetadataKey, 

2714 self.tbl_metadata_keys, 

2715 properties=dict( 

2716 key_id=self.tbl_metadata_keys.c.key_id, key=self.tbl_metadata_keys.c.key 

2717 ), 

2718 ) 

2719 

2720 mapper( 

2721 BinaryMetadata, 

2722 self.tbl_binaries_metadata, 

2723 properties=dict( 

2724 binary_id=self.tbl_binaries_metadata.c.bin_id, 

2725 binary=relation(DBBinary, back_populates="key"), 

2726 key_id=self.tbl_binaries_metadata.c.key_id, 

2727 key=relation(MetadataKey), 

2728 value=self.tbl_binaries_metadata.c.value, 

2729 ), 

2730 ) 

2731 

2732 mapper( 

2733 SourceMetadata, 

2734 self.tbl_source_metadata, 

2735 properties=dict( 

2736 source_id=self.tbl_source_metadata.c.src_id, 

2737 source=relation(DBSource, back_populates="key"), 

2738 key_id=self.tbl_source_metadata.c.key_id, 

2739 key=relation(MetadataKey), 

2740 value=self.tbl_source_metadata.c.value, 

2741 ), 

2742 ) 

2743 

2744 mapper( 

2745 VersionCheck, 

2746 self.tbl_version_check, 

2747 properties=dict( 

2748 suite_id=self.tbl_version_check.c.suite, 

2749 suite=relation( 

2750 Suite, 

2751 primaryjoin=self.tbl_version_check.c.suite == self.tbl_suite.c.id, 

2752 ), 

2753 reference_id=self.tbl_version_check.c.reference, 

2754 reference=relation( 

2755 Suite, 

2756 primaryjoin=self.tbl_version_check.c.reference 

2757 == self.tbl_suite.c.id, 

2758 lazy="joined", 

2759 ), 

2760 ), 

2761 ) 

2762 

2763 ## Connection functions 

2764 def __createconn(self): 

2765 from .config import Config 

2766 

2767 cnf = Config() 

2768 if "DB::Service" in cnf: 2768 ↛ 2769 (line 2768 didn't jump to line 2769, because the condition on line 2768 was never true)

2769 connstr = "postgresql://service=%s" % cnf["DB::Service"] 

2770 elif "DB::Host" in cnf: 

2771 # TCP/IP 

2772 connstr = "postgresql://%s" % cnf["DB::Host"] 

2773 if "DB::Port" in cnf and cnf["DB::Port"] != "-1": 2773 ↛ 2774 (line 2773 didn't jump to line 2774, because the condition on line 2773 was never true)

2774 connstr += ":%s" % cnf["DB::Port"] 

2775 connstr += "/%s" % cnf["DB::Name"] 

2776 else: 

2777 # Unix Socket 

2778 connstr = "postgresql:///%s" % cnf["DB::Name"] 

2779 if "DB::Port" in cnf and cnf["DB::Port"] != "-1": 2779 ↛ 2780 (line 2779 didn't jump to line 2780, because the condition on line 2779 was never true)

2780 connstr += "?port=%s" % cnf["DB::Port"] 

2781 
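
# Depending on the configuration above, the resulting DSN looks like one of
# (values purely illustrative):
#   postgresql://service=projectb                 (DB::Service)
#   postgresql://db.example.org:5433/projectb     (DB::Host, optional DB::Port)
#   postgresql:///projectb?port=5433              (unix socket, optional DB::Port)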

2782 engine_args = {"echo": self.debug} 

2783 if "DB::PoolSize" in cnf: 

2784 engine_args["pool_size"] = int(cnf["DB::PoolSize"]) 

2785 if "DB::MaxOverflow" in cnf: 

2786 engine_args["max_overflow"] = int(cnf["DB::MaxOverflow"]) 

2787 # we don't support non-utf-8 connections 

2788 engine_args["client_encoding"] = "utf-8" 

2789 

2790 # Monkey patch a new dialect in in order to support service= syntax 

2791 import sqlalchemy.dialects.postgresql 

2792 from sqlalchemy.dialects.postgresql.psycopg2 import PGDialect_psycopg2 

2793 

2794 class PGDialect_psycopg2_dak(PGDialect_psycopg2): 

2795 def create_connect_args(self, url): 

2796 if str(url).startswith("postgresql://service="): 2796 ↛ 2798 (line 2796 didn't jump to line 2798, because the condition on line 2796 was never true)

2797 # Eww 

2798 servicename = str(url)[21:] 

2799 return (["service=%s" % servicename], {}) 

2800 else: 

2801 return PGDialect_psycopg2.create_connect_args(self, url) 

2802 

2803 sqlalchemy.dialects.postgresql.base.dialect = PGDialect_psycopg2_dak 

2804 

2805 try: 

2806 self.db_pg = create_engine(connstr, **engine_args) 

2807 self.db_smaker = sessionmaker( 

2808 bind=self.db_pg, autoflush=True, autocommit=False 

2809 ) 

2810 

2811 if self.db_meta is None: 

2812 self.__class__.db_meta = Base.metadata 

2813 self.__class__.db_meta.bind = self.db_pg 

2814 self.__setuptables() 

2815 self.__setupmappers() 

2816 

2817 except OperationalError as e: 

2818 from . import utils 

2819 

2820 utils.fubar("Cannot connect to database (%s)" % str(e)) 

2821 

2822 self.pid = os.getpid() 

2823 

2824 def session(self, work_mem=0): 

2825 """ 

2826 Returns a new session object. If a work_mem parameter is provided, a new 

2827 transaction is started and the work_mem parameter is set for this 

2828 transaction. The work_mem parameter is measured in MB. A default value 

2829 will be used if the parameter is not set. 

2830 """ 

2831 # reinitialize DBConn in new processes 

2832 if self.pid != os.getpid(): 

2833 self.__createconn() 

2834 session = self.db_smaker() 

2835 if work_mem > 0: 

2836 session.execute("SET LOCAL work_mem TO '%d MB'" % work_mem) 

2837 return session 

2838 

2839 

2840__all__.append("DBConn")
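
Finally, a sketch of how a script typically obtains a session from the shared DBConn state; the work_mem value of 64 MB is only an illustration:

from daklib.dbconn import DBConn, Suite

# DBConn uses shared state, so constructing it repeatedly reuses one engine.
session = DBConn().session(work_mem=64)
try:
    for suite in session.query(Suite).order_by(Suite.suite_name):
        print(suite.suite_name, suite.codename)
finally:
    session.close()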