Coverage for dak/process_policy.py: 81%
334 statements
#! /usr/bin/env python3
# vim:set et ts=4 sw=4:

"""Handles packages from policy queues

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@copyright: 2009 Frank Lichtenheld <djpig@debian.org>
@copyright: 2009 Mark Hymers <mhy@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

################################################################################

# <mhy> So how do we handle that at the moment?
# <stew> Probably incorrectly.

################################################################################

import datetime
import functools
import os
import re
import sys
import traceback
from collections.abc import Callable
from typing import NoReturn

import apt_pkg
import sqlalchemy.sql as sql
from sqlalchemy.orm.exc import NoResultFound

import daklib.announce
import daklib.upload
import daklib.utils
from daklib import daklog, utils
from daklib.archive import ArchiveTransaction, source_component_from_package_list
from daklib.config import Config
from daklib.dbconn import (
    ArchiveFile,
    Component,
    DBBinary,
    DBChange,
    DBConn,
    DBSource,
    Override,
    OverrideType,
    PolicyQueue,
    PolicyQueueUpload,
    PoolFile,
    Suite,
    get_mapped_component,
)
from daklib.externalsignature import check_upload_for_external_signature_request
from daklib.packagelist import PackageList
from daklib.urgencylog import UrgencyLog

# Globals
Options: apt_pkg.Configuration
Logger: daklog.Logger

################################################################################

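# A processing function (used via do_comments below) receives the upload to
# act on, the policy queue it currently sits in, the comment text from the
# COMMENTS file and the open ArchiveTransaction.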
ProcessingCallable = Callable[
    [PolicyQueueUpload, PolicyQueue, str, ArchiveTransaction], None
]


def do_comments(
    dir: str,
    srcqueue: PolicyQueue,
    opref: str,
    npref: str,
    line: str,
    fn: ProcessingCallable,
    transaction: ArchiveTransaction,
) -> None:
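    """Process comment files in `dir` whose names start with `opref`.

    A comment file is only acted on if its first line is exactly `line`
    (e.g. "OK" or "NOTOK" as passed from main()); the remaining lines become
    the comment text handed to `fn`. The filename part after `opref` selects
    the matching .changes uploads in `srcqueue`, and processed files are
    renamed from the `opref` prefix to `npref` when the two differ.
    """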
    session = transaction.session
    actions: list[tuple[PolicyQueueUpload, str]] = []
    for comm in [x for x in os.listdir(dir) if x.startswith(opref)]:
        with open(os.path.join(dir, comm)) as fd:
            lines = fd.readlines()
        if len(lines) == 0 or lines[0] != line + "\n":
            continue

        # If the ACCEPT includes a _<arch> we only accept that .changes.
        # Otherwise we accept all .changes that start with the given prefix
        changes_prefix = comm[len(opref) :]
        if changes_prefix.count("_") < 2:
            changes_prefix = changes_prefix + "_"
        else:
            changes_prefix = changes_prefix + ".changes"

        # We need to escape "_" as we use it with the LIKE operator (via the
        # SQLA startswith) later.
        changes_prefix = changes_prefix.replace("_", r"\_")

        uploads = (
            session.query(PolicyQueueUpload)
            .filter_by(policy_queue=srcqueue)
            .join(PolicyQueueUpload.changes)
            .filter(DBChange.changesname.startswith(changes_prefix))
            .order_by(PolicyQueueUpload.source_id)
        )
        reason = "".join(lines[1:])
        actions.extend((u, reason) for u in uploads)

        if opref != npref:
            newcomm = npref + comm[len(opref) :]
            newcomm = utils.find_next_free(os.path.join(dir, newcomm))
            transaction.fs.move(os.path.join(dir, comm), newcomm)

    actions.sort()

    for u, reason in actions:
        print("Processing changes file: {0}".format(u.changes.changesname))
        fn(u, srcqueue, reason, transaction)


################################################################################


def try_or_reject(function: ProcessingCallable) -> ProcessingCallable:
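    """Wrap a ProcessingCallable so that an exception during processing
    rejects the upload (with the traceback appended to the comments) instead
    of aborting, rolling the transaction back as needed. The transaction is
    committed unless --no-action was given.
    """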
    @functools.wraps(function)
    def wrapper(
        upload: PolicyQueueUpload,
        srcqueue: PolicyQueue,
        comments: str,
        transaction: ArchiveTransaction,
    ) -> None:
        try:
            function(upload, srcqueue, comments, transaction)
        except Exception:
            comments = "An exception was raised while processing the package:\n{0}\nOriginal comments:\n{1}".format(
                traceback.format_exc(), comments
            )
            try:
                transaction.rollback()
                real_comment_reject(upload, srcqueue, comments, transaction)
            except Exception:
                comments = "In addition an exception was raised while trying to reject the upload:\n{0}\nOriginal rejection:\n{1}".format(
                    traceback.format_exc(), comments
                )
                transaction.rollback()
                real_comment_reject(
                    upload, srcqueue, comments, transaction, notify=False
                )
        if not Options["No-Action"]:
            transaction.commit()
        else:
            transaction.rollback()

    return wrapper


################################################################################


@try_or_reject
def comment_accept(
    upload: PolicyQueueUpload,
    srcqueue: PolicyQueue,
    comments: str,
    transaction: ArchiveTransaction,
) -> None:
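    """ACCEPT handler: copy the upload's source and binaries into the target
    suite (or forward it to the target suite's own policy queue if it has
    one), including build queues and debug suites where applicable, then
    remove the upload from `srcqueue`.
    """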
    for byhand in upload.byhand:
        path = os.path.join(srcqueue.path, byhand.filename)
        if os.path.exists(path):
            raise Exception(
                "E: cannot ACCEPT upload with unprocessed byhand file {0}".format(
                    byhand.filename
                )
            )

    cnf = Config()

    fs = transaction.fs
    session = transaction.session
    changesname = upload.changes.changesname
    allow_tainted = srcqueue.suite.archive.tainted

    # We need overrides to get the target component
    overridesuite = upload.target_suite
    if overridesuite.overridesuite is not None:
        overridesuite = (
            session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()
        )

    def binary_component_func(db_binary: DBBinary) -> Component:
        section = db_binary.proxy["Section"]
        component_name = "main"
        if section.find("/") != -1:
            component_name = section.split("/", 1)[0]
        component = get_mapped_component(component_name, session=session)
        assert component is not None
        return component

    def is_debug_binary(db_binary: DBBinary) -> bool:
        return daklib.utils.is_in_debug_section(db_binary.proxy)

    def has_debug_binaries(upload: PolicyQueueUpload) -> bool:
        return any((is_debug_binary(x) for x in upload.binaries))

    def source_component_func(db_source: DBSource) -> Component:
        package_list = PackageList(db_source.proxy)
        component = source_component_from_package_list(
            package_list, upload.target_suite
        )
        if component is not None:
            component = get_mapped_component(component.component_name, session=session)
            assert component is not None
            return component

        # Fallback for packages without Package-List field
        query = (
            session.query(Override)
            .filter_by(suite=overridesuite, package=db_source.source)
            .join(OverrideType)
            .filter(OverrideType.overridetype == "dsc")
            .join(Component)
        )
        return query.one().component

    policy_queue = upload.target_suite.policy_queue
    if policy_queue == srcqueue:
        policy_queue = None

    all_target_suites = [
        upload.target_suite if policy_queue is None else policy_queue.suite
    ]
    if policy_queue is None or policy_queue.send_to_build_queues:
        all_target_suites.extend([q.suite for q in upload.target_suite.copy_queues])

    throw_away_binaries = False
    if upload.source is not None:
        source_component = source_component_func(upload.source)
        if upload.target_suite.suite_name in cnf.value_list(
            "Dinstall::ThrowAwayNewBinarySuites"
        ) and source_component.component_name in cnf.value_list(
            "Dinstall::ThrowAwayNewBinaryComponents"
        ):
            throw_away_binaries = True

    for suite in all_target_suites:
        debug_suite = suite.debug_suite

        if upload.source is not None:
            # If we have Source in this upload, let's include it in the
            # upload suite.
            transaction.copy_source(
                upload.source,
                suite,
                source_component,
                allow_tainted=allow_tainted,
            )

            if not throw_away_binaries:
                if debug_suite is not None and has_debug_binaries(upload):
                    # If we're handling a debug package, we also need to include
                    # the source in the debug suite as well.
                    transaction.copy_source(
                        upload.source,
                        debug_suite,
                        source_component_func(upload.source),
                        allow_tainted=allow_tainted,
                    )

        if not throw_away_binaries:
            for db_binary in upload.binaries:
                # Now, let's work out where to copy this guy to -- if it's
                # a debug binary, and the suite has a debug suite, let's go
                # ahead and target the debug suite rather than the stock
                # suite.
                copy_to_suite = suite
                if debug_suite is not None and is_debug_binary(db_binary):
                    copy_to_suite = debug_suite

                # build queues and debug suites may miss the source package
                # if this is a binary-only upload.
                if copy_to_suite != upload.target_suite:
                    transaction.copy_source(
                        db_binary.source,
                        copy_to_suite,
                        source_component_func(db_binary.source),
                        allow_tainted=allow_tainted,
                    )

                transaction.copy_binary(
                    db_binary,
                    copy_to_suite,
                    binary_component_func(db_binary),
                    allow_tainted=allow_tainted,
                    extra_archives=[upload.target_suite.archive],
                )

                check_upload_for_external_signature_request(
                    session, suite, copy_to_suite, db_binary
                )

        suite.update_last_changed()

    # Copy .changes if needed
    if policy_queue is None and upload.target_suite.copychanges:
        src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
        dst = os.path.join(upload.target_suite.path, upload.changes.changesname)
        fs.copy(src, dst, mode=upload.target_suite.archive.mode)

    # List of files in the queue directory
    queue_files = [changesname]
    chg = daklib.upload.Changes(
        upload.policy_queue.path, changesname, keyrings=[], require_signature=False
    )
    queue_files.extend(f.filename for f in chg.buildinfo_files)

    # TODO: similar code exists in archive.py's `ArchiveUpload._install_policy`
    if policy_queue is not None:
        # register upload in policy queue
        new_upload = PolicyQueueUpload()
        new_upload.policy_queue = policy_queue
        new_upload.target_suite = upload.target_suite
        new_upload.changes = upload.changes
        new_upload.source = upload.source
        new_upload.binaries = upload.binaries
        session.add(new_upload)
        session.flush()

        # copy .changes & similar to policy queue
        for fn in queue_files:
            src = os.path.join(upload.policy_queue.path, fn)
            dst = os.path.join(policy_queue.path, fn)
            transaction.fs.copy(src, dst, mode=policy_queue.change_perms)

    # Copy upload to Process-Policy::CopyDir
    # Used on security.d.o to sync accepted packages to ftp-master, but this
    # should eventually be replaced by something else.
    copydir = cnf.get("Process-Policy::CopyDir") or None
    if policy_queue is None and copydir is not None:
        mode = upload.target_suite.archive.mode
        if upload.source is not None:
            for f in [df.poolfile for df in upload.source.srcfiles]:
                dst = os.path.join(copydir, f.basename)
                if not os.path.exists(dst):
                    fs.copy(f.fullpath, dst, mode=mode)

        for db_binary in upload.binaries:
            f = db_binary.poolfile
            dst = os.path.join(copydir, f.basename)
            if not os.path.exists(dst):
                fs.copy(f.fullpath, dst, mode=mode)

        for fn in queue_files:
            src = os.path.join(upload.policy_queue.path, fn)
            dst = os.path.join(copydir, fn)
            # We check for `src` to exist as old uploads in policy queues
            # might still miss the `.buildinfo` files.
            if os.path.exists(src) and not os.path.exists(dst):
                fs.copy(src, dst, mode=mode)

    if policy_queue is None:
        utils.process_buildinfos(
            upload.policy_queue.path, chg.buildinfo_files, fs, Logger
        )

    if policy_queue is None and upload.source is not None and not Options["No-Action"]:
        urgency = upload.changes.urgency
        # As per policy 5.6.17, the urgency can be followed by a space and a
        # comment.  Extract only the urgency from the string.
        if " " in urgency:
            urgency, comment = urgency.split(" ", 1)
        if urgency not in cnf.value_list("Urgency::Valid"):
            urgency = cnf["Urgency::Default"]
        UrgencyLog().log(upload.source.source, upload.source.version, urgency)

    if policy_queue is None:
        print(" ACCEPT")
    else:
        print(" ACCEPT-TO-QUEUE")
    if not Options["No-Action"]:
        Logger.log(["Policy Queue ACCEPT", srcqueue.queue_name, changesname])

    if policy_queue is None:
        pu = get_processed_upload(upload)
        daklib.announce.announce_accept(pu)

    # TODO: code duplication. Similar code is in process-upload.
    # Move .changes to done
    now = datetime.datetime.now()
    donedir = os.path.join(cnf["Dir::Done"], now.strftime("%Y/%m/%d"))
    if policy_queue is None:
        for fn in queue_files:
            src = os.path.join(upload.policy_queue.path, fn)
            if os.path.exists(src):
                dst = os.path.join(donedir, fn)
                dst = utils.find_next_free(dst)
                fs.copy(src, dst, mode=0o644)

    if throw_away_binaries and upload.target_suite.archive.use_morgue:
        morguesubdir = cnf.get("New::MorgueSubDir", "new")

        utils.move_to_morgue(
            morguesubdir,
            [db_binary.poolfile.fullpath for db_binary in upload.binaries],
            fs,
            Logger,
        )

    remove_upload(upload, transaction)


################################################################################


@try_or_reject
def comment_reject(*args) -> None:
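    """REJECT handler: forward to real_comment_reject, marking the rejection
    as manual."""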
    real_comment_reject(*args, manual=True)


def real_comment_reject(
    upload: PolicyQueueUpload,
    srcqueue: PolicyQueue,
    comments: str,
    transaction: ArchiveTransaction,
    notify=True,
    *,
    manual=False,
) -> None:
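    """Reject `upload`: link its files and a .reason file with `comments`
    into Dir::Reject, send the rejection announcement unless `notify` is
    False, and remove the upload from the policy queue.
    """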
    cnf = Config()

    fs = transaction.fs
    session = transaction.session
    changesname = upload.changes.changesname
    queuedir = upload.policy_queue.path
    rejectdir = cnf["Dir::Reject"]

    ### Copy files to reject/

    poolfiles = [b.poolfile for b in upload.binaries]
    if upload.source is not None:
        poolfiles.extend([df.poolfile for df in upload.source.srcfiles])
    # Not beautiful...
    files = [
        af.path
        for af in session.query(ArchiveFile)
        .filter_by(archive=upload.policy_queue.suite.archive)
        .join(ArchiveFile.file)
        .filter(PoolFile.file_id.in_([f.file_id for f in poolfiles]))
    ]
    for byhand in upload.byhand:
        path = os.path.join(queuedir, byhand.filename)
        if os.path.exists(path):
            files.append(path)
    chg = daklib.upload.Changes(
        queuedir, changesname, keyrings=[], require_signature=False
    )
    for f in chg.buildinfo_files:
        path = os.path.join(queuedir, f.filename)
        if os.path.exists(path):
            files.append(path)
    files.append(os.path.join(queuedir, changesname))

    for fn in files:
        dst = utils.find_next_free(os.path.join(rejectdir, os.path.basename(fn)))
        fs.copy(fn, dst, link=True)

    ### Write reason

    dst = utils.find_next_free(
        os.path.join(rejectdir, "{0}.reason".format(changesname))
    )
    fh = fs.create(dst)
    fh.write(comments)
    fh.close()

    ### Send mail notification

    if notify:
        rejected_by = None
        reason = comments

        # Try to use From: from comment file if there is one.
        # This is not very elegant...
        match = re.match(r"\AFrom: ([^\n]+)\n\n", comments)
        if match:
            rejected_by = match.group(1)
            reason = "\n".join(comments.splitlines()[2:])

        pu = get_processed_upload(upload)
        daklib.announce.announce_reject(pu, reason, rejected_by)

    print(" REJECT")
    if not Options["No-Action"]:
        Logger.log(
            ["Policy Queue REJECT", srcqueue.queue_name, upload.changes.changesname]
        )

    changes = upload.changes
    remove_upload(upload, transaction)
    session.delete(changes)


################################################################################


def remove_upload(upload: PolicyQueueUpload, transaction: ArchiveTransaction) -> None:
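    """Remove the upload's byhand, .changes and .buildinfo files from the
    queue directory and delete the upload from the database."""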
    fs = transaction.fs
    session = transaction.session

    # Remove byhand and changes files. Binary and source packages will be
    # removed from {bin,src}_associations and eventually removed by clean-suites automatically.
    queuedir = upload.policy_queue.path
    for byhand in upload.byhand:
        path = os.path.join(queuedir, byhand.filename)
        if os.path.exists(path):
            fs.unlink(path)
        session.delete(byhand)

    chg = daklib.upload.Changes(
        queuedir, upload.changes.changesname, keyrings=[], require_signature=False
    )
    queue_files = [upload.changes.changesname]
    queue_files.extend(f.filename for f in chg.buildinfo_files)
    for fn in queue_files:
        # We check for `path` to exist as old uploads in policy queues
        # might still miss the `.buildinfo` files.
        path = os.path.join(queuedir, fn)
        if os.path.exists(path):
            fs.unlink(path)

    session.delete(upload)
    session.flush()


################################################################################


def get_processed_upload(upload: PolicyQueueUpload) -> daklib.announce.ProcessedUpload:
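    """Collect the metadata needed by daklib.announce into a ProcessedUpload
    object for the given policy queue upload."""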
    pu = daklib.announce.ProcessedUpload()

    pu.maintainer = upload.changes.maintainer
    pu.changed_by = upload.changes.changedby
    pu.fingerprint = upload.changes.fingerprint
    pu.authorized_by_fingerprint = upload.changes.authorized_by_fingerprint

    pu.suites = [upload.target_suite]
    pu.from_policy_suites = [upload.target_suite]

    changes_path = os.path.join(upload.policy_queue.path, upload.changes.changesname)
    with open(changes_path, "r") as fd:
        pu.changes = fd.read()
    pu.changes_filename = upload.changes.changesname
    pu.sourceful = upload.source is not None
    pu.source = upload.changes.source
    pu.version = upload.changes.version
    pu.architecture = upload.changes.architecture
    pu.bugs = upload.changes.closes

    pu.program = "process-policy"

    return pu


################################################################################


def remove_unreferenced_binaries(
    policy_queue: PolicyQueue, transaction: ArchiveTransaction
) -> None:
    """Remove binaries that are no longer referenced by an upload"""
    session = transaction.session
    suite = policy_queue.suite

    query = sql.text(
        """
    SELECT b.*
      FROM binaries b
      JOIN bin_associations ba ON b.id = ba.bin
     WHERE ba.suite = :suite_id
       AND NOT EXISTS (SELECT 1 FROM policy_queue_upload_binaries_map pqubm
                                JOIN policy_queue_upload pqu ON pqubm.policy_queue_upload_id = pqu.id
                               WHERE pqu.policy_queue_id = :policy_queue_id
                                 AND pqubm.binary_id = b.id)"""
    )
    binaries = (
        session.query(DBBinary)
        .from_statement(query)  # type: ignore[arg-type]
        .params(
            {
                "suite_id": policy_queue.suite_id,
                "policy_queue_id": policy_queue.policy_queue_id,
            }
        )
    )

    for binary in binaries:
        Logger.log(
            [
                "removed binary from policy queue",
                policy_queue.queue_name,
                binary.package,
                binary.version,
            ]
        )
        transaction.remove_binary(binary, suite)


def remove_unreferenced_sources(
    policy_queue: PolicyQueue, transaction: ArchiveTransaction
) -> None:
    """Remove sources that are no longer referenced by an upload or a binary"""
    session = transaction.session
    suite = policy_queue.suite

    query = sql.text(
        """
    SELECT s.*
      FROM source s
      JOIN src_associations sa ON s.id = sa.source
     WHERE sa.suite = :suite_id
       AND NOT EXISTS (SELECT 1 FROM policy_queue_upload pqu
                        WHERE pqu.policy_queue_id = :policy_queue_id
                          AND pqu.source_id = s.id)
       AND NOT EXISTS (SELECT 1 FROM binaries b
                                JOIN bin_associations ba ON b.id = ba.bin
                               WHERE b.source = s.id
                                 AND ba.suite = :suite_id)"""
    )
    sources = (
        session.query(DBSource)
        .from_statement(query)  # type: ignore[arg-type]
        .params(
            {
                "suite_id": policy_queue.suite_id,
                "policy_queue_id": policy_queue.policy_queue_id,
            }
        )
    )

    for source in sources:
        Logger.log(
            [
                "removed source from policy queue",
                policy_queue.queue_name,
                source.source,
                source.version,
            ]
        )
        transaction.remove_source(source, suite)


################################################################################


def usage(status=0) -> NoReturn:
    print("""Usage: dak process-policy QUEUE""")
    sys.exit(status)


################################################################################


def main() -> None:
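    """Entry point for `dak process-policy QUEUE`: process REJECT/ACCEPT
    comment files from the queue's COMMENTS directory and drop binaries and
    sources no longer referenced by any upload in the queue."""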
    global Options, Logger

    cnf = Config()
    session = DBConn().session()

    Arguments = [
        ("h", "help", "Process-Policy::Options::Help"),
        ("n", "no-action", "Process-Policy::Options::No-Action"),
    ]

    for i in ["help", "no-action"]:
        key = "Process-Policy::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    queue_name = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)  # type: ignore[attr-defined]

    Options = cnf.subtree("Process-Policy::Options")
    if Options["Help"]:
        usage()

    if len(queue_name) != 1:
        print("E: Specify exactly one policy queue")
        sys.exit(1)

    queue_name = queue_name[0]

    Logger = daklog.Logger("process-policy")
    if not Options["No-Action"]:
        urgencylog = UrgencyLog()

    with ArchiveTransaction() as transaction:
        session = transaction.session
        try:
            pq = session.query(PolicyQueue).filter_by(queue_name=queue_name).one()
        except NoResultFound:
            print("E: Cannot find policy queue %s" % queue_name)
            sys.exit(1)

        commentsdir = os.path.join(pq.path, "COMMENTS")
        # The comments stuff relies on being in the right directory
        os.chdir(pq.path)

        do_comments(
            commentsdir,
            pq,
            "REJECT.",
            "REJECTED.",
            "NOTOK",
            comment_reject,
            transaction,
        )
        do_comments(
            commentsdir, pq, "ACCEPT.", "ACCEPTED.", "OK", comment_accept, transaction
        )
        do_comments(
            commentsdir, pq, "ACCEPTED.", "ACCEPTED.", "OK", comment_accept, transaction
        )

        remove_unreferenced_binaries(pq, transaction)
        remove_unreferenced_sources(pq, transaction)

    if not Options["No-Action"]:
        urgencylog.close()


################################################################################

if __name__ == "__main__":
    main()