#! /usr/bin/env python3

3 """
4 Create all the Release files
5
6 @contact: Debian FTPMaster <ftpmaster@debian.org>
7 @copyright: 2011 Joerg Jaspert <joerg@debian.org>
8 @copyright: 2011 Mark Hymers <mhy@debian.org>
9 @license: GNU General Public License version 2 or later
10
11 """
import bz2
import errno
import gzip
import os
import os.path
import subprocess
import sys
import time

import apt_pkg
from sqlalchemy.orm import object_session

import daklib.gpg
from daklib import daklog, utils
from daklib.config import Config
from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS, DakProcessPool
from daklib.dbconn import Archive, DBConn, Suite, get_suite, get_suite_architectures
from daklib.regexes import (
    re_gensubrelease,
    re_includeinrelease_byhash,
    re_includeinrelease_plain,
)


Logger = None


def usage(exit_code=0):
    """Usage information"""

    print(
        """Usage: dak generate-releases [OPTIONS]
Generate the Release files

  -a, --archive=ARCHIVE      process suites in ARCHIVE
  -s, --suite=SUITE(s)       process this suite
                             Default: All suites not marked 'untouchable'
  -f, --force                Allow processing of untouchable suites
                             CAREFUL: Only to be used at (point) release time!
  -h, --help                 show this help and exit
  -q, --quiet                Don't output progress

SUITE can be a space separated list, e.g.
   --suite=unstable testing
"""
    )
    sys.exit(exit_code)


def sign_release_dir(suite, dirname):
    cnf = Config()

    if "Dinstall::SigningKeyring" in cnf or "Dinstall::SigningHomedir" in cnf:
        args = {
            "keyids": suite.signingkeys or [],
            "pubring": cnf.get("Dinstall::SigningPubKeyring") or None,
            "secring": cnf.get("Dinstall::SigningKeyring") or None,
            "homedir": cnf.get("Dinstall::SigningHomedir") or None,
            "passphrase_file": cnf.get("Dinstall::SigningPassphraseFile") or None,
        }

        relname = os.path.join(dirname, "Release")

        dest = os.path.join(dirname, "Release.gpg")
        if os.path.exists(dest):
            os.unlink(dest)

        inlinedest = os.path.join(dirname, "InRelease")
        if os.path.exists(inlinedest):
            os.unlink(inlinedest)

        # Release.gpg carries a detached signature, InRelease is the same
        # text clearsigned.
        with open(relname, "r") as stdin:
            with open(dest, "w") as stdout:
                daklib.gpg.sign(stdin, stdout, inline=False, **args)
            stdin.seek(0)
            with open(inlinedest, "w") as stdout:
                daklib.gpg.sign(stdin, stdout, inline=True, **args)


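# Helper classes for reading xz- and zstd-compressed indices: they only
# provide the read() method used further down and shell out to the external
# tools for decompression.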
class XzFile:
    def __init__(self, filename, mode="r"):
        self.filename = filename

    def read(self):
        with open(self.filename, "rb") as stdin:
            return subprocess.check_output(["xz", "-d"], stdin=stdin)


class ZstdFile:
    def __init__(self, filename, mode="r"):
        self.filename = filename

    def read(self):
        with open(self.filename, "rb") as stdin:
            return subprocess.check_output(["zstd", "--decompress"], stdin=stdin)


class HashFunc:
    def __init__(self, release_field, func, db_name):
        self.release_field = release_field
        self.func = func
        self.db_name = db_name


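# Hash functions that can appear in the Release file.  Only those listed in a
# suite's "checksums" setting are used; the last enabled entry (the strongest
# hash) also determines where the by-hash base symlink points.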
RELEASE_HASHES = [
    HashFunc("MD5Sum", apt_pkg.md5sum, "md5sum"),
    HashFunc("SHA1", apt_pkg.sha1sum, "sha1"),
    HashFunc("SHA256", apt_pkg.sha256sum, "sha256"),
]


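# ReleaseWriter produces the Release file for a single suite: it writes the
# per-component/architecture Release files, records length and checksums of
# every index file, maintains the by-hash links and has the result signed.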
class ReleaseWriter:
    def __init__(self, suite):
        self.suite = suite

    def suite_path(self):
        """
        Absolute path to the suite-specific files.
        """
        suite_suffix = utils.suite_suffix(self.suite.suite_name)

        return os.path.join(
            self.suite.archive.path, "dists", self.suite.suite_name, suite_suffix
        )

    def suite_release_path(self):
        """
        Absolute path where Release files are physically stored.
        This should be a path that sorts after the dists/ directory.
        """
        suite_suffix = utils.suite_suffix(self.suite.suite_name)

        return os.path.join(
            self.suite.archive.path,
            "zzz-dists",
            self.suite.codename or self.suite.suite_name,
            suite_suffix,
        )
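
    # Note that the Release files are stored under zzz-dists/ and only
    # symlinked into dists/ (see create_release_symlinks below), presumably so
    # that they sort after the index files they reference.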

    def create_release_symlinks(self):
        """
        Create symlinks for Release files.
        This creates the symlinks for Release files in the `suite_path`
        to the actual files in `suite_release_path`.
        """
        relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
        for f in ("Release", "Release.gpg", "InRelease"):
            source = os.path.join(relpath, f)
            dest = os.path.join(self.suite_path(), f)
            if os.path.lexists(dest):
                if not os.path.islink(dest):
                    os.unlink(dest)
                elif os.readlink(dest) == source:
                    continue
                else:
                    os.unlink(dest)
            os.symlink(source, dest)

    def create_output_directories(self):
        for path in (self.suite_path(), self.suite_release_path()):
            try:
                os.makedirs(path)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
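
    # The helpers below maintain the by-hash copies of the index files.  The
    # hashfile table records which copies are referenced by the current
    # Release file; copies that are no longer referenced are only marked with
    # a timestamp here, not removed.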

    def _update_hashfile_table(self, session, fileinfo, hashes):
        # Mark every by-hash file of this suite as unreferenced; entries that
        # are still listed in the new Release file are un-marked again below.
        query = """
            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
            WHERE suite_id = :id AND unreferenced IS NULL"""
        session.execute(query, {"id": self.suite.suite_id})

        query = "SELECT path FROM hashfile WHERE suite_id = :id"
        q = session.execute(query, {"id": self.suite.suite_id})
        known_hashfiles = set(row[0] for row in q)
        updated = set()
        new = set()

        # Collect the by-hash paths referenced by the new Release file.
        for filename in fileinfo:
            if not os.path.lexists(filename):
                # probably an uncompressed index that is not shipped on disk
                continue
            byhashdir = os.path.join(os.path.dirname(filename), "by-hash")
            for h in hashes:
                field = h.release_field
                hashfile = os.path.join(byhashdir, field, fileinfo[filename][field])
                if hashfile in known_hashfiles:
                    updated.add(hashfile)
                else:
                    new.add(hashfile)

        if updated:
            session.execute(
                """
                UPDATE hashfile SET unreferenced = NULL
                WHERE path = ANY(:p) AND suite_id = :id""",
                {"p": list(updated), "id": self.suite.suite_id},
            )
        if new:
            session.execute(
                """
                INSERT INTO hashfile (path, suite_id)
                VALUES (:p, :id)""",
                [{"p": hashfile, "id": self.suite.suite_id} for hashfile in new],
            )

        session.commit()

    def _make_byhash_links(self, fileinfo, hashes):
        # Create hardlinks in the by-hash directories for all listed files
        for filename in fileinfo:
            if not os.path.lexists(filename):
                # probably an uncompressed index that is not shipped on disk
                continue

            for h in hashes:
                field = h.release_field
                hashfile = os.path.join(
                    os.path.dirname(filename),
                    "by-hash",
                    field,
                    fileinfo[filename][field],
                )
                try:
                    os.makedirs(os.path.dirname(hashfile))
                except OSError as exc:
                    if exc.errno != errno.EEXIST:
                        raise
                try:
                    os.link(filename, hashfile)
                except OSError as exc:
                    if exc.errno != errno.EEXIST:
                        raise
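
    # After linking, every index is reachable both under its plain name and
    # under by-hash/<Hash>/<digest> next to it, which is the location used by
    # clients with Acquire-By-Hash enabled.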

    def _make_byhash_base_symlink(self, fileinfo, hashes):
        # Replace each plain file with a symlink to its by-hash copy for the
        # last (strongest) configured hash
        for filename in fileinfo:
            if not os.path.lexists(filename):
                # probably an uncompressed index that is not shipped on disk
                continue

            besthash = hashes[-1]
            field = besthash.release_field
            hashfilebase = os.path.join("by-hash", field, fileinfo[filename][field])
            hashfile = os.path.join(os.path.dirname(filename), hashfilebase)

            assert os.path.exists(hashfile), "by-hash file {} is missing".format(
                hashfile
            )

            os.unlink(filename)
            os.symlink(hashfilebase, filename)

    def generate_release_files(self):
        """
        Generate Release files for the given suite
        """

        suite = self.suite
        session = object_session(suite)

        # attribs maps Release file field names to the suite attributes that
        # provide their values
        attribs = (
            ("Origin", "origin"),
            ("Label", "label"),
            ("Suite", "release_suite_output"),
            ("Version", "version"),
            ("Codename", "codename"),
            ("Changelogs", "changelog_url"),
        )

        # Fields also written to the per-component/architecture Release files
        subattribs = (
            ("Archive", "suite_name"),
            ("Origin", "origin"),
            ("Label", "label"),
            ("Version", "version"),
        )

        # Boolean fields are emitted as "<field>: yes" when set on the suite
        boolattrs = (
            ("NotAutomatic", "notautomatic"),
            ("ButAutomaticUpgrades", "butautomaticupgrades"),
            ("Acquire-By-Hash", "byhash"),
        )

        cnf = Config()
        cnf_suite_suffix = cnf.get("Dinstall::SuiteSuffix", "").rstrip("/")

        suite_suffix = utils.suite_suffix(suite.suite_name)

        self.create_output_directories()
        self.create_release_symlinks()

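        # The Release file is assembled as "Release.new" and renamed into
        # place only after all entries have been written.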
        outfile = os.path.join(self.suite_release_path(), "Release")
        out = open(outfile + ".new", "w")

        for key, dbfield in attribs:
            # Version and Changelogs are optional; skip them when the suite
            # does not set them
            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
                continue

            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))

        out.write(
            "Date: %s\n"
            % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time())))
        )

        if suite.validtime:
            validtime = float(suite.validtime)
            out.write(
                "Valid-Until: %s\n"
                % (
                    time.strftime(
                        "%a, %d %b %Y %H:%M:%S UTC",
                        time.gmtime(time.time() + validtime),
                    )
                )
            )

        for key, dbfield in boolattrs:
            if getattr(suite, dbfield, False):
                out.write("%s: yes\n" % (key))
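
        # The fields written so far look roughly like this (values are only
        # illustrative):
        #
        #   Origin: Debian
        #   Label: Debian
        #   Suite: unstable
        #   Codename: sid
        #   Date: Sat, 01 Jan 2022 00:00:00 UTC
        #   Acquire-By-Hash: yes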

        skip_arch_all = True
        if (
            suite.separate_contents_architecture_all
            or suite.separate_packages_architecture_all
        ):
            # The suite ships separate indices for "Architecture: all", so
            # "all" has to be listed in the Architectures field.  Splitting
            # the Packages indices requires the Contents indices to be split
            # as well (hence the assert); if only the Contents are split, the
            # No-Support-for-Architecture-all field is written so clients do
            # not look for a binary-all Packages index.
            assert suite.separate_contents_architecture_all
            skip_arch_all = False

            if not suite.separate_packages_architecture_all:
                out.write("No-Support-for-Architecture-all: Packages\n")

        architectures = get_suite_architectures(
            suite.suite_name, skipall=skip_arch_all, skipsrc=True, session=session
        )

        out.write(
            "Architectures: %s\n" % (" ".join(a.arch_string for a in architectures))
        )

        components = [c.component_name for c in suite.components]

        out.write("Components: %s\n" % (" ".join(components)))

        if getattr(suite, "description") is not None:
            out.write("Description: %s\n" % suite.description)

        for comp in components:
            for dirpath, dirnames, filenames in os.walk(
                os.path.join(self.suite_path(), comp), topdown=True
            ):
                if not re_gensubrelease.match(dirpath):
                    continue

                subfile = os.path.join(dirpath, "Release")
                subrel = open(subfile + ".new", "w")

                for key, dbfield in subattribs:
                    if getattr(suite, dbfield) is not None:
                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))

                for key, dbfield in boolattrs:
                    if getattr(suite, dbfield, False):
                        subrel.write("%s: yes\n" % (key))

                subrel.write("Component: %s%s\n" % (suite_suffix, comp))

                # The architecture is taken from the directory name,
                # e.g. "binary-amd64" or "source"
                arch = os.path.split(dirpath)[-1]
                if arch.startswith("binary-"):
                    arch = arch[7:]

                subrel.write("Architecture: %s\n" % (arch))
                subrel.close()

                os.rename(subfile + ".new", subfile)

        # With the per-architecture Release files in place, collect the
        # lengths and checksums of all files to be listed in the main Release
        # file.
        oldcwd = os.getcwd()

        os.chdir(self.suite_path())

        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]

        fileinfo = {}
        fileinfo_byhash = {}

        uncompnotseen = {}

        for dirpath, dirnames, filenames in os.walk(
            ".", followlinks=True, topdown=True
        ):
            # If Dinstall::SuiteSuffix is configured and the suffix directory
            # is merely a symlink back to ".", do not descend into it so the
            # same files are not listed twice.
            if cnf_suite_suffix:
                path = os.path.join(dirpath, cnf_suite_suffix)
                try:
                    target = os.readlink(path)
                    if target == ".":
                        dirnames.remove(cnf_suite_suffix)
                except (OSError, ValueError):
                    pass
            for entry in filenames:
                if dirpath == "." and entry in ["Release", "Release.gpg", "InRelease"]:
                    continue

                filename = os.path.join(dirpath.lstrip("./"), entry)

                if re_includeinrelease_byhash.match(entry):
                    fileinfo[filename] = fileinfo_byhash[filename] = {}
                elif re_includeinrelease_plain.match(entry):
                    fileinfo[filename] = {}
                # Everything else is not listed in the Release file
                else:
                    continue

                with open(filename, "rb") as fd:
                    contents = fd.read()

                # If this is a compressed index and we have not yet seen the
                # corresponding uncompressed file, remember how to decompress
                # it so its hashes can be computed below.
                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (XzFile, filename)
                elif entry.endswith(".zst") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (ZstdFile, filename)

                fileinfo[filename]["len"] = len(contents)

                for hf in hashes:
                    fileinfo[filename][hf.release_field] = hf.func(contents)

        for filename, comp in uncompnotseen.items():
            # If we already saw the uncompressed file there is nothing to do
            if filename in fileinfo:
                continue

            fileinfo[filename] = {}

            # comp is (decompressor class, path of the compressed file)
            contents = comp[0](comp[1], "r").read()

            fileinfo[filename]["len"] = len(contents)

            for hf in hashes:
                fileinfo[filename][hf.release_field] = hf.func(contents)
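
        # Each checksum section lists one "<hash> <size> <path>" line per
        # file, for example (digest shortened, values illustrative):
        #
        #   SHA256:
        #    1f8a0b7... 11238723 main/binary-amd64/Packages.xz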
        for field in sorted(h.release_field for h in hashes):
            out.write("%s:\n" % field)
            for filename in sorted(fileinfo.keys()):
                out.write(
                    " %s %8d %s\n"
                    % (fileinfo[filename][field], fileinfo[filename]["len"], filename)
                )

        out.close()
        os.rename(outfile + ".new", outfile)

        self._update_hashfile_table(session, fileinfo_byhash, hashes)
        self._make_byhash_links(fileinfo_byhash, hashes)
        self._make_byhash_base_symlink(fileinfo_byhash, hashes)

        sign_release_dir(suite, os.path.dirname(outfile))

        os.chdir(oldcwd)

        return


def main():
    global Logger

    cnf = Config()

    for i in ["Help", "Suite", "Force", "Quiet"]:
        key = "Generate-Releases::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    Arguments = [
        ("h", "help", "Generate-Releases::Options::Help"),
        ("a", "archive", "Generate-Releases::Options::Archive", "HasArg"),
        ("s", "suite", "Generate-Releases::Options::Suite"),
        ("f", "force", "Generate-Releases::Options::Force"),
        ("q", "quiet", "Generate-Releases::Options::Quiet"),
        ("o", "option", "", "ArbItem"),
    ]

    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
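    # suite_names holds the positional arguments left after option parsing;
    # when --suite is given these are the suites to process.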
    Options = cnf.subtree("Generate-Releases::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger("generate-releases")
    pool = DakProcessPool()

    session = DBConn().session()

    if Options["Suite"]:
        suites = []
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print("cannot find suite %s" % s)
                Logger.log(["cannot find suite %s" % s])
    else:
        query = session.query(Suite).filter(Suite.untouchable == False)
        if "Archive" in Options:
            archive_names = utils.split_args(Options["Archive"])
            query = query.join(Suite.archive).filter(
                Archive.archive_name.in_(archive_names)
            )
        suites = query.all()

    for s in suites:
        # Skip untouchable suites unless --force was given
        if s.untouchable and not Options["Force"]:
            print("Skipping %s (untouchable)" % s.suite_name)
            continue

        if not Options["Quiet"]:
            print("Processing %s" % s.suite_name)
        Logger.log(["Processing release file for Suite: %s" % (s.suite_name)])
        pool.apply_async(generate_helper, (s.suite_id,))
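
    # Each suite is handled by generate_helper() in a worker subprocess; stop
    # accepting new work and wait for all workers to finish.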
    pool.close()
    pool.join()

    retcode = pool.overall_status()

    if retcode > 0:
        # Log the per-suite results so the breakage can be tracked down
        Logger.log(
            [
                "Release file generation broken: %s"
                % (",".join([str(x[1]) for x in pool.results]))
            ]
        )

    Logger.close()

    sys.exit(retcode)


def generate_helper(suite_id):
    """
    This function is called in a new subprocess.
    """
    session = DBConn().session()
    suite = Suite.get(suite_id, session)

    rw = ReleaseWriter(suite)
    rw.generate_release_files()

    return (PROC_STATUS_SUCCESS, "Release file written for %s" % suite.suite_name)


if __name__ == "__main__":
    main()