"""
Create all the Release files

@contact: Debian FTPMaster <ftpmaster@debian.org>
@copyright: 2011 Joerg Jaspert <joerg@debian.org>
@copyright: 2011 Mark Hymers <mhy@debian.org>
@license: GNU General Public License version 2 or later
"""
import sys
import os
import os.path
import time
import gzip
import bz2
import errno
import apt_pkg
import subprocess
from sqlalchemy.orm import object_session

import daklib.gpg
from daklib import utils, daklog
from daklib.regexes import re_gensubrelease, re_includeinrelease_byhash, re_includeinrelease_plain
from daklib.dbconn import *
from daklib.config import Config
from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS


Logger = None                  #: Our logging object


def usage(exit_code=0):
    """Usage information"""

    print("""Usage: dak generate-releases [OPTIONS]
Generate the Release files

  -a, --archive=ARCHIVE      process suites in ARCHIVE
  -s, --suite=SUITE(s)       process this suite
                             Default: All suites not marked 'untouchable'
  -f, --force                Allow processing of untouchable suites
                             CAREFUL: Only to be used at (point) release time!
  -h, --help                 show this help and exit
  -q, --quiet                Don't output progress

SUITE can be a space separated list, e.g.
   --suite=unstable testing
""")
    sys.exit(exit_code)


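# Sign the Release file in `dirname`: write a detached signature to
# Release.gpg and an inline, clearsigned copy to InRelease, using the
# signing configuration from Dinstall::Signing*.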
def sign_release_dir(suite, dirname):
    cnf = Config()

    if 'Dinstall::SigningKeyring' in cnf or 'Dinstall::SigningHomedir' in cnf:
        args = {
            'keyids': suite.signingkeys or [],
            'pubring': cnf.get('Dinstall::SigningPubKeyring') or None,
            'secring': cnf.get('Dinstall::SigningKeyring') or None,
            'homedir': cnf.get('Dinstall::SigningHomedir') or None,
            'passphrase_file': cnf.get('Dinstall::SigningPassphraseFile') or None,
        }

        relname = os.path.join(dirname, 'Release')

        dest = os.path.join(dirname, 'Release.gpg')
        if os.path.exists(dest):
            os.unlink(dest)

        inlinedest = os.path.join(dirname, 'InRelease')
        if os.path.exists(inlinedest):
            os.unlink(inlinedest)

        with open(relname, 'r') as stdin:
            with open(dest, 'w') as stdout:
                daklib.gpg.sign(stdin, stdout, inline=False, **args)
            stdin.seek(0)
            with open(inlinedest, 'w') as stdout:
                daklib.gpg.sign(stdin, stdout, inline=True, **args)


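# Minimal file-like wrappers for formats the stdlib does not decompress for
# us here; they only need to expose read(), which pipes the file through the
# external decompressor.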
class XzFile:
    def __init__(self, filename, mode='r'):
        self.filename = filename

    def read(self):
        with open(self.filename, 'rb') as stdin:
            return subprocess.check_output(['xz', '-d'], stdin=stdin)


class ZstdFile:
    def __init__(self, filename, mode='r'):
        self.filename = filename

    def read(self):
        with open(self.filename, 'rb') as stdin:
            return subprocess.check_output(['zstd', '--decompress'], stdin=stdin)


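# Checksum algorithms that can appear in a Release file.  Which ones are
# actually emitted for a suite is driven by suite.checksums (see
# generate_release_files below).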
class HashFunc:
    def __init__(self, release_field, func, db_name):
        self.release_field = release_field
        self.func = func
        self.db_name = db_name


RELEASE_HASHES = [
    HashFunc('MD5Sum', apt_pkg.md5sum, 'md5sum'),
    HashFunc('SHA1', apt_pkg.sha1sum, 'sha1'),
    HashFunc('SHA256', apt_pkg.sha256sum, 'sha256'),
]


class ReleaseWriter:
    def __init__(self, suite):
        self.suite = suite

    def suite_path(self):
        """
        Absolute path to the suite-specific files.
        """
        suite_suffix = utils.suite_suffix(self.suite.suite_name)

        return os.path.join(self.suite.archive.path, 'dists',
                            self.suite.suite_name, suite_suffix)

    def suite_release_path(self):
        """
        Absolute path where Release files are physically stored.
        This should be a path that sorts after the dists/ directory.
        """
        suite_suffix = utils.suite_suffix(self.suite.suite_name)

        return os.path.join(self.suite.archive.path, 'zzz-dists',
                            self.suite.codename or self.suite.suite_name, suite_suffix)

    def create_release_symlinks(self):
        """
        Create symlinks for Release files.
        This creates the symlinks for Release files in the `suite_path`
        to the actual files in `suite_release_path`.
        """
        relpath = os.path.relpath(self.suite_release_path(), self.suite_path())
        for f in ("Release", "Release.gpg", "InRelease"):
            source = os.path.join(relpath, f)
            dest = os.path.join(self.suite_path(), f)
            if os.path.lexists(dest):
                if not os.path.islink(dest):
                    os.unlink(dest)
                elif os.readlink(dest) == source:
                    continue
                else:
                    os.unlink(dest)
            os.symlink(source, dest)

    def create_output_directories(self):
        for path in (self.suite_path(), self.suite_release_path()):
            try:
                os.makedirs(path)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

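    # The helpers below maintain APT's Acquire-By-Hash layout: every index is
    # also exposed as by-hash/<HashName>/<digest>, and the hashfile table
    # records which of those paths are still referenced.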
    def _update_hashfile_table(self, session, fileinfo, hashes):
        # Mark all by-hash files as unreferenced; the ones we still reference
        # are reset again below.
        query = """
            UPDATE hashfile SET unreferenced = CURRENT_TIMESTAMP
            WHERE suite_id = :id AND unreferenced IS NULL"""
        session.execute(query, {'id': self.suite.suite_id})

        query = "SELECT path FROM hashfile WHERE suite_id = :id"
        q = session.execute(query, {'id': self.suite.suite_id})
        known_hashfiles = set(row[0] for row in q)
        updated = set()
        new = set()

        # Collect the by-hash paths referenced by the new Release file
        for filename in fileinfo:
            if not os.path.lexists(filename):
                # probably an uncompressed index we didn't generate
                continue
            byhashdir = os.path.join(os.path.dirname(filename), 'by-hash')
            for h in hashes:
                field = h.release_field
                hashfile = os.path.join(byhashdir, field, fileinfo[filename][field])
                if hashfile in known_hashfiles:
                    updated.add(hashfile)
                else:
                    new.add(hashfile)

        if updated:
            session.execute("""
                UPDATE hashfile SET unreferenced = NULL
                WHERE path = ANY(:p) AND suite_id = :id""",
                {'p': list(updated), 'id': self.suite.suite_id})
        if new:
            session.execute("""
                INSERT INTO hashfile (path, suite_id)
                VALUES (:p, :id)""",
                [{'p': hashfile, 'id': self.suite.suite_id} for hashfile in new])

        session.commit()

    def _make_byhash_links(self, fileinfo, hashes):
        # Create hardlinks in by-hash directories
        for filename in fileinfo:
            if not os.path.lexists(filename):
                # probably an uncompressed index we didn't generate
                continue

            for h in hashes:
                field = h.release_field
                hashfile = os.path.join(os.path.dirname(filename), 'by-hash', field, fileinfo[filename][field])
                try:
                    os.makedirs(os.path.dirname(hashfile))
                except OSError as exc:
                    if exc.errno != errno.EEXIST:
                        raise
                try:
                    os.link(filename, hashfile)
                except OSError as exc:
                    if exc.errno != errno.EEXIST:
                        raise

    def _make_byhash_base_symlink(self, fileinfo, hashes):
        # Replace the plain index files with symlinks into by-hash, pointing
        # at the copy for the strongest configured hash.
        for filename in fileinfo:
            if not os.path.lexists(filename):
                # probably an uncompressed index we didn't generate
                continue

            besthash = hashes[-1]
            field = besthash.release_field
            hashfilebase = os.path.join('by-hash', field, fileinfo[filename][field])
            hashfile = os.path.join(os.path.dirname(filename), hashfilebase)

            assert os.path.exists(hashfile), 'by-hash file {} is missing'.format(hashfile)

            os.unlink(filename)
            os.symlink(hashfilebase, filename)

    def generate_release_files(self):
        """
        Generate Release files for the given suite
        """

        suite = self.suite
        session = object_session(suite)

        # Attribs is a tuple of Release field names and the database
        # attributes used to fill them in
        attribs = (('Origin', 'origin'),
                   ('Label', 'label'),
                   ('Suite', 'release_suite_output'),
                   ('Version', 'version'),
                   ('Codename', 'codename'),
                   ('Changelogs', 'changelog_url'),
                   )

        # A "Sub" Release file has slightly different fields
        subattribs = (('Archive', 'suite_name'),
                      ('Origin', 'origin'),
                      ('Label', 'label'),
                      ('Version', 'version'))

        # Boolean fields: if the database value is true, write out "yes"
        boolattrs = (('NotAutomatic', 'notautomatic'),
                     ('ButAutomaticUpgrades', 'butautomaticupgrades'),
                     ('Acquire-By-Hash', 'byhash'),
                     )

        cnf = Config()
        cnf_suite_suffix = cnf.get("Dinstall::SuiteSuffix", "").rstrip("/")

        suite_suffix = utils.suite_suffix(suite.suite_name)

        self.create_output_directories()
        self.create_release_symlinks()

        outfile = os.path.join(self.suite_release_path(), "Release")
        out = open(outfile + ".new", "w")

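        # Header fields are written first; the per-file checksum lists are
        # appended after the archive tree has been walked.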
        for key, dbfield in attribs:
            # Skip fields we would otherwise write out as "None"
            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
                continue

            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))

        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))

        if suite.validtime:
            validtime = float(suite.validtime)
            out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time() + validtime))))

        for key, dbfield in boolattrs:
            if getattr(suite, dbfield, False):
                out.write("%s: yes\n" % (key))

        skip_arch_all = True
        if suite.separate_contents_architecture_all or suite.separate_packages_architecture_all:
            # The "Architecture: all" indices are published separately.  Per
            # the repository format, clients are not expected to handle
            # separate Packages for arch:all without separate Contents for
            # arch:all, hence the assertion.
            assert suite.separate_contents_architecture_all
            skip_arch_all = False

            if not suite.separate_packages_architecture_all:
                out.write("No-Support-for-Architecture-all: Packages\n")

        architectures = get_suite_architectures(suite.suite_name, skipall=skip_arch_all, skipsrc=True, session=session)

        out.write("Architectures: %s\n" % (" ".join(a.arch_string for a in architectures)))

        components = [c.component_name for c in suite.components]

        out.write("Components: %s\n" % (" ".join(components)))

        # Description is deliberately written here, after Components, rather
        # than with the other suite fields above
        if suite.description is not None:
            out.write("Description: %s\n" % suite.description)

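        # Write a small Release file into each per-architecture directory
        # (e.g. main/binary-amd64) matched by re_gensubrelease.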
        for comp in components:
            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
                if not re_gensubrelease.match(dirpath):
                    continue

                subfile = os.path.join(dirpath, "Release")
                subrel = open(subfile + '.new', "w")

                for key, dbfield in subattribs:
                    if getattr(suite, dbfield) is not None:
                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))

                for key, dbfield in boolattrs:
                    if getattr(suite, dbfield, False):
                        subrel.write("%s: yes\n" % (key))

                subrel.write("Component: %s%s\n" % (suite_suffix, comp))

                # Derive the architecture from the directory name until the
                # suite/component/arch layout is available from the database
                arch = os.path.split(dirpath)[-1]
                if arch.startswith('binary-'):
                    arch = arch[7:]

                subrel.write("Architecture: %s\n" % (arch))
                subrel.close()

                os.rename(subfile + '.new', subfile)

        # Now that the groundwork is done, add the files with their
        # checksums to the main Release file
        oldcwd = os.getcwd()

        os.chdir(self.suite_path())

        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]

        fileinfo = {}
        fileinfo_byhash = {}

        uncompnotseen = {}

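        # Walk the published tree and collect size and checksums for every
        # file that belongs in the Release file; entries matching the by-hash
        # regex are additionally tracked in fileinfo_byhash.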
        for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
            # If the configured SuiteSuffix is a symlink pointing back to
            # ".", do not descend into it.
            if cnf_suite_suffix:
                path = os.path.join(dirpath, cnf_suite_suffix)
                try:
                    target = os.readlink(path)
                    if target == ".":
                        dirnames.remove(cnf_suite_suffix)
                except (OSError, ValueError):
                    pass
            for entry in filenames:
                if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
                    continue

                filename = os.path.join(dirpath.lstrip('./'), entry)

                if re_includeinrelease_byhash.match(entry):
                    fileinfo[filename] = fileinfo_byhash[filename] = {}
                elif re_includeinrelease_plain.match(entry):
                    fileinfo[filename] = {}
                # Skip anything else
                else:
                    continue

                with open(filename, 'rb') as fd:
                    contents = fd.read()

                # If this is a compressed file and we have not seen the
                # uncompressed variant yet, remember it so checksums of the
                # uncompressed contents can be added later
                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (XzFile, filename)
                elif entry.endswith(".zst") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (ZstdFile, filename)

                fileinfo[filename]['len'] = len(contents)

                for hf in hashes:
                    fileinfo[filename][hf.release_field] = hf.func(contents)

        for filename, comp in uncompnotseen.items():
            # If we already saw the uncompressed file, we do not need to do
            # anything again
            if filename in fileinfo:
                continue

            fileinfo[filename] = {}

            # comp is (file handler, filename of the compressed file)
            contents = comp[0](comp[1], 'r').read()

            fileinfo[filename]['len'] = len(contents)

            for hf in hashes:
                fileinfo[filename][hf.release_field] = hf.func(contents)

        for field in sorted(h.release_field for h in hashes):
            out.write('%s:\n' % field)
            for filename in sorted(fileinfo.keys()):
                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))

        out.close()
        os.rename(outfile + '.new', outfile)

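        # Update the by-hash bookkeeping for the files just listed, then sign
        # the finished Release file.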
        self._update_hashfile_table(session, fileinfo_byhash, hashes)
        self._make_byhash_links(fileinfo_byhash, hashes)
        self._make_byhash_base_symlink(fileinfo_byhash, hashes)

        sign_release_dir(suite, os.path.dirname(outfile))

        os.chdir(oldcwd)

        return


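# Command-line entry point: parse options and fan the per-suite work out to a
# pool of worker processes.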
def main():
    global Logger

    cnf = Config()

    for i in ["Help", "Suite", "Force", "Quiet"]:
        key = "Generate-Releases::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    Arguments = [('h', "help", "Generate-Releases::Options::Help"),
                 ('a', 'archive', 'Generate-Releases::Options::Archive', 'HasArg'),
                 ('s', "suite", "Generate-Releases::Options::Suite"),
                 ('f', "force", "Generate-Releases::Options::Force"),
                 ('q', "quiet", "Generate-Releases::Options::Quiet"),
                 ('o', 'option', '', 'ArbItem')]

    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Generate-Releases::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('generate-releases')
    pool = DakProcessPool()

    session = DBConn().session()

    if Options["Suite"]:
        suites = []
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print("cannot find suite %s" % s)
                Logger.log(['cannot find suite %s' % s])
    else:
        query = session.query(Suite).filter(Suite.untouchable == False)
        if 'Archive' in Options:
            archive_names = utils.split_args(Options['Archive'])
            query = query.join(Suite.archive).filter(Archive.archive_name.in_(archive_names))
        suites = query.all()

    for s in suites:
        # Skip untouchable suites unless --force was given
        if s.untouchable and not Options["Force"]:
            print("Skipping %s (untouchable)" % s.suite_name)
            continue

        if not Options["Quiet"]:
            print("Processing %s" % s.suite_name)
        Logger.log(['Processing release file for Suite: %s' % (s.suite_name)])
        pool.apply_async(generate_helper, (s.suite_id, ))

    # No more work will be added to the pool; wait for all workers to finish
    pool.close()
    pool.join()

    retcode = pool.overall_status()

    if retcode > 0:
        # Log which suites failed
        Logger.log(['Release file generation broken: %s' % (','.join([str(x[1]) for x in pool.results]))])

    Logger.close()

    sys.exit(retcode)


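# Worker body run inside the DakProcessPool; returns a (status, message)
# tuple that the pool records.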
def generate_helper(suite_id):
    '''
    This function is called in a new subprocess.
    '''
    session = DBConn().session()
    suite = Suite.get(suite_id, session)

    # Any exception is left to the process pool's status handling
    rw = ReleaseWriter(suite)
    rw.generate_release_files()

    return (PROC_STATUS_SUCCESS, 'Release file written for %s' % suite.suite_name)


if __name__ == '__main__':
    main()