1# Copyright (C) 2015, Ansgar Burchardt <ansgar@debian.org>
2#
3# This program is free software; you can redistribute it and/or modify
4# it under the terms of the GNU General Public License as published by
5# the Free Software Foundation; either version 2 of the License, or
6# (at your option) any later version.
7#
8# This program is distributed in the hope that it will be useful,
9# but WITHOUT ANY WARRANTY; without even the implied warranty of
10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11# GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License along
14# with this program; if not, write to the Free Software Foundation, Inc.,
15# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
17import daklib.compress
18import daklib.config
19import daklib.dakapt
20import daklib.dbconn
21import daklib.gpg
22import daklib.upload
23import daklib.regexes
25import apt_pkg
26import os
27import shutil
28import tempfile
29import urllib.request
30import urllib.error
31import urllib.parse
33from daklib.dbconn import Archive, Component, DBBinary, DBSource, PoolFile
34from sqlalchemy.orm import object_session
35from typing import Optional
# Hmm, maybe use APT directly for all of this?

# Hash fields looked up in a Release file when parsing its file list
# (passed to daklib.upload.parse_file_list in Release.__init__).
_release_hashes_fields = ('MD5Sum', 'SHA1', 'SHA256')
class Release:
    """Parsed Release/InRelease data for a remote suite.

    Exposes the advertised architectures and components and gives access
    to the referenced Packages and Sources indices.
    """

    def __init__(self, base, suite_name, data):
        # `base` and `suite_name` locate the repository; `data` is the
        # raw Release contents (signature already stripped/verified by
        # the caller, see obtain_release).
        self._base = base
        self._suite_name = suite_name
        self._dict = apt_pkg.TagSection(data)
        self._hashes = daklib.upload.parse_file_list(self._dict, False, daklib.regexes.re_file_safe_slash, _release_hashes_fields)

    def architectures(self):
        """Return the list of architectures from the Architectures field."""
        return self._dict['Architectures'].split()

    def components(self):
        """Return the list of components from the Components field."""
        return self._dict['Components'].split()

    def packages(self, component, architecture):
        """Return an apt_pkg.TagFile over the Packages index for
        the given component and architecture."""
        index_path = '{0}/binary-{1}/Packages'.format(component, architecture)
        index_file = obtain_release_file(self, index_path)
        return apt_pkg.TagFile(index_file.fh())

    def sources(self, component):
        """Return an apt_pkg.TagFile over the Sources index for the
        given component."""
        index_path = '{0}/source/Sources'.format(component)
        index_file = obtain_release_file(self, index_path)
        return apt_pkg.TagFile(index_file.fh())

    def suite(self):
        """Return the Suite field."""
        return self._dict['Suite']

    def codename(self):
        """Return the Codename field."""
        return self._dict['Codename']

    # TODO: Handle Date/Valid-Until to make sure we import
    # a newer version than before
class File:
    """A temporary file created in the configured temp directory.

    The file is deleted automatically when the object is garbage
    collected (NamedTemporaryFile semantics), so callers must keep a
    reference for as long as they need the content.
    """

    def __init__(self):
        temp_dir = daklib.config.Config()['Dir::TempPath']
        self._tmp = tempfile.NamedTemporaryFile(dir=temp_dir)

    def fh(self):
        """Return the underlying file object, rewound to the beginning."""
        self._tmp.seek(0)
        return self._tmp

    def hashes(self):
        """Return a daklib.dakapt.DakHashes for the file contents."""
        return daklib.dakapt.DakHashes(self.fh())
def obtain_file(base, path) -> File:
    """Obtain a file 'path' located below 'base'

    'base' is either a local directory or an http(s) URL prefix; the
    content is copied into a temporary :class:`File`.

    .. note::

        return type can still change
    """
    fn = '{0}/{1}'.format(base, path)
    tmp = File()
    if fn.startswith(('http://', 'https://')):
        # Use the response as a context manager so the connection is
        # closed even when copyfileobj raises (the old code leaked the
        # handle on error).  https is accepted too; previously it fell
        # through to open() and failed with a confusing ENOENT.
        with urllib.request.urlopen(fn, timeout=300) as fh:
            shutil.copyfileobj(fh, tmp._tmp)
    else:
        with open(fn, 'rb') as fh:
            shutil.copyfileobj(fh, tmp._tmp)
    return tmp
def obtain_release(base, suite_name, keyring, fingerprint=None) -> Release:
    """Obtain release information

    Downloads dists/<suite_name>/InRelease, verifies its signature
    against 'keyring' and returns the parsed :class:`Release`.
    """
    # NOTE(review): `fingerprint` is accepted but not used here —
    # confirm whether verification should be restricted to it.
    inrelease = obtain_file(base, 'dists/{0}/InRelease'.format(suite_name))
    signed = daklib.gpg.SignedFile(inrelease.fh().read(), [keyring])
    release = Release(base, suite_name, signed.contents)
    # The requested name must match either the Suite or the Codename.
    if suite_name not in (release.suite(), release.codename()):
        raise Exception("Suite {0} doesn't match suite or codename from Release file.".format(suite_name))
    return release
# Compressed-index extensions, tried in this order by obtain_release_file.
_compressions = ('.zst', '.xz', '.gz', '.bz2')
def obtain_release_file(release, filename) -> File:
    """Obtain file referenced from Release

    A compressed version is automatically selected and decompressed if it exists.
    """
    if filename not in release._hashes:
        raise ValueError("File {0} not referenced in Release".format(filename))

    # Prefer a compressed variant when the Release file lists one.
    compressed_name = None
    for ext in _compressions:
        candidate = filename + ext
        if candidate in release._hashes:
            compressed_name = candidate
            break
    if compressed_name is not None:
        filename = compressed_name

    # Obtain file and check hashes
    tmp = obtain_file(release._base, 'dists/{0}/{1}'.format(release._suite_name, filename))
    hashedfile = release._hashes[filename]
    hashedfile.check_fh(tmp.fh())

    if compressed_name is not None:
        decompressed = File()
        daklib.compress.decompress(tmp.fh(), decompressed.fh(), filename)
        tmp = decompressed

    return tmp
def import_source_to_archive(base, entry, transaction, archive, component) -> DBSource:
    """Import source package described by 'entry' into the given 'archive' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256

    Returns the newly created DBSource.
    """
    # Obtain and verify files
    # Reject Directory values that could escape the expected pool layout.
    if not daklib.regexes.re_file_safe_slash.match(entry['Directory']):
        raise Exception("Unsafe path in Directory field")
    hashed_files = daklib.upload.parse_file_list(entry, False)
    # Keep the temporary File objects alive until install below has
    # consumed them; dropping them would delete the temp files.
    files = []
    for f in hashed_files.values():
        path = os.path.join(entry['Directory'], f.filename)
        tmp = obtain_file(base, path)
        f.check_fh(tmp.fh())
        files.append(tmp)
        # `directory` deliberately survives the loop: every File is
        # created in the same Dir::TempPath (see File.__init__), so the
        # last iteration's directory holds all downloaded copies.
        directory, f.input_filename = os.path.split(tmp.fh().name)

    # Inject files into archive
    source = daklib.upload.Source(directory, list(hashed_files.values()), [], require_signature=False)
    # TODO: ugly hack!
    # NOTE(review): presumably Source() rebuilds its file list from the
    # .dsc and loses the input_filename recorded above, so copy it back
    # for everything except the .dsc itself — confirm against
    # daklib.upload.Source.
    for f in hashed_files.keys():
        if f.endswith('.dsc'):
            continue
        source.files[f].input_filename = hashed_files[f].input_filename

    # TODO: allow changed_by to be NULL
    changed_by = source.dsc['Maintainer']
    db_changed_by = daklib.dbconn.get_or_set_maintainer(changed_by, transaction.session)
    db_source = transaction.install_source_to_archive(directory, source, archive, component, db_changed_by)

    return db_source
def import_package_to_suite(base, entry, transaction, suite, component) -> DBBinary:
    """Import binary package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Packages index: Filename, Size, MD5sum, SHA1,
    SHA256

    Returns the newly created DBBinary.
    """
    # Fetch the package and verify it against the hashes from the index.
    pool_name = entry['Filename']
    local_copy = obtain_file(base, pool_name)
    directory, local_name = os.path.split(local_copy.fh().name)
    hashedfile = daklib.upload.HashedFile(
        os.path.basename(pool_name),
        int(entry['Size']),
        entry['MD5sum'],
        entry['SHA1'],
        entry['SHA256'],
        input_filename=local_name,
    )
    hashedfile.check_fh(local_copy.fh())

    # Inject file into archive
    binary = daklib.upload.Binary(directory, hashedfile)
    db_binary = transaction.install_binary(directory, binary, suite, component)
    transaction.flush()

    return db_binary
def import_source_to_suite(base, entry, transaction, suite, component):
    """Import source package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256
    """
    # Install into the suite's archive first, then add the suite association.
    db_source = import_source_to_archive(base, entry, transaction, suite.archive, component)
    db_source.suites.append(suite)
    transaction.flush()
def source_in_archive(source: str, version: str, archive: Archive, component: Optional[daklib.dbconn.Component] = None) -> bool:
    """Check that source package 'source' with version 'version' exists in 'archive',
    with an optional check for the given component 'component'.

    .. note::

        This should probably be moved somewhere else
    """
    session = object_session(archive)
    query = (
        session.query(DBSource)
        .filter_by(source=source, version=version)
        .join(DBSource.poolfile)
        .join(PoolFile.archives)
        .filter_by(archive=archive)
    )
    if component is not None:
        query = query.filter_by(component=component)
    # EXISTS subquery avoids materializing any rows.
    return session.query(query.exists()).scalar()