# Copyright (C) 2015, Ansgar Burchardt <ansgar@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import shutil
import tempfile
import urllib.error
import urllib.parse
import urllib.request
from typing import Optional

import apt_pkg
from sqlalchemy.orm import object_session

import daklib.compress
import daklib.config
import daklib.dakapt
import daklib.dbconn
import daklib.gpg
import daklib.regexes
import daklib.upload
from daklib.dbconn import Archive, DBBinary, DBSource, PoolFile

# Hmm, maybe use APT directly for all of this?

_release_hashes_fields = ("MD5Sum", "SHA1", "SHA256")

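
# Thin wrapper around a parsed (In)Release file: exposes the Architectures,
# Components, Suite and Codename fields and the per-file hash entries used to
# verify the Packages/Sources indices it references.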
class Release:
    def __init__(self, base, suite_name, data):
        self._base = base
        self._suite_name = suite_name
        self._dict = apt_pkg.TagSection(data)
        self._hashes = daklib.upload.parse_file_list(
            self._dict, False, daklib.regexes.re_file_safe_slash, _release_hashes_fields
        )

    def architectures(self):
        return self._dict["Architectures"].split()

    def components(self):
        return self._dict["Components"].split()

    def packages(self, component, architecture):
        fn = "{0}/binary-{1}/Packages".format(component, architecture)
        tmp = obtain_release_file(self, fn)
        return apt_pkg.TagFile(tmp.fh())

    def sources(self, component):
        fn = "{0}/source/Sources".format(component)
        tmp = obtain_release_file(self, fn)
        return apt_pkg.TagFile(tmp.fh())

    def suite(self):
        return self._dict["Suite"]

    def codename(self):
        return self._dict["Codename"]

    # TODO: Handle Date/Valid-Until to make sure we import
    # a newer version than before

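
# Temporary file wrapper: holds downloaded or copied data in a
# NamedTemporaryFile below Dir::TempPath and hands out a rewound file handle
# plus its hashes.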
class File:
    def __init__(self):
        config = daklib.config.Config()
        self._tmp = tempfile.NamedTemporaryFile(dir=config["Dir::TempPath"])

    def fh(self):
        self._tmp.seek(0)
        return self._tmp

    def hashes(self):
        return daklib.dakapt.DakHashes(self.fh())

def obtain_file(base, path) -> File:
    """Obtain a file 'path' located below 'base'

    .. note::

       return type can still change
    """
    fn = "{0}/{1}".format(base, path)
    tmp = File()
    if fn.startswith("http://"):
        # Remote base: fetch over HTTP into the temporary file.
        fh = urllib.request.urlopen(fn, timeout=300)
        shutil.copyfileobj(fh, tmp._tmp)
        fh.close()
    else:
        # Anything else is treated as a local path.
        with open(fn, "rb") as fh:
            shutil.copyfileobj(fh, tmp._tmp)
    return tmp

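
# Usage sketch for obtain_file() (illustrative only; the mirror URL is just an
# example and not something this module defines):
#
#     tmp = obtain_file("http://deb.debian.org/debian", "dists/unstable/InRelease")
#     data = tmp.fh().read()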
def obtain_release(base, suite_name, keyring, fingerprint=None) -> Release:
    """Obtain release information"""
    tmp = obtain_file(base, "dists/{0}/InRelease".format(suite_name))
    data = tmp.fh().read()
    # Verify the InRelease signature against the given keyring.
    f = daklib.gpg.SignedFile(data, [keyring])
    r = Release(base, suite_name, f.contents)
    # Make sure the file actually describes the suite that was asked for.
    if r.suite() != suite_name and r.codename() != suite_name:
        raise Exception(
            "Suite {0} doesn't match suite or codename from Release file.".format(
                suite_name
            )
        )
    return r

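
# Usage sketch for obtain_release() (illustrative only; the keyring path is an
# assumption, e.g. a keyring shipped by the debian-archive-keyring package):
#
#     release = obtain_release(
#         "http://deb.debian.org/debian",
#         "unstable",
#         "/usr/share/keyrings/debian-archive-keyring.gpg",
#     )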
# Compressed index variants, tried in this order of preference.
_compressions = (".zst", ".xz", ".gz", ".bz2")

def obtain_release_file(release, filename) -> File:
    """Obtain file referenced from Release

    A compressed version is automatically selected and decompressed if it exists.
    """
    if filename not in release._hashes:
        raise ValueError("File {0} not referenced in Release".format(filename))

    compressed = False
    for ext in _compressions:
        compressed_file = filename + ext
        if compressed_file in release._hashes:
            compressed = True
            filename = compressed_file
            break

    # Obtain file and check hashes
    tmp = obtain_file(
        release._base, "dists/{0}/{1}".format(release._suite_name, filename)
    )
    hashedfile = release._hashes[filename]
    hashedfile.check_fh(tmp.fh())

    if compressed:
        tmp2 = File()
        daklib.compress.decompress(tmp.fh(), tmp2.fh(), filename)
        tmp = tmp2

    return tmp

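
# Usage sketch for obtain_release_file() (Release.packages() and
# Release.sources() above go through it the same way):
#
#     tmp = obtain_release_file(release, "main/binary-amd64/Packages")
#     for entry in apt_pkg.TagFile(tmp.fh()):
#         ...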
def import_source_to_archive(base, entry, transaction, archive, component) -> DBSource:
    """Import source package described by 'entry' into the given 'archive' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256
    """
    # Obtain and verify files
    if not daklib.regexes.re_file_safe_slash.match(entry["Directory"]):
        raise Exception("Unsafe path in Directory field")
    hashed_files = daklib.upload.parse_file_list(entry, False)
    # Keep references to the temporary files so they are not deleted before
    # the source package has been installed into the archive.
    files = []
    for f in hashed_files.values():
        path = os.path.join(entry["Directory"], f.filename)
        tmp = obtain_file(base, path)
        f.check_fh(tmp.fh())
        files.append(tmp)
        directory, f.input_filename = os.path.split(tmp.fh().name)

    # Inject files into archive
    source = daklib.upload.Source(
        directory, list(hashed_files.values()), [], require_signature=False
    )
    # TODO: ugly hack!
    for f in hashed_files.keys():
        if f.endswith(".dsc"):
            continue
        source.files[f].input_filename = hashed_files[f].input_filename

    # TODO: allow changed_by to be NULL
    changed_by = source.dsc["Maintainer"]
    db_changed_by = daklib.dbconn.get_or_set_maintainer(changed_by, transaction.session)
    db_source = transaction.install_source_to_archive(
        directory, source, archive, component, db_changed_by
    )

    return db_source

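
# Usage sketch for import_source_to_archive() (the 'transaction', 'archive'
# and 'component' objects come from the surrounding dak machinery and are only
# assumed here):
#
#     for entry in release.sources("main"):
#         import_source_to_archive(base, entry, transaction, archive, component)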
def import_package_to_suite(base, entry, transaction, suite, component) -> DBBinary:
    """Import binary package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Packages index: Filename, Size, MD5sum, SHA1,
    SHA256
    """
    # Obtain and verify file
    filename = entry["Filename"]
    tmp = obtain_file(base, filename)
    directory, fn = os.path.split(tmp.fh().name)
    hashedfile = daklib.upload.HashedFile(
        os.path.basename(filename),
        int(entry["Size"]),
        entry["MD5sum"],
        entry["SHA1"],
        entry["SHA256"],
        input_filename=fn,
    )
    hashedfile.check_fh(tmp.fh())

    # Inject file into archive
    binary = daklib.upload.Binary(directory, hashedfile)
    db_binary = transaction.install_binary(directory, binary, suite, component)
    transaction.flush()

    return db_binary

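
# Usage sketch for import_package_to_suite() (again, 'transaction', 'suite'
# and 'component' are assumed to be provided by the caller):
#
#     for entry in release.packages("main", "amd64"):
#         import_package_to_suite(base, entry, transaction, suite, component)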
def import_source_to_suite(base, entry, transaction, suite, component):
    """Import source package described by 'entry' into the given 'suite' and 'component'

    'entry' needs to be a dict-like object with at least the following
    keys as used in a Sources index: Directory, Files, Checksums-Sha1,
    Checksums-Sha256
    """
    source = import_source_to_archive(
        base, entry, transaction, suite.archive, component
    )
    source.suites.append(suite)
    transaction.flush()

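
# Usage sketch for source_in_archive() below (the Archive object must be bound
# to a database session, as it is looked up via object_session(); the package
# name and version are hypothetical):
#
#     if not source_in_archive("hello", "2.10-3", archive):
#         ...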
def source_in_archive(
    source: str,
    version: str,
    archive: Archive,
    component: Optional[daklib.dbconn.Component] = None,
) -> bool:
    """Check whether source package 'source' with version 'version' exists in 'archive',
    with an optional check for the given component 'component'.

    .. note::

       This should probably be moved somewhere else
    """
    session = object_session(archive)
    query = (
        session.query(DBSource)
        .filter_by(source=source, version=version)
        .join(DBSource.poolfile)
        .join(PoolFile.archives)
        .filter_by(archive=archive)
    )
    if component is not None:
        query = query.filter_by(component=component)
    return session.query(query.exists()).scalar()