2 # -*- coding: utf-8 -*-
6 # Parts of code from pm2ml Copyright (C) 2012-2013 Xyne
7 # Copyright © 2013-2018 Antergos
9 # This file is part of Cnchi.
11 # Cnchi is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 3 of the License, or
14 # (at your option) any later version.
16 # Cnchi is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # The following additional terms are in effect as per Section 7 of the license:
23 # The preservation of all legal notices and author attributions in
24 # the material or in the Appropriate Legal Notices displayed
25 # by works containing it is required.
27 # You should have received a copy of the GNU General Public License
28 # along with Cnchi; If not, see <http://www.gnu.org/licenses/>.
31 """ Operations with metalinks """
41 from collections import deque
43 from xml.dom.minidom import getDOMImplementation
44 import xml.etree.cElementTree as elementTree
def get_info(metalink):
    """ Reads metalink xml info and returns it as a dict.

    Keys are the package identities; each value is a dict holding
    'filename', 'hash' (dict keyed by hash type), 'urls' (capped at
    MAX_URLS) plus any other metalink tags found for that file. """

    # tag = "{urn:ietf:params:xml:ns:metalink}"

    metalink_info = {}
    element = {}

    # iterparse needs a real file, so dump the metalink to a temp file.
    # The file must be closed before parsing or the last write may not
    # be flushed to disk.
    temp_file = tempfile.NamedTemporaryFile(delete=False)
    temp_file.write(str(metalink).encode('UTF-8'))
    temp_file.close()

    for event, elem in elementTree.iterparse(temp_file.name, events=('start', 'end')):
        if event == 'start':
            # Strip the metalink xml namespace from the tag name
            tag = elem.tag.split('}')[1]
            if tag == 'file':
                element['filename'] = elem.attrib['name']
            elif tag == 'hash':
                hash_type = elem.attrib['type']
                try:
                    element['hash'][hash_type] = elem.text
                except KeyError:
                    # First hash seen for this file
                    element['hash'] = {hash_type: elem.text}
            elif tag == 'url':
                try:
                    element['urls'].append(elem.text)
                except KeyError:
                    # First url seen for this file
                    element['urls'] = [elem.text]
            else:
                element[tag] = elem.text
        if event == 'end' and elem.tag.endswith('file'):
            # Limit to MAX_URLS for each file
            if len(element['urls']) > MAX_URLS:
                element['urls'] = element['urls'][:MAX_URLS]
            key = element['identity']
            metalink_info[key] = element.copy()
            element.clear()

    if os.path.exists(temp_file.name):
        os.remove(temp_file.name)

    return metalink_info
def create(alpm, package_name, pacman_conf_file):
    """ Creates a metalink to download package_name and its dependencies.

    Returns the metalink, or None when a package or a dependency cannot
    be resolved or the download queue ends up empty. """

    options = ["--conf", pacman_conf_file, "--noconfirm", "--all-deps"]

    if package_name == "databases":
        # Special pseudo package name: only refresh the databases.
        options.append("--refresh")
    else:
        options.append(package_name)

    download_queue, not_found, missing_deps = build_download_queue(
        alpm, args=options)

    if not_found:
        not_found = sorted(not_found)
        msg = "Can't find these packages: " + ' '.join(not_found)
        logging.error(msg)
        return None

    if missing_deps:
        missing_deps = sorted(missing_deps)
        msg = "Can't resolve these dependencies: " + ' '.join(missing_deps)
        logging.error(msg)
        return None

    if download_queue:
        metalink = download_queue_to_metalink(download_queue)
        return metalink

    logging.error("Unable to create download queue for package %s", package_name)
    return None
128 # From here comes modified code from pm2ml
129 # pm2ml is Copyright (C) 2012-2013 Xyne
130 # More info: http://xyne.archlinux.ca/projects/pm2ml
def download_queue_to_metalink(download_queue):
    """ Converts a download_queue object to a Metalink instance.

    Databases come first (with their optional signature files),
    followed by the sync packages and their mirror urls. """
    metalink = Metalink()
    for database, sigs in download_queue.dbs:
        metalink.add_db(database, sigs)
    for pkg, urls, sigs in download_queue.sync_pkgs:
        metalink.add_sync_pkg(pkg, urls, sigs)
    # Without this return the caller would always receive None.
    return metalink
146 """ Metalink class """
149 self.doc = getDOMImplementation().createDocument(
150 None, "metalink", None)
151 self.doc.documentElement.setAttribute(
152 'xmlns', "urn:ietf:params:xml:ns:metalink")
153 self.files = self.doc.documentElement
159 """ Get a string representation of a metalink """
161 r'(?<=>)\n\s*([^\s<].*?)\s*\n\s*',
163 self.doc.toprettyxml(indent=' ')
166 def add_urls(self, element, urls):
167 """Add URL elements to the given element."""
169 url_tag = self.doc.createElement('url')
170 element.appendChild(url_tag)
171 url_val = self.doc.createTextNode(url)
172 url_tag.appendChild(url_val)
174 def add_sync_pkg(self, pkg, urls, sigs=False):
175 """Add a sync db package."""
176 file_ = self.doc.createElement("file")
177 file_.setAttribute("name", pkg.filename)
178 self.files.appendChild(file_)
179 for tag, db_attr, attrs in (
180 ('identity', 'name', ()),
181 ('size', 'size', ()),
182 ('version', 'version', ()),
183 ('description', 'desc', ()),
184 ('hash', 'sha256sum', (('type', 'sha256'),)),
185 ('hash', 'md5sum', (('type', 'md5'),))):
186 tag = self.doc.createElement(tag)
187 file_.appendChild(tag)
188 val = self.doc.createTextNode(str(getattr(pkg, db_attr)))
190 for key, val in attrs:
191 tag.setAttribute(key, val)
193 self.add_urls(file_, urls)
195 self.add_file(pkg.filename + '.sig', (u + '.sig' for u in urls))
197 def add_file(self, name, urls):
198 """Add a signature file."""
199 file_ = self.doc.createElement("file")
200 file_.setAttribute("name", name)
201 self.files.appendChild(file_)
202 self.add_urls(file_, urls)
204 def add_db(self, database, sigs=False):
206 file_ = self.doc.createElement("file")
207 name = database.name + '.db'
208 file_.setAttribute("name", name)
209 self.files.appendChild(file_)
210 urls = list(os.path.join(url, database.name + '.db')
211 for url in database.servers)
212 self.add_urls(file_, urls)
214 self.add_file(name + '.sig', (u + '.sig' for u in urls))
218 """ Represents a set of packages """
220 def __init__(self, pkgs=None):
221 """ Init our internal self.pkgs dict with all given packages in pkgs """
226 self.pkgs[pkg.name] = pkg
229 return 'PkgSet({0})'.format(repr(self.pkgs))
232 """ Adds package info to the set """
233 self.pkgs[pkg.name] = pkg
235 def __and__(self, other):
236 new = PkgSet(set(self.pkgs.values()) & set(other.pkgs.values()))
239 def __iand__(self, other):
240 self.pkgs = self.__and__(other).pkgs
243 def __or__(self, other):
244 copy = PkgSet(list(self.pkgs.values()))
245 return copy.__ior__(other)
247 def __ior__(self, other):
248 self.pkgs.update(other.pkgs)
251 def __contains__(self, pkg):
252 return pkg.name in self.pkgs
255 for value in self.pkgs.values():
259 return len(self.pkgs)
class DownloadQueue():
    """ Represents a download queue """

    def __init__(self):
        # Pending database downloads: (database, download_sig) tuples.
        self.dbs = list()
        # Pending package downloads: (pkg, urls, download_sig) tuples.
        self.sync_pkgs = list()

    def __bool__(self):
        """ Queue is truthy when anything is pending. """
        return bool(self.dbs or self.sync_pkgs)

    # Python 2 truth hook, kept for backwards compatibility.
    def __nonzero__(self):
        return self.dbs or self.sync_pkgs

    def add_db(self, database, sigs=False):
        """ Adds db info and signatures to the queue """
        self.dbs.append((database, sigs))

    def add_sync_pkg(self, pkg, urls, sigs=False):
        """ Adds package and its urls to the queue """
        self.sync_pkgs.append((pkg, urls, sigs))
def parse_args(args):
    """ Parse arguments to build_download_queue function
        These arguments mimic pacman ones """
    parser = argparse.ArgumentParser()

    parser.add_argument('pkgs', nargs='*', default=[], metavar='<pkgname>',
                        help='Packages or groups to download.')
    parser.add_argument('--all-deps', action='store_true', dest='alldeps',
                        help='Include all dependencies even if they are already installed.')
    parser.add_argument('-c', '--conf', metavar='<path>', default='/etc/pacman.conf', dest='conf',
                        help='Use a different pacman.conf file.')
    parser.add_argument('--noconfirm', action='store_true', dest='noconfirm',
                        help='Suppress user prompts.')
    parser.add_argument('-d', '--nodeps', action='store_true', dest='nodeps',
                        help='Skip dependencies.')
    parser.add_argument('--needed', action='store_true', dest='needed',
                        help='Skip packages if they already exist in the cache.')
    help_msg = '''Include signature files for repos with optional and required SigLevels.
Pass this flag twice to attempt to download signature for all databases and packages.'''
    # help_msg was defined but never passed: the '-s' call below was
    # left unterminated without it.
    parser.add_argument('-s', '--sigs', action='count', default=0, dest='sigs',
                        help=help_msg)
    parser.add_argument('-y', '--databases', '--refresh', action='store_true', dest='db',
                        help='Download databases.')

    return parser.parse_args(args)
311 def get_antergos_repo_pkgs(alpm_handle):
312 """ Returns pkgs from Antergos groups (mate, mate-extra) and
313 the antergos db info """
316 for database in alpm_handle.get_syncdbs():
317 if database.name == 'Reborn-OS':
322 logging.error("Cannot sync Antergos repository database!")
325 group_names = ['mate', 'mate-extra']
327 for group_name in group_names:
328 group = antdb.read_grp(group_name)
330 # Group does not exist
335 pkg for group in groups
336 for pkg in group[1] if group}
338 return repo_pkgs, antdb
def resolve_deps(alpm_handle, other, alldeps):
    """ Resolve dependencies of the packages in 'other'.

    Breadth-first walk over pkg.depends; providers found in the sync
    dbs are added to 'other'. Returns (other, missing_deps) where
    missing_deps lists dependency strings no sync db could satisfy. """
    missing_deps = list()
    queue = deque(other)
    local_cache = alpm_handle.get_localdb().pkgcache
    syncdbs = alpm_handle.get_syncdbs()
    seen = set()
    while queue:
        pkg = queue.popleft()
        for dep in pkg.depends:
            # Deps already installed locally are skipped unless the
            # caller asked for all dependencies.
            if pyalpm.find_satisfier(local_cache, dep) is None or alldeps:
                for database in syncdbs:
                    prov = pyalpm.find_satisfier(database.pkgcache, dep)
                    if prov is not None:
                        other.add(prov)
                        # Queue each provider only once.
                        if prov.name not in seen:
                            seen.add(prov.name)
                            queue.append(prov)
                        break
                else:
                    # No sync db satisfies this dependency.
                    missing_deps.append(dep)
    return other, missing_deps
def create_package_set(requested, ant_repo_pkgs, antdb, alpm_handle):
    """ Create package set from requested set.

    Returns (found, other): 'found' holds requested names resolved as
    groups, 'other' is the PkgSet of sync packages to download. """
    found = set()
    other = PkgSet()

    for pkg in requested:
        for database in alpm_handle.get_syncdbs():
            # if pkg is in antergos repo, fetch it from it (instead of another repo)
            # pkg should be sourced from the antergos repo only.
            if antdb and pkg in ant_repo_pkgs and database.name != 'Reborn-OS':
                continue
            syncpkg = database.get_pkg(pkg)
            if syncpkg:
                other.add(syncpkg)
                break
            else:
                # Not a package name; maybe it is a group name.
                syncgrp = database.read_grp(pkg)
                if syncgrp:
                    found.add(pkg)
                    #other_grp |= PkgSet(syncgrp[1])
                    other |= PkgSet(syncgrp[1])
                    break

    return found, other
def build_download_queue(alpm, args=None):
    """ Function to build a download queue.
        Needs a pkgname in args.

    Returns (download_queue, not_found, missing_deps), or
    (None, None, None) when the Reborn-OS repo db cannot be loaded. """
    pargs = parse_args(args)

    requested = set(pargs.pkgs)

    handle = alpm.get_handle()
    conf = alpm.get_config()

    missing_deps = list()

    ant_repo_pkgs, antdb = get_antergos_repo_pkgs(handle)
    if not antdb:
        logging.error("Cannot load antergos repository database")
        return None, None, None

    found, other = create_package_set(requested, ant_repo_pkgs, antdb, handle)

    # foreign_names = requested - set(x.name for x in other)

    # Resolve dependencies.
    if other and not pargs.nodeps:
        other, missing_deps = resolve_deps(handle, other, pargs.alldeps)

    found |= set(other.pkgs)
    not_found = requested - found

    if pargs.needed:
        # Drop packages whose checksums already match the local cache.
        other = PkgSet(list(check_cache(conf, other)))

    # Create download queue
    download_queue = DownloadQueue()

    # Add databases (and their signature)
    if pargs.db:
        for database in handle.get_syncdbs():
            try:
                siglevel = conf[database.name]['SigLevel'].split()[0]
            except KeyError:
                siglevel = None
            download_sig = needs_sig(siglevel, pargs.sigs, 'Database')
            download_queue.add_db(database, download_sig)

    # Add packages (pkg, url, signature)
    for pkg in other:
        try:
            siglevel = conf[pkg.db.name]['SigLevel'].split()[0]
        except KeyError:
            siglevel = None
        download_sig = needs_sig(siglevel, pargs.sigs, 'Package')

        urls = []
        server_urls = list(pkg.db.servers)
        for server_url in server_urls:
            url = os.path.join(server_url, pkg.filename)
            urls.append(url)

        # Limit to MAX_URLS url
        while len(urls) > MAX_URLS:
            urls.pop()

        download_queue.add_sync_pkg(pkg, urls, download_sig)

    return download_queue, not_found, missing_deps
def get_checksum(path, typ):
    """ Returns checksum of a file.

    typ is a hashlib algorithm name ('sha256', 'md5', ...).
    Returns the hex digest string, or None when the file does not
    exist or cannot be read. """
    new_hash = hashlib.new(typ)
    block_size = new_hash.block_size
    try:
        with open(path, 'rb') as myfile:
            buf = myfile.read(block_size)
            # Feed the file block by block so big files do not need to
            # fit into memory.
            while buf:
                new_hash.update(buf)
                buf = myfile.read(block_size)
        return new_hash.hexdigest()
    except FileNotFoundError:
        # Missing cache file is an expected case; just report no hash.
        return None
    except IOError as io_error:
        logging.error(io_error)
        return None
def check_cache(conf, pkgs):
    """ Checks package checksum in cache.

    Generator: yields each package that is NOT already present in a
    cache directory with a matching sha256 or md5 checksum (i.e. the
    packages that still need to be downloaded). """
    for pkg in pkgs:
        for cache in conf.options['CacheDir']:
            fpath = os.path.join(cache, pkg.filename)
            for checksum in ('sha256', 'md5'):
                real_checksum = get_checksum(fpath, checksum)
                correct_checksum = getattr(pkg, checksum + 'sum')
                if real_checksum is None or real_checksum != correct_checksum:
                    # Missing or corrupt cache entry: must download.
                    yield pkg
                    break
def needs_sig(siglevel, insistence, prefix):
    """ Determines if a signature should be downloaded.
        The siglevel is the pacman.conf SigLevel for the given repo.
        The insistence is an integer. Anything below 1 will return false,
        anything above 1 will return true, and 1 will check if the
        siglevel is required or optional.
        The prefix is either "Database" or "Package". """
    if insistence > 1:
        # Caller passed --sigs twice (or more): always download.
        return True
    elif insistence == 1 and siglevel:
        # Honour the repo's own SigLevel: e.g. 'Required', 'Optional',
        # 'DatabaseRequired', 'PackageOptional', ...
        for sl_type in ('Required', 'Optional'):
            if siglevel == sl_type or siglevel == prefix + sl_type:
                return True
    return False
512 """ Module test function """
517 formatter = logging.Formatter(
518 '[%(asctime)s] [%(module)s] %(levelname)s: %(message)s',
519 "%Y-%m-%d %H:%M:%S.%f")
520 logger = logging.getLogger()
521 logger.setLevel(logging.DEBUG)
522 stream_handler = logging.StreamHandler()
523 stream_handler.setLevel(logging.DEBUG)
524 stream_handler.setFormatter(formatter)
525 logger.addHandler(stream_handler)
530 cnchi_path = "/usr/share/cnchi"
531 sys.path.append(cnchi_path)
532 sys.path.append(os.path.join(cnchi_path, "src"))
533 import pacman.pac as pac
536 conf_path="/etc/pacman.conf",
539 print("Creating metalink...")
542 #package_name="ipw2200-fw",
543 package_name="base-devel",
544 pacman_conf_file="/etc/pacman.conf")
548 print(get_info(meta4))
549 # print(get_info(meta4)['ipw2200-fw']['urls'])
556 if __name__ == '__main__':