6 A resource is a logical file contained within a package, or a logical
7 subdirectory thereof. The package resource API expects resource names
8 to have their path parts separated with ``/``, *not* whatever the local
9 path separator is. Do not use os.path operations to manipulate resource
10 names being passed into the API.
12 The package resource API is designed to work with normal filesystem packages,
13 .egg files, and unpacked .egg files. It can also work in a limited way with
14 .zip files and with custom PEP 302 loaders that support the ``get_data()``
18 from __future__ import absolute_import
42 from pkgutil import get_importer
47 # Python 3.2 compatibility
50 from pkg_resources.extern import six
51 from pkg_resources.extern.six.moves import urllib, map, filter
53 # capture these to bypass sandboxing
56 from os import mkdir, rename, unlink
59 # no write support, probably under GAE
62 from os import open as os_open
63 from os.path import isdir, split
66 import importlib.machinery as importlib_machinery
67 # access attribute to force import under delayed import mechanisms.
68 importlib_machinery.__name__
70 importlib_machinery = None
72 from . import py31compat
73 from pkg_resources.extern import appdirs
74 from pkg_resources.extern import packaging
75 __import__('pkg_resources.extern.packaging.version')
76 __import__('pkg_resources.extern.packaging.specifiers')
77 __import__('pkg_resources.extern.packaging.requirements')
78 __import__('pkg_resources.extern.packaging.markers')
# NOTE(review): numbered, gap-ridden excerpt of pkg_resources/__init__.py;
# interior lines (e.g. the try/except that guards the exception shims) are
# missing, so only the visible fragment is annotated here.
# Refuse to run on Python 3.0-3.2; Python 2 and Python >= 3.3 are accepted.
81 if (3, 0) < sys.version_info < (3, 3):
82 raise RuntimeError("Python 3.3 or later is required")
# Shim the Python-3-only OSError subclasses so later code can reference them
# unconditionally on Python 2 (presumably inside a py2-only branch — the
# guarding condition is not visible in this excerpt; TODO confirm).
85 # Those builtin exceptions are only defined in Python 3
86 PermissionError = None
87 NotADirectoryError = None
89 # declare some globals that will be defined later to
90 # satisfy the linters.
# Warning category for versions/specifiers that do not comply with PEP 440.
# The docstring's closing lines are missing from this excerpt.
95 class PEP440Warning(RuntimeWarning):
97 Used when there is an issue with a version or specifier not complying with
# Mixin giving packaging version objects legacy tuple-like behavior:
# comparisons against plain tuples, indexing, and (deprecated) iteration
# over old-style parsed version parts. Many interior lines (else branches,
# yield statements, the warnings.warn call site) are missing from this
# excerpt — the visible fragments are documented as-is.
102 class _SetuptoolsVersionMixin(object):
# Delegates hashing to the underlying packaging version class.
104 return super(_SetuptoolsVersionMixin, self).__hash__()
# Each rich comparison first handles comparison against a raw tuple by
# converting self via tuple(self); otherwise it defers to the superclass.
106 def __lt__(self, other):
107 if isinstance(other, tuple):
108 return tuple(self) < other
110 return super(_SetuptoolsVersionMixin, self).__lt__(other)
112 def __le__(self, other):
113 if isinstance(other, tuple):
114 return tuple(self) <= other
116 return super(_SetuptoolsVersionMixin, self).__le__(other)
118 def __eq__(self, other):
119 if isinstance(other, tuple):
120 return tuple(self) == other
122 return super(_SetuptoolsVersionMixin, self).__eq__(other)
124 def __ge__(self, other):
125 if isinstance(other, tuple):
126 return tuple(self) >= other
128 return super(_SetuptoolsVersionMixin, self).__ge__(other)
130 def __gt__(self, other):
131 if isinstance(other, tuple):
132 return tuple(self) > other
134 return super(_SetuptoolsVersionMixin, self).__gt__(other)
136 def __ne__(self, other):
137 if isinstance(other, tuple):
138 return tuple(self) != other
140 return super(_SetuptoolsVersionMixin, self).__ne__(other)
142 def __getitem__(self, key):
143 return tuple(self)[key]
# Below: legacy (pre-PEP 440) version tokenizer used by iteration. The
# regex splits a version string into numeric runs, alpha runs, '.' and '-'.
146 component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
# Yields normalized parts; numeric parts are zero-padded so string
# comparison orders them numerically (padding line itself is missing here).
155 def _parse_version_parts(s):
156 for part in component_re.split(s):
157 part = replace(part, part)
158 if not part or part == '.':
160 if part[:1] in '0123456789':
161 # pad for numeric comparison
166 # ensure that alpha/beta/candidate are before final
# Legacy parse: builds the old tuple-of-strings form, stripping trailing
# '*final-' markers and trailing zero groups.
169 def old_parse_version(s):
171 for part in _parse_version_parts(s.lower()):
172 if part.startswith('*'):
173 # remove '-' before a prerelease tag
175 while parts and parts[-1] == '*final-':
177 # remove trailing zeros from each series of numeric parts
178 while parts and parts[-1] == '00000000':
# Iterating a parsed version is deprecated; a warning with the text below
# is emitted (the warnings.warn(...) call line is missing from the excerpt).
183 # Warn for use of this function
185 "You have iterated over the result of "
186 "pkg_resources.parse_version. This is a legacy behavior which is "
187 "inconsistent with the new version class introduced in setuptools "
188 "8.0. In most cases, conversion to a tuple is unnecessary. For "
189 "comparison of versions, sort the Version instances directly. If "
190 "you have another use case requiring the tuple, please file a "
191 "bug with the setuptools project describing that need.",
196 for part in old_parse_version(str(self)):
# PEP 440-compliant version with the legacy tuple-comparison mixin layered
# on top. Class body (presumably just `pass`) is missing from this excerpt.
200 class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
# Non-PEP-440 ("legacy") version with the same tuple-comparison mixin.
# Class body is missing from this excerpt.
204 class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
205 packaging.version.LegacyVersion):
# Parse a version string, preferring strict PEP 440 parsing and falling back
# to the legacy scheme when packaging rejects it. The `try:` line between the
# def and the first return is missing from this excerpt.
209 def parse_version(v):
211 return SetuptoolsVersion(v)
212 except packaging.version.InvalidVersion:
213 return SetuptoolsLegacyVersion(v)
# Module-state registry: _declare_state records which globals participate in
# __getstate__/__setstate__ snapshots, keyed by a "vartype" that selects the
# _sget_*/_sset_* accessor pair. Several def lines and the globals() binding
# are missing from this excerpt.
219 def _declare_state(vartype, **kw):
221 _state_vars.update(dict.fromkeys(kw, vartype))
# Snapshot each registered global via its type-specific getter
# (e.g. '_sget_dict' for vartype 'dict'); `g` is presumably globals().
227 for k, v in _state_vars.items():
228 state[k] = g['_sget_' + v](g[k])
232 def __setstate__(state):
# Restore each global via its type-specific setter.
234 for k, v in state.items():
235 g['_sset_' + _state_vars[k]](k, g[k], v)
243 def _sset_dict(key, ob, state):
# Object-typed state delegates to the object's own pickle-style hooks.
248 def _sget_object(val):
249 return val.__getstate__()
252 def _sset_object(key, ob, state):
253 ob.__setstate__(state)
# 'none'-typed vars have no state to save or restore.
256 _sget_none = _sset_none = lambda *args: None
259 def get_supported_platform():
260 """Return this platform's maximum compatible version.
262 distutils.util.get_platform() normally reports the minimum version
263 of Mac OS X that would be required to *use* extensions produced by
264 distutils. But what we want when checking compatibility is to know the
265 version of Mac OS X that we are *running*. To allow usage of packages that
266 explicitly require a newer version of Mac OS X, we must also know the
267 current version of the OS.
269 If this condition occurs for any other platform with a version in its
270 platform strings, this function should be extended accordingly.
# On macOS, rewrite the macosx-X.Y-arch tag with the *running* OS version
# (from _macosx_vers) while keeping the arch suffix from the build tag.
# The final `return plat` line is missing from this excerpt.
272 plat = get_build_platform()
273 m = macosVersionString.match(plat)
274 if m is not None and sys.platform == "darwin":
276 plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
# Public API declaration (`__all__ = [` opening line is missing from this
# excerpt). Groups: resource access, environment control, implementation
# classes, exceptions, parsing utilities, constants, provider interfaces,
# and deprecated aliases.
284 # Basic resource access and distribution/entry point discovery
285 'require', 'run_script', 'get_provider', 'get_distribution',
286 'load_entry_point', 'get_entry_map', 'get_entry_info',
288 'resource_string', 'resource_stream', 'resource_filename',
289 'resource_listdir', 'resource_exists', 'resource_isdir',
291 # Environmental control
292 'declare_namespace', 'working_set', 'add_activation_listener',
293 'find_distributions', 'set_extraction_path', 'cleanup_resources',
296 # Primary implementation classes
297 'Environment', 'WorkingSet', 'ResourceManager',
298 'Distribution', 'Requirement', 'EntryPoint',
301 'ResolutionError', 'VersionConflict', 'DistributionNotFound',
302 'UnknownExtra', 'ExtractionError',
307 # Parsing functions and string utilities
308 'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
309 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
310 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
312 # filesystem utilities
313 'ensure_directory', 'normalize_path',
315 # Distribution "precedence" constants
316 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
318 # "Provider" interfaces, implementations, and registration/lookup APIs
319 'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
320 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
321 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
322 'register_finder', 'register_namespace_handler', 'register_loader_type',
323 'fixup_namespace_packages', 'get_importer',
325 # Deprecated/backward compatibility only
326 'run_main', 'AvailableDistributions',
# Root of the dependency-resolution exception hierarchy. The `def __repr__`
# line is missing from this excerpt; the visible return is its body.
330 class ResolutionError(Exception):
331 """Abstract base for dependency resolution errors"""
334 return self.__class__.__name__ + repr(self.args)
337 class VersionConflict(ResolutionError):
339 An already-installed version conflicts with the requested version.
341 Should be initialized with the installed Distribution and the requested
# Message template; filled from locals() in __str__ so {self.dist}/{self.req}
# resolve (properties `dist`/`req` presumably wrap self.args — not visible).
345 _template = "{self.dist} is installed but {self.req} is required"
356 return self._template.format(**locals())
358 def with_context(self, required_by):
360 If required_by is non-empty, return a version of self that is a
361 ContextualVersionConflict.
# The early-return for an empty required_by is missing from this excerpt.
365 args = self.args + (required_by,)
366 return ContextualVersionConflict(*args)
369 class ContextualVersionConflict(VersionConflict):
371 A VersionConflict that accepts a third parameter, the set of the
372 requirements that required the installed Distribution.
# Extends the parent template with the requiring set; required_by is
# presumably a property over self.args (its body is missing here).
375 _template = VersionConflict._template + ' by {self.required_by}'
378 def required_by(self):
382 class DistributionNotFound(ResolutionError):
383 """A requested distribution was not found"""
# Template filled via locals() in __str__; requirers_str names the
# requesting distributions, or 'the application' when none recorded.
385 _template = ("The '{self.req}' distribution was not found "
386 "and is required by {self.requirers_str}")
397 def requirers_str(self):
398 if not self.requirers:
399 return 'the application'
400 return ', '.join(self.requirers)
403 return self._template.format(**locals())
# Raised when a requirement names an extra the distribution does not define.
409 class UnknownExtra(ResolutionError):
410 """Distribution doesn't have an "extra feature" of the given name"""
# Registry mapping loader types -> IResourceProvider factories
# (populated by register_loader_type below).
413 _provider_factories = {}
# 'X.Y' prefix of the running interpreter version, e.g. '2.7' or '3.6'.
415 PY_MAJOR = sys.version[:3]
423 def register_loader_type(loader_type, provider_factory):
424 """Register `provider_factory` to make providers for `loader_type`
426 `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
427 and `provider_factory` is a function that, passed a *module* object,
428 returns an ``IResourceProvider`` for that module.
# (docstring-closing line missing from this excerpt)
430 _provider_factories[loader_type] = provider_factory
433 def get_provider(moduleOrReq):
434 """Return an IResourceProvider for the named module or requirement"""
# Requirement objects resolve through the working set (activating via
# require() if needed); strings are treated as module names.
435 if isinstance(moduleOrReq, Requirement):
436 return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
# The try/except KeyError around the sys.modules lookup is missing from
# this excerpt; on a miss the module is imported, then looked up again.
438 module = sys.modules[moduleOrReq]
440 __import__(moduleOrReq)
441 module = sys.modules[moduleOrReq]
442 loader = getattr(module, '__loader__', None)
443 return _find_adapter(_provider_factories, loader)(module)
# Return the running macOS version as a list of string components, cached in
# the mutable-default `_cache` (intentional memoization idiom). The cache-hit
# early return and final `return _cache[0]` are missing from this excerpt.
446 def _macosx_vers(_cache=[]):
448 version = platform.mac_ver()[0]
449 # fallback for MacPorts
# MacPorts-built Pythons can return '' from mac_ver(); read the OS version
# from the system plist instead (readPlist is the pre-3.9 plistlib API).
451 plist = '/System/Library/CoreServices/SystemVersion.plist'
452 if os.path.exists(plist):
453 if hasattr(plistlib, 'readPlist'):
454 plist_content = plistlib.readPlist(plist)
455 if 'ProductVersion' in plist_content:
456 version = plist_content['ProductVersion']
458 _cache.append(version.split('.'))
462 def _macosx_arch(machine):
463 return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
466 def get_build_platform():
467 """Return this platform's string for platform-specific distributions
469 XXX Currently this is the same as ``distutils.util.get_platform()``, but it
470 needs some hacks for Linux and Mac OS X.
# Prefer sysconfig.get_platform (the try/except ImportError wrapper lines
# are missing from this excerpt), falling back to distutils for old Pythons.
473 # Python 2.7 or >=3.2
474 from sysconfig import get_platform
476 from distutils.util import get_platform
478 plat = get_platform()
# Older macOS Pythons may report a non-'macosx-' tag; rebuild it from the
# OS version and normalized machine arch. The surrounding try/except and
# the final `return plat` are missing from this excerpt.
479 if sys.platform == "darwin" and not plat.startswith('macosx-'):
481 version = _macosx_vers()
482 machine = os.uname()[4].replace(" ", "_")
483 return "macosx-%d.%d-%s" % (
484 int(version[0]), int(version[1]),
485 _macosx_arch(machine),
488 # if someone is running a non-Mac darwin system, this will fall
489 # through to the default implementation
# Regexes for parsing platform tags: macosx-<major>.<minor>-<arch> and the
# legacy darwin-<x>.<y>.<z>-<arch> form used by pre-0.6 setuptools eggs.
494 macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
495 darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
496 # XXX backward compat
497 get_platform = get_build_platform
500 def compatible_platforms(provided, required):
501 """Can code for the `provided` platform run on the `required` platform?
503 Returns true if either platform is ``None``, or the platforms are equal.
505 XXX Needs compatibility checks for Linux and other unixy OSes.
# NOTE(review): every `return True`/`return False` line in this function is
# missing from the excerpt; only the conditions remain visible.
507 if provided is None or required is None or provided == required:
511 # Mac OS X special cases
512 reqMac = macosVersionString.match(required)
514 provMac = macosVersionString.match(provided)
516 # is this a Mac package?
518 # this is backwards compatibility for packages built before
519 # setuptools 0.6. All packages built after this point will
520 # use the new macosx designation.
521 provDarwin = darwinVersionString.match(provided)
# Legacy darwin eggs: darwin 7 ~ OS X 10.3, darwin 8 ~ OS X 10.4.
523 dversion = int(provDarwin.group(1))
524 macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
525 if dversion == 7 and macosversion >= "10.3" or \
526 dversion == 8 and macosversion >= "10.4":
528 # egg isn't macosx or legacy darwin
531 # are they the same major version and machine type?
532 if provMac.group(1) != reqMac.group(1) or \
533 provMac.group(3) != reqMac.group(3):
536 # is the required OS major update >= the provided one?
537 if int(provMac.group(2)) > int(reqMac.group(2)):
542 # XXX Linux and other platforms' special cases should go here
546 def run_script(dist_spec, script_name):
547 """Locate distribution `dist_spec` and run its `script_name` script"""
# Runs the script in the *caller's* global namespace (hence _getframe(1));
# the ns.clear() between capturing and restoring __name__ is missing from
# this excerpt.
548 ns = sys._getframe(1).f_globals
549 name = ns['__name__']
551 ns['__name__'] = name
552 require(dist_spec)[0].run_script(script_name, ns)
555 # backward compatibility
556 run_main = run_script
559 def get_distribution(dist):
560 """Return a current distribution object for a Requirement or string"""
# Normalize: string -> Requirement -> Distribution; anything else of the
# wrong type is rejected. The final `return dist` is missing from this
# excerpt.
561 if isinstance(dist, six.string_types):
562 dist = Requirement.parse(dist)
563 if isinstance(dist, Requirement):
564 dist = get_provider(dist)
565 if not isinstance(dist, Distribution):
566 raise TypeError("Expected string, Requirement, or Distribution", dist)
# Module-level convenience wrapper over Distribution.load_entry_point.
570 def load_entry_point(dist, group, name):
571 """Return `name` entry point of `group` for `dist` or raise ImportError"""
572 return get_distribution(dist).load_entry_point(group, name)
# Module-level convenience wrapper over Distribution.get_entry_map.
575 def get_entry_map(dist, group=None):
576 """Return the entry point map for `group`, or the full entry map"""
577 return get_distribution(dist).get_entry_map(group)
# Module-level convenience wrapper over Distribution.get_entry_info.
580 def get_entry_info(dist, group, name):
581 """Return the EntryPoint object for `group`+`name`, or ``None``"""
582 return get_distribution(dist).get_entry_info(group, name)
# Interface (documentation-only; methods have docstring bodies and no
# `self` by convention in this file) for objects exposing distribution
# metadata.
585 class IMetadataProvider:
586 def has_metadata(name):
587 """Does the package's distribution contain the named metadata?"""
589 def get_metadata(name):
590 """The named metadata resource as a string"""
592 def get_metadata_lines(name):
593 """Yield named metadata resource as list of non-blank non-comment lines
595 Leading and trailing whitespace is stripped from each line, and lines
596 with ``#`` as the first non-blank character are omitted."""
598 def metadata_isdir(name):
599 """Is the named metadata a directory? (like ``os.path.isdir()``)"""
601 def metadata_listdir(name):
602 """List of metadata names in the directory (like ``os.listdir()``)"""
604 def run_script(script_name, namespace):
605 """Execute the named script in the supplied namespace dictionary"""
# Interface extending IMetadataProvider with resource access; resource
# names use '/'-separated paths per the module docstring.
608 class IResourceProvider(IMetadataProvider):
609 """An object that provides access to package resources"""
611 def get_resource_filename(manager, resource_name):
612 """Return a true filesystem path for `resource_name`
614 `manager` must be an ``IResourceManager``"""
616 def get_resource_stream(manager, resource_name):
617 """Return a readable file-like object for `resource_name`
619 `manager` must be an ``IResourceManager``"""
621 def get_resource_string(manager, resource_name):
622 """Return a string containing the contents of `resource_name`
624 `manager` must be an ``IResourceManager``"""
626 def has_resource(resource_name):
627 """Does the package contain the named resource?"""
629 def resource_isdir(resource_name):
630 """Is the named resource a directory? (like ``os.path.isdir()``)"""
632 def resource_listdir(resource_name):
633 """List of resource names in the directory (like ``os.listdir()``)"""
# NOTE(review): this class is the most gap-ridden part of the excerpt —
# decorators (@classmethod), try/except scaffolding, several assignments
# (e.g. ws = cls([]), best = {}, processed = {}) and return statements are
# missing. Comments below describe only what the visible lines show.
636 class WorkingSet(object):
637 """A collection of active distributions on sys.path (or a similar list)"""
639 def __init__(self, entries=None):
640 """Create working set from list of path entries (default=sys.path)"""
# The default-to-sys.path branch and state-dict initializations
# (entries/entry_keys/by_key/callbacks) are missing from this excerpt.
649 for entry in entries:
650 self.add_entry(entry)
# Presumably a classmethod (decorator line missing): builds the module's
# global `working_set`, honoring __main__.__requires__ when present.
653 def _build_master(cls):
655 Prepare the master working set.
659 from __main__ import __requires__
661 # The main program does not list any requirements
664 # ensure the requirements are met
666 ws.require(__requires__)
667 except VersionConflict:
668 return cls._build_from_requirements(__requires__)
673 def _build_from_requirements(cls, req_spec):
675 Build a working set from a requirement spec. Rewrites sys.path.
677 # try it without defaults already on sys.path
678 # by starting with an empty path
680 reqs = parse_requirements(req_spec)
681 dists = ws.resolve(reqs, Environment())
685 # add any missing entries from sys.path
686 for entry in sys.path:
687 if entry not in ws.entries:
690 # then copy back to sys.path
691 sys.path[:] = ws.entries
694 def add_entry(self, entry):
695 """Add a path item to ``.entries``, finding any distributions on it
697 ``find_distributions(entry, True)`` is used to find distributions
698 corresponding to the path entry, and they are added. `entry` is
699 always appended to ``.entries``, even if it is already present.
700 (This is because ``sys.path`` can contain the same value more than
701 once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
704 self.entry_keys.setdefault(entry, [])
705 self.entries.append(entry)
706 for dist in find_distributions(entry, True):
707 self.add(dist, entry, False)
709 def __contains__(self, dist):
710 """True if `dist` is the active distribution for its project"""
711 return self.by_key.get(dist.key) == dist
# find(req): the def line is missing; returns the active dist, None, or
# raises VersionConflict when the active dist fails the requirement.
714 """Find a distribution matching requirement `req`
716 If there is an active distribution for the requested project, this
717 returns it as long as it meets the version requirement specified by
718 `req`. But, if there is an active distribution for the project and it
719 does *not* meet the `req` requirement, ``VersionConflict`` is raised.
720 If there is no active distribution for the requested project, ``None``
723 dist = self.by_key.get(req.key)
724 if dist is not None and dist not in req:
726 raise VersionConflict(dist, req)
729 def iter_entry_points(self, group, name=None):
730 """Yield entry point objects from `group` matching `name`
732 If `name` is None, yields all entry points in `group` from all
733 distributions in the working set, otherwise only ones matching
734 both `group` and `name` are yielded (in distribution order).
736 entries = dist.get_entry_map(group)
737 entries = dist.get_entry_map(group)
739 for ep in entries.values():
741 elif name in entries:
744 def run_script(self, requires, script_name):
745 """Locate distribution for `requires` and run `script_name` script"""
746 ns = sys._getframe(1).f_globals
747 name = ns['__name__']
749 ns['__name__'] = name
750 self.require(requires)[0].run_script(script_name, ns)
# __iter__ (def line missing): yields one active dist per project, in
# entry-insertion order, skipping keys already seen.
753 """Yield distributions for non-duplicate projects in the working set
755 The yield order is the order in which the items' path entries were
756 added to the working set.
759 for item in self.entries:
760 if item not in self.entry_keys:
761 # workaround a cache issue
764 for key in self.entry_keys[item]:
767 yield self.by_key[key]
769 def add(self, dist, entry=None, insert=True, replace=False):
770 """Add `dist` to working set, associated with `entry`
772 If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
773 On exit from this routine, `entry` is added to the end of the working
774 set's ``.entries`` (if it wasn't already present).
776 `dist` is only added to the working set if it's for a project that
777 doesn't already have a distribution in the set, unless `replace=True`.
778 If it's added, any callbacks registered with the ``subscribe()`` method
# insert_on places the dist's location on .entries; the dist is indexed
# under both `entry` and its own location so either lookup finds it.
782 dist.insert_on(self.entries, entry, replace=replace)
785 entry = dist.location
786 keys = self.entry_keys.setdefault(entry, [])
787 keys2 = self.entry_keys.setdefault(dist.location, [])
788 if not replace and dist.key in self.by_key:
789 # ignore hidden distros
792 self.by_key[dist.key] = dist
793 if dist.key not in keys:
794 keys.append(dist.key)
795 if dist.key not in keys2:
796 keys2.append(dist.key)
797 self._added_new(dist)
799 def resolve(self, requirements, env=None, installer=None,
800 replace_conflicting=False, extras=None):
801 """List all distributions needed to (recursively) meet `requirements`
803 `requirements` must be a sequence of ``Requirement`` objects. `env`,
804 if supplied, should be an ``Environment`` instance. If
805 not supplied, it defaults to all distributions available within any
806 entry or distribution in the working set. `installer`, if supplied,
807 will be invoked with each requirement that cannot be met by an
808 already-installed distribution; it should return a ``Distribution`` or
811 Unless `replace_conflicting=True`, raises a VersionConflict exception
813 any requirements are found on the path that have the correct name but
814 the wrong version. Otherwise, if an `installer` is supplied it will be
815 invoked to obtain the correct version of the requirement and activate
818 `extras` is a list of the extras to be used with these requirements.
819 This is important because extra requirements may look like `my_req;
820 extra = "my_extra"`, which would otherwise be interpreted as a purely
821 optional requirement. Instead, we want to be able to assert that these
822 requirements are truly required.
# Requirements are treated as a stack (reversed so pop(0)/extend keeps
# rough breadth-first order); `processed`, `best`, `to_activate` setup
# lines are missing from this excerpt.
826 requirements = list(requirements)[::-1]
827 # set of processed requirements
833 req_extras = _ReqExtras()
835 # Mapping of requirement to set of distributions that required it;
836 # useful for reporting info about conflicts.
837 required_by = collections.defaultdict(set)
840 # process dependencies breadth-first
841 req = requirements.pop(0)
843 # Ignore cyclic or redundant dependencies
846 if not req_extras.markers_pass(req, extras):
849 dist = best.get(req.key)
851 # Find the best distribution and add it to the map
852 dist = self.by_key.get(req.key)
853 if dist is None or (dist not in req and replace_conflicting):
857 env = Environment(self.entries)
859 # Use an empty environment and workingset to avoid
860 # any further conflicts with the conflicting
862 env = Environment([])
864 dist = best[req.key] = env.best_match(
866 replace_conflicting=replace_conflicting
869 requirers = required_by.get(req, None)
870 raise DistributionNotFound(req, requirers)
871 to_activate.append(dist)
873 # Oops, the "best" so far conflicts with a dependency
874 dependent_req = required_by[req]
875 raise VersionConflict(dist, req).with_context(dependent_req)
877 # push the new requirements onto the stack
878 new_requirements = dist.requires(req.extras)[::-1]
879 requirements.extend(new_requirements)
881 # Register the new requirements needed by req
882 for new_requirement in new_requirements:
883 required_by[new_requirement].add(req.project_name)
884 req_extras[new_requirement] = req.extras
886 processed[req] = True
888 # return list of distros to activate
# find_plugins (def line missing; only the continuation of its signature
# is visible below).
892 self, plugin_env, full_env=None, installer=None, fallback=True):
893 """Find all activatable distributions in `plugin_env`
897 distributions, errors = working_set.find_plugins(
898 Environment(plugin_dirlist)
900 # add plugins+libs to sys.path
901 map(working_set.add, distributions)
903 print('Could not load', errors)
905 The `plugin_env` should be an ``Environment`` instance that contains
906 only distributions that are in the project's "plugin directory" or
907 directories. The `full_env`, if supplied, should be an ``Environment``
908 contains all currently-available distributions. If `full_env` is not
909 supplied, one is created automatically from the ``WorkingSet`` this
910 method is called on, which will typically mean that every directory on
911 ``sys.path`` will be scanned for distributions.
913 `installer` is a standard installer callback as used by the
914 ``resolve()`` method. The `fallback` flag indicates whether we should
915 attempt to resolve older versions of a plugin if the newest version
918 This method returns a 2-tuple: (`distributions`, `error_info`), where
919 `distributions` is a list of the distributions found in `plugin_env`
920 that were loadable, along with any other distributions that are needed
921 to resolve their dependencies. `error_info` is a dictionary mapping
922 unloadable plugin distributions to an exception instance describing the
923 error that occurred. Usually this will be a ``DistributionNotFound`` or
924 ``VersionConflict`` instance.
927 plugin_projects = list(plugin_env)
928 # scan project names in alphabetic order
929 plugin_projects.sort()
935 env = Environment(self.entries)
938 env = full_env + plugin_env
# Resolution is trialed in a shadow copy of this working set so failures
# don't mutate self; only successful resolvees are collected.
940 shadow_set = self.__class__([])
941 # put all our entries in shadow_set
942 list(map(shadow_set.add, self))
944 for project_name in plugin_projects:
946 for dist in plugin_env[project_name]:
948 req = [dist.as_requirement()]
951 resolvees = shadow_set.resolve(req, env, installer)
953 except ResolutionError as v:
957 # try the next older version of project
960 # give up on this project, keep going
964 list(map(shadow_set.add, resolvees))
965 distributions.update(dict.fromkeys(resolvees))
967 # success, no need to try any more versions of this project
970 distributions = list(distributions)
973 return distributions, error_info
975 def require(self, *requirements):
976 """Ensure that distributions matching `requirements` are activated
978 `requirements` must be a string or a (possibly-nested) sequence
979 thereof, specifying the distributions and versions required. The
980 return value is a sequence of the distributions that needed to be
981 activated to fulfill the requirements; all relevant distributions are
982 included, even if they were already activated in this working set.
# The activation loop and `return needed` are missing from this excerpt.
984 needed = self.resolve(parse_requirements(requirements))
991 def subscribe(self, callback, existing=True):
992 """Invoke `callback` for all distributions
994 If `existing=True` (default),
995 call on all existing ones, as well.
# Idempotent registration: an already-subscribed callback is not re-added
# (the early return after the membership test is missing here).
997 if callback in self.callbacks:
999 self.callbacks.append(callback)
1005 def _added_new(self, dist):
1006 for callback in self.callbacks:
# Pickle-style state round-trips the four collections by shallow copy.
1009 def __getstate__(self):
1011 self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
1015 def __setstate__(self, e_k_b_c):
1016 entries, keys, by_key, callbacks = e_k_b_c
1017 self.entries = entries[:]
1018 self.entry_keys = keys.copy()
1019 self.by_key = by_key.copy()
1020 self.callbacks = callbacks[:]
1023 class _ReqExtras(dict):
1025 Map each requirement to the extras that demanded it.
1028 def markers_pass(self, req, extras=None):
1030 Evaluate markers for req against each extra that
1033 Return False if the req has a marker and fails
1034 evaluation. Otherwise, return True.
# Evaluates the marker once per candidate extra (recorded extras for this
# req plus any supplied `extras`, defaulting to a single None); the
# generator-expression assignment line is partially missing from this
# excerpt.
1037 req.marker.evaluate({'extra': extra})
1038 for extra in self.get(req, ()) + (extras or (None,))
1040 return not req.marker or any(extra_evals)
1043 class Environment(object):
1044 """Searchable snapshot of distributions on a search path"""
# __init__ signature continuation (the `def __init__(` line and the
# `python=PY_MAJOR` default are missing from this excerpt). NOTE(review):
# `platform=get_supported_platform()` is evaluated once at class
# definition time — intentional here, but worth knowing.
1047 self, search_path=None, platform=get_supported_platform(),
1049 """Snapshot distributions available on a search path
1051 Any distributions found on `search_path` are added to the environment.
1052 `search_path` should be a sequence of ``sys.path`` items. If not
1053 supplied, ``sys.path`` is used.
1055 `platform` is an optional string specifying the name of the platform
1056 that platform-specific distributions must be compatible with. If
1057 unspecified, it defaults to the current platform. `python` is an
1058 optional string naming the desired version of Python (e.g. ``'3.3'``);
1059 it defaults to the current version.
1061 You may explicitly set `platform` (and/or `python`) to ``None`` if you
1062 wish to map *all* distributions, not just those compatible with the
1063 running platform or Python version.
# The self._distmap = {} initialization line is missing from this excerpt.
1066 self.platform = platform
1067 self.python = python
1068 self.scan(search_path)
1070 def can_add(self, dist):
1071 """Is distribution `dist` acceptable for this environment?
1073 The distribution must match the platform and python version
1074 requirements specified when this environment was created, or False
# A None python/py_version on either side is treated as a wildcard.
1077 return (self.python is None or dist.py_version is None
1078 or dist.py_version == self.python) \
1079 and compatible_platforms(dist.platform, self.platform)
1081 def remove(self, dist):
1082 """Remove `dist` from the environment"""
1083 self._distmap[dist.key].remove(dist)
1085 def scan(self, search_path=None):
1086 """Scan `search_path` for distributions usable in this environment
1088 Any distributions found are added to the environment.
1089 `search_path` should be a sequence of ``sys.path`` items. If not
1090 supplied, ``sys.path`` is used. Only distributions conforming to
1091 the platform/python version defined at initialization are added.
1093 if search_path is None:
1094 search_path = sys.path
1096 for item in search_path:
1097 for dist in find_distributions(item):
1100 def __getitem__(self, project_name):
1101 """Return a newest-to-oldest list of distributions for `project_name`
1103 Uses case-insensitive `project_name` comparison, assuming all the
1104 project's distributions use their project's name converted to all
1105 lowercase as their key.
1108 distribution_key = project_name.lower()
1109 return self._distmap.get(distribution_key, [])
1111 def add(self, dist):
1112 """Add `dist` if we ``can_add()`` it and it has not already been added
# Per-key list is kept sorted newest-first by `hashcmp`; the
# dists.append(dist) line is missing from this excerpt.
1114 if self.can_add(dist) and dist.has_version():
1115 dists = self._distmap.setdefault(dist.key, [])
1116 if dist not in dists:
1118 dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
# best_match signature continuation (its `def best_match(` line is
# missing from this excerpt).
1121 self, req, working_set, installer=None, replace_conflicting=False):
1122 """Find distribution best matching `req` and usable on `working_set`
1124 This calls the ``find(req)`` method of the `working_set` to see if a
1125 suitable distribution is already active. (This may raise
1126 ``VersionConflict`` if an unsuitable version of the project is already
1127 active in the specified `working_set`.) If a suitable distribution
1128 isn't active, this method returns the newest distribution in the
1129 environment that meets the ``Requirement`` in `req`. If no suitable
1130 distribution is found, and `installer` is supplied, then the result of
1131 calling the environment's ``obtain(req, installer)`` method will be
# The try:, re-raise, return, and in-req check lines are missing from
# this excerpt.
1135 dist = working_set.find(req)
1136 except VersionConflict:
1137 if not replace_conflicting:
1140 if dist is not None:
1142 for dist in self[req.key]:
1145 # try to download/install
1146 return self.obtain(req, installer)
1148 def obtain(self, requirement, installer=None):
1149 """Obtain a distribution matching `requirement` (e.g. via download)
1151 Obtain a distro that matches requirement (e.g. via download). In the
1152 base ``Environment`` class, this routine just returns
1153 ``installer(requirement)``, unless `installer` is None, in which case
1154 None is returned instead. This method is a hook that allows subclasses
1155 to attempt other ways of obtaining a distribution before falling back
1156 to the `installer` argument."""
1157 if installer is not None:
1158 return installer(requirement)
# __iter__ (def line missing): yields keys whose dist lists are non-empty.
1161 """Yield the unique project names of the available distributions"""
1162 for key in self._distmap.keys():
1166 def __iadd__(self, other):
1167 """In-place addition of a distribution or environment"""
1168 if isinstance(other, Distribution):
1170 elif isinstance(other, Environment):
1171 for project in other:
1172 for dist in other[project]:
1175 raise TypeError("Can't add %r to environment" % (other,))
1178 def __add__(self, other):
1179 """Add an environment or distribution to an environment"""
# A fresh unrestricted (platform=None, python=None) environment absorbs
# both operands; the `new += env` / `return new` lines are missing here.
1180 new = self.__class__([], platform=None, python=None)
1181 for env in self, other:
# Deprecated alias kept for pre-Environment-rename callers.
1186 # XXX backward compatibility
1187 AvailableDistributions = Environment
# Raised by ResourceManager when a resource cannot be extracted to the egg
# cache; carries the manager, cache path, and original exception (the
# attribute-name lines between the descriptions are missing from this
# excerpt).
1190 class ExtractionError(RuntimeError):
1191 """An error occurred extracting a resource
1193 The following attributes are available from instances of this exception:
1196 The resource manager that raised this exception
1199 The base directory for resource extraction
1202 The exception instance that caused extraction to fail
1206 class ResourceManager:
1207 """Manage resource extraction and packages"""
1208 extraction_path = None
1211 self.cached_files = {}
1213 def resource_exists(self, package_or_requirement, resource_name):
1214 """Does the named resource exist?"""
1215 return get_provider(package_or_requirement).has_resource(resource_name)
1217 def resource_isdir(self, package_or_requirement, resource_name):
1218 """Is the named resource an existing directory?"""
1219 return get_provider(package_or_requirement).resource_isdir(
1223 def resource_filename(self, package_or_requirement, resource_name):
1224 """Return a true filesystem path for specified resource"""
1225 return get_provider(package_or_requirement).get_resource_filename(
1229 def resource_stream(self, package_or_requirement, resource_name):
1230 """Return a readable file-like object for specified resource"""
1231 return get_provider(package_or_requirement).get_resource_stream(
1235 def resource_string(self, package_or_requirement, resource_name):
1236 """Return specified resource as a string"""
1237 return get_provider(package_or_requirement).get_resource_string(
1241 def resource_listdir(self, package_or_requirement, resource_name):
1242 """List the contents of the named resource directory"""
1243 return get_provider(package_or_requirement).resource_listdir(
1247 def extraction_error(self):
1248 """Give an error message for problems extracting file(s)"""
1250 old_exc = sys.exc_info()[1]
1251 cache_path = self.extraction_path or get_default_cache()
1253 tmpl = textwrap.dedent("""
1254 Can't extract file(s) to egg cache
1256 The following error occurred while trying to extract file(s)
1257 to the Python egg cache:
1261 The Python egg cache directory is currently set to:
1265 Perhaps your account does not have write access to this directory?
1266 You can change the cache directory by setting the PYTHON_EGG_CACHE
1267 environment variable to point to an accessible directory.
1269 err = ExtractionError(tmpl.format(**locals()))
1271 err.cache_path = cache_path
1272 err.original_error = old_exc
1275 def get_cache_path(self, archive_name, names=()):
1276 """Return absolute location in cache for `archive_name` and `names`
1278 The parent directory of the resulting path will be created if it does
1279 not already exist. `archive_name` should be the base filename of the
1280 enclosing egg (which may not be the name of the enclosing zipfile!),
1281 including its ".egg" extension. `names`, if provided, should be a
1282 sequence of path name parts "under" the egg's extraction location.
1284 This method should only be called by resource providers that need to
1285 obtain an extraction location, and only for names they intend to
1286 extract, as it tracks the generated names for possible cleanup later.
1288 extract_path = self.extraction_path or get_default_cache()
1289 target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
1291 _bypass_ensure_directory(target_path)
1293 self.extraction_error()
1295 self._warn_unsafe_extraction_path(extract_path)
1297 self.cached_files[target_path] = 1
1301 def _warn_unsafe_extraction_path(path):
1303 If the default extraction path is overridden and set to an insecure
1304 location, such as /tmp, it opens up an opportunity for an attacker to
1305 replace an extracted file with an unauthorized payload. Warn the user
1306 if a known insecure location is used.
1308 See Distribute #375 for more details.
1310 if os.name == 'nt' and not path.startswith(os.environ['windir']):
1311 # On Windows, permissions are generally restrictive by default
1312 # and temp directories are not writable by other users, so
1313 # bypass the warning.
1315 mode = os.stat(path).st_mode
1316 if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
1318 "%s is writable by group/others and vulnerable to attack "
1320 "used with get_resource_filename. Consider a more secure "
1321 "location (set with .set_extraction_path or the "
1322 "PYTHON_EGG_CACHE environment variable)." % path
1324 warnings.warn(msg, UserWarning)
1326 def postprocess(self, tempname, filename):
1327 """Perform any platform-specific postprocessing of `tempname`
1329 This is where Mac header rewrites should be done; other platforms don't
1330 have anything special they should do.
1332 Resource providers should call this method ONLY after successfully
1333 extracting a compressed resource. They must NOT call it on resources
1334 that are already in the filesystem.
1336 `tempname` is the current (temporary) name of the file, and `filename`
1337 is the name it will be renamed to by the caller after this routine
1341 if os.name == 'posix':
1342 # Make the resource executable
1343 mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
1344 os.chmod(tempname, mode)
1346 def set_extraction_path(self, path):
1347 """Set the base path where resources will be extracted to, if needed.
1349 If you do not call this routine before any extractions take place, the
1350 path defaults to the return value of ``get_default_cache()``. (Which
1351 is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
1352 platform-specific fallbacks. See that routine's documentation for more
1355 Resources are extracted to subdirectories of this path based upon
1356 information given by the ``IResourceProvider``. You may set this to a
1357 temporary directory, but then you must call ``cleanup_resources()`` to
1358 delete the extracted files when done. There is no guarantee that
1359 ``cleanup_resources()`` will be able to remove all extracted files.
1361 (Note: you may not change the extraction path for a given resource
1362 manager once resources have been extracted, unless you first call
1363 ``cleanup_resources()``.)
1365 if self.cached_files:
1367 "Can't change extraction path, files already extracted"
1370 self.extraction_path = path
1372 def cleanup_resources(self, force=False):
1374 Delete all extracted resource files and directories, returning a list
1375 of the file and directory names that could not be successfully removed.
1376 This function does not have any concurrency protection, so it should
1377 generally only be called when the extraction path is a temporary
1378 directory exclusive to a single process. This method is not
1379 automatically called; you must call it explicitly or register it as an
1380 ``atexit`` function if you wish to ensure cleanup of a temporary
1381 directory used for extractions.
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    return (
        os.environ.get('PYTHON_EGG_CACHE')
        or appdirs.user_cache_dir(appname='Python-Eggs')
    )
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    cleaned = re.sub('[^A-Za-z0-9.]+', '-', name)
    return cleaned
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string.

    Valid PEP 440 versions are normalized; anything else falls back to a
    legacy dash-separated form.
    """
    try:
        # normalize the version
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        version = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', version)
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # Strip position info that is meaningless for a one-line marker.
        e.filename = None
        e.lineno = None
        return e
    return False
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        marker = packaging.markers.Marker(text)
        return marker.evaluate()
    except packaging.markers.InvalidMarker as e:
        # Callers (see invalid_marker) rely on SyntaxError specifically.
        raise SyntaxError(e)
1465 """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
1471 def __init__(self, module):
1472 self.loader = getattr(module, '__loader__', None)
1473 self.module_path = os.path.dirname(getattr(module, '__file__', ''))
1475 def get_resource_filename(self, manager, resource_name):
1476 return self._fn(self.module_path, resource_name)
1478 def get_resource_stream(self, manager, resource_name):
1479 return io.BytesIO(self.get_resource_string(manager, resource_name))
1481 def get_resource_string(self, manager, resource_name):
1482 return self._get(self._fn(self.module_path, resource_name))
1484 def has_resource(self, resource_name):
1485 return self._has(self._fn(self.module_path, resource_name))
1487 def has_metadata(self, name):
1488 return self.egg_info and self._has(self._fn(self.egg_info, name))
1490 def get_metadata(self, name):
1491 if not self.egg_info:
1493 value = self._get(self._fn(self.egg_info, name))
1494 return value.decode('utf-8') if six.PY3 else value
1496 def get_metadata_lines(self, name):
1497 return yield_lines(self.get_metadata(name))
1499 def resource_isdir(self, resource_name):
1500 return self._isdir(self._fn(self.module_path, resource_name))
1502 def metadata_isdir(self, name):
1503 return self.egg_info and self._isdir(self._fn(self.egg_info, name))
1505 def resource_listdir(self, resource_name):
1506 return self._listdir(self._fn(self.module_path, resource_name))
1508 def metadata_listdir(self, name):
1510 return self._listdir(self._fn(self.egg_info, name))
1513 def run_script(self, script_name, namespace):
1514 script = 'scripts/' + script_name
1515 if not self.has_metadata(script):
1516 raise ResolutionError(
1517 "Script {script!r} not found in metadata at {self.egg_info!r}"
1518 .format(**locals()),
1520 script_text = self.get_metadata(script).replace('\r\n', '\n')
1521 script_text = script_text.replace('\r', '\n')
1522 script_filename = self._fn(self.egg_info, script)
1523 namespace['__file__'] = script_filename
1524 if os.path.exists(script_filename):
1525 source = open(script_filename).read()
1526 code = compile(source, script_filename, 'exec')
1527 exec(code, namespace, namespace)
1529 from linecache import cache
1530 cache[script_filename] = (
1531 len(script_text), 0, script_text.split('\n'), script_filename
1533 script_code = compile(script_text, script_filename, 'exec')
1534 exec(script_code, namespace, namespace)
1536 def _has(self, path):
1537 raise NotImplementedError(
1538 "Can't perform this operation for unregistered loader type"
1541 def _isdir(self, path):
1542 raise NotImplementedError(
1543 "Can't perform this operation for unregistered loader type"
1546 def _listdir(self, path):
1547 raise NotImplementedError(
1548 "Can't perform this operation for unregistered loader type"
1551 def _fn(self, base, resource_name):
1553 return os.path.join(base, *resource_name.split('/'))
1556 def _get(self, path):
1557 if hasattr(self.loader, 'get_data'):
1558 return self.loader.get_data(path)
1559 raise NotImplementedError(
1560 "Can't perform this operation for loaders without 'get_data()'"
1564 register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        # Walk up the path until it stops changing, looking for a *.egg dir.
        while path != old:
            if _is_egg_path(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Register for the importlib source loader; fall back to a type
        # that will never match if importlib machinery is unavailable.
        loader_cls = getattr(
            importlib_machinery,
            'SourceFileLoader',
            type(None),
        )
        register_loader_type(loader_cls, cls)


DefaultProvider._register()
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    _isdir = _has = lambda self, path: False
    _get = lambda self, path: ''
    _listdir = lambda self, path: []
    # No backing module; metadata/resource paths resolve relative to nothing.
    module_path = None

    def __init__(self):
        pass


empty_provider = EmptyProvider()
1636 class ZipManifests(dict):
1638 zip manifest builder
1642 def build(cls, path):
1644 Build a dictionary similar to the zipimport directory
1645 caches, except instead of tuples, store ZipInfo objects.
1647 Use a platform-specific path separator (os.sep) for the path keys
1648 for compatibility with pypy on Windows.
1650 with zipfile.ZipFile(path) as zfile:
1653 name.replace('/', os.sep),
1654 zfile.getinfo(name),
1656 for name in zfile.namelist()
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests, invalidated by file mtime.
    """
    # (manifest dict, mtime at load time)
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # Rebuild only when not cached or the zip changed on disk.
        if path not in self or self[path].mtime != mtime:
            manifest = self.build(path)
            self[path] = self.manifest_mod(manifest, mtime)

        return self[path].manifest
1683 class ZipProvider(EggProvider):
1684 """Resource support for zips and eggs"""
1687 _zip_manifests = MemoizedZipManifests()
1689 def __init__(self, module):
1690 EggProvider.__init__(self, module)
1691 self.zip_pre = self.loader.archive + os.sep
1693 def _zipinfo_name(self, fspath):
1694 # Convert a virtual filename (full path to file) into a zipfile subpath
1695 # usable with the zipimport directory cache for our target archive
1696 fspath = fspath.rstrip(os.sep)
1697 if fspath == self.loader.archive:
1699 if fspath.startswith(self.zip_pre):
1700 return fspath[len(self.zip_pre):]
1701 raise AssertionError(
1702 "%s is not a subpath of %s" % (fspath, self.zip_pre)
1705 def _parts(self, zip_path):
1706 # Convert a zipfile subpath into an egg-relative path part list.
1708 fspath = self.zip_pre + zip_path
1709 if fspath.startswith(self.egg_root + os.sep):
1710 return fspath[len(self.egg_root) + 1:].split(os.sep)
1711 raise AssertionError(
1712 "%s is not a subpath of %s" % (fspath, self.egg_root)
1717 return self._zip_manifests.load(self.loader.archive)
1719 def get_resource_filename(self, manager, resource_name):
1720 if not self.egg_name:
1721 raise NotImplementedError(
1722 "resource_filename() only supported for .egg, not .zip"
1724 # no need to lock for extraction, since we use temp names
1725 zip_path = self._resource_to_zip(resource_name)
1726 eagers = self._get_eager_resources()
1727 if '/'.join(self._parts(zip_path)) in eagers:
1729 self._extract_resource(manager, self._eager_to_zip(name))
1730 return self._extract_resource(manager, zip_path)
1733 def _get_date_and_size(zip_stat):
1734 size = zip_stat.file_size
1735 # ymdhms+wday, yday, dst
1736 date_time = zip_stat.date_time + (0, 0, -1)
1737 # 1980 offset already done
1738 timestamp = time.mktime(date_time)
1739 return timestamp, size
1741 def _extract_resource(self, manager, zip_path):
1743 if zip_path in self._index():
1744 for name in self._index()[zip_path]:
1745 last = self._extract_resource(
1746 manager, os.path.join(zip_path, name)
1748 # return the extracted directory name
1749 return os.path.dirname(last)
1751 timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1753 if not WRITE_SUPPORT:
1754 raise IOError('"os.rename" and "os.unlink" are not supported '
1758 real_path = manager.get_cache_path(
1759 self.egg_name, self._parts(zip_path)
1762 if self._is_current(real_path, zip_path):
1765 outf, tmpnam = _mkstemp(
1767 dir=os.path.dirname(real_path),
1769 os.write(outf, self.loader.get_data(zip_path))
1771 utime(tmpnam, (timestamp, timestamp))
1772 manager.postprocess(tmpnam, real_path)
1775 rename(tmpnam, real_path)
1778 if os.path.isfile(real_path):
1779 if self._is_current(real_path, zip_path):
1780 # the file became current since it was checked above,
1783 # Windows, del old file and retry
1784 elif os.name == 'nt':
1786 rename(tmpnam, real_path)
1791 # report a user-friendly error
1792 manager.extraction_error()
1796 def _is_current(self, file_path, zip_path):
1798 Return True if the file_path is current for this zip_path
1800 timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1801 if not os.path.isfile(file_path):
1803 stat = os.stat(file_path)
1804 if stat.st_size != size or stat.st_mtime != timestamp:
1806 # check that the contents match
1807 zip_contents = self.loader.get_data(zip_path)
1808 with open(file_path, 'rb') as f:
1809 file_contents = f.read()
1810 return zip_contents == file_contents
1812 def _get_eager_resources(self):
1813 if self.eagers is None:
1815 for name in ('native_libs.txt', 'eager_resources.txt'):
1816 if self.has_metadata(name):
1817 eagers.extend(self.get_metadata_lines(name))
1818 self.eagers = eagers
1823 return self._dirindex
1824 except AttributeError:
1826 for path in self.zipinfo:
1827 parts = path.split(os.sep)
1829 parent = os.sep.join(parts[:-1])
1831 ind[parent].append(parts[-1])
1834 ind[parent] = [parts.pop()]
1835 self._dirindex = ind
1838 def _has(self, fspath):
1839 zip_path = self._zipinfo_name(fspath)
1840 return zip_path in self.zipinfo or zip_path in self._index()
1842 def _isdir(self, fspath):
1843 return self._zipinfo_name(fspath) in self._index()
1845 def _listdir(self, fspath):
1846 return list(self._index().get(self._zipinfo_name(fspath), ()))
1848 def _eager_to_zip(self, resource_name):
1849 return self._zipinfo_name(self._fn(self.egg_root, resource_name))
1851 def _resource_to_zip(self, resource_name):
1852 return self._zipinfo_name(self._fn(self.module_path, resource_name))
1855 register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        # Python 2.7 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char in metadata:
            tmpl = "{self.path} could not be properly decoded in UTF-8"
            msg = tmpl.format(**locals())
            warnings.warn(msg)

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # A non-empty prefix means the egg is nested inside the archive.
        if importer.prefix:
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
1937 _declare_state('dict', _distribution_finders={})
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type replace earlier ones.
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    find = _find_adapter(_distribution_finders, importer)
    return find(importer, path_item, only)
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir('/'):
        if _is_egg_path(subitem):
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
1984 register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False):
    """Finder fallback for importer types with no distribution support."""
    return ()
1991 register_finder(object, find_nothing)
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """
        Parse each component of the filename
        """
        name, ext = os.path.splitext(name)
        parts = itertools.chain(name.split('-'), [ext])
        return [packaging.version.parse(part) for part in parts]

    return sorted(names, key=_by_version, reverse=True)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = safe_listdir(path_item)

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
def dist_factory(path_item, entry, only):
    """
    Return a dist_factory for a path_item and entry
    """
    lower = entry.lower()
    is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info')))
    return (
        distributions_from_metadata
        if is_meta else
        find_distributions
        if not only and _is_egg_path(entry) else
        resolve_egg_link
        if not only and lower.endswith('.egg-link') else
        NoDists()
    )
2074 >>> list(NoDists()('anything'))
2080 __nonzero__ = __bool__
2082 def __call__(self, fullpath):
def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.

    Returns an empty tuple when the path is missing, not a directory,
    or not readable; re-raises any other OSError.
    """
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        pass
    except OSError as e:
        # Ignore the directory if does not exist, not a directory or
        # permission denied
        ignorable = (
            e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
            # Python 2 on Windows needs to be handled this way :(
            or getattr(e, "winerror", None) == 267
        )
        if not ignorable:
            raise
    return ()
def distributions_from_metadata(path):
    """Yield a develop-precedence Distribution from a metadata file or dir."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if len(os.listdir(path)) == 0:
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )
def non_empty_lines(path):
    """
    Yield non-empty (after stripping whitespace) lines from file at path.
    """
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line:
                yield line
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    referenced_paths = non_empty_lines(path)
    # .egg-link contents are relative to the link's own directory.
    resolved_paths = (
        os.path.join(os.path.dirname(path), ref)
        for ref in referenced_paths
    )
    dist_groups = map(find_distributions, resolved_paths)
    return next(dist_groups, ())
2147 register_finder(pkgutil.ImpImporter, find_on_path)
2149 if hasattr(importlib_machinery, 'FileFinder'):
2150 register_finder(importlib_machinery.FileFinder, find_on_path)
2152 _declare_state('dict', _namespace_handlers={})
2153 _declare_state('dict', _namespace_packages={})
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Later registrations for the same importer type replace earlier ones.
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a placeholder namespace module on first sight.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # Entries not on sys.path sort last.
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    if not isinstance(orig_path, list):
        # Is this behavior useful when module.__path__ is not a list?
        return

    orig_path.sort(key=position_in_sys_path)
    module.__path__[:] = [_normalize_cached(p) for p in orig_path]
2233 def declare_namespace(packageName):
2234 """Declare that package 'packageName' is a namespace package"""
2238 if packageName in _namespace_packages:
2241 path, parent = sys.path, None
2242 if '.' in packageName:
2243 parent = '.'.join(packageName.split('.')[:-1])
2244 declare_namespace(parent)
2245 if parent not in _namespace_packages:
2248 path = sys.modules[parent].__path__
2249 except AttributeError:
2250 raise TypeError("Not a package:", parent)
2252 # Track what packages are namespaces, so when new path items are added,
2253 # they can be updated
2254 _namespace_packages.setdefault(parent, []).append(packageName)
2255 _namespace_packages.setdefault(packageName, [])
2257 for path_item in path:
2258 # Ensure all the parent's path items are reflected in the child,
2260 _handle_ns(packageName, path_item)
2266 def fixup_namespace_packages(path_item, parent=None):
2267 """Ensure that previously-declared namespace packages include path_item"""
2270 for package in _namespace_packages.get(parent, ()):
2271 subpath = _handle_ns(package, path_item)
2273 fixup_namespace_packages(subpath, package)
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item) == normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath
2291 register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
2292 register_namespace_handler(zipimport.zipimporter, file_ns_handler)
2294 if hasattr(importlib_machinery, 'FileFinder'):
2295 register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: never contributes a subpath."""
    return None
2302 register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
def _normalize_cached(filename, _cache={}):
    """Memoized ``normalize_path``.

    The mutable default argument is an intentional process-wide cache.
    """
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result
def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.
    """
    return path.lower().endswith('.egg')
def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg.
    """
    return (
        _is_egg_path(path) and
        os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO'))
    )
def _set_parent_ns(packageName):
    """Bind sys.modules[packageName] as an attribute of its parent package."""
    parts = packageName.split('.')
    name = parts.pop()
    if parts:
        parent = '.'.join(parts)
        setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for s in strs.splitlines():
            s = s.strip()
            # skip blank lines/comments
            if s and not s.startswith('#'):
                yield s
    else:
        # sequence of strings (or nested sequences): recurse per element
        for ss in strs:
            for s in yield_lines(ss):
                yield s
2357 MODULE = re.compile(r"\w+(\.\w+)*$").match
2358 EGG_NAME = re.compile(
2362 -py(?P<pyver>[^-]+) (
2368 re.VERBOSE | re.IGNORECASE,
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # 'name = module:attr.path [extra1,extra2]'
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        """Resolve and activate this entry point's requirements."""
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # Parse '[a,b]' by grafting it onto a dummy requirement name.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2504 def _remove_md5_fragment(location):
2507 parsed = urllib.parse.urlparse(location)
2508 if parsed[-1].startswith('md5='):
2509 return urllib.parse.urlunparse(parsed[:-1] + ('',))
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    for line in lines:
        # headers are matched case-insensitively
        if line.lower().startswith('version:'):
            _, _, value = line.partition(':')
            return safe_version(value.strip()) or None
    return None
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""

    # Name of the metadata file; subclasses override (e.g. 'METADATA').
    PKG_INFO = 'PKG-INFO'

    def __init__(
            self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        """Build a Distribution (of the right subclass) from a path entry."""
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()

    def _reload_version(self):
        # hook for subclasses to re-derive the version from metadata
        return self

    @property
    def hashcmp(self):
        # tuple used for hashing and ordering distributions
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )

    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    @property
    def key(self):
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key

    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

    def _warn_legacy_version(self):
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # fall back to the metadata file's 'Version:' header
            version = _version_from_file(self._get_metadata(self.PKG_INFO))
            if version is None:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)
            return version

    @property
    def _dep_map(self):
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra, reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        if ':' in extra:
                            # 'extra:marker' section: drop reqs whose
                            # environment marker is invalid or false
                            extra, marker = extra.split(':', 1)
                            if invalid_marker(marker):
                                # XXX warn
                                reqs = []
                            elif not evaluate_marker(marker):
                                reqs = []
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra, []).extend(parse_requirements(reqs))
            return dm

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self, name):
        # yields nothing when the metadata file is absent
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # legacy versions need the arbitrary-equality operator
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

    def check_version_conflict(self):
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True

    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    @property
    def extras(self):
        return [dep for dep in self._dep_map if dep]
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        """
        md_version = _version_from_file(self._get_metadata(self.PKG_INFO))
        if md_version:
            self._version = md_version
        return self
class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # lazily computed; cached on first access
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # requirements active when `extra` is requested
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
# Map metadata extension to the Distribution subclass that reads it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module."""
    level = 1
    module_globals = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is module_globals:
            level += 1
    except ValueError:
        # ran off the top of the stack; warn from wherever we ended up
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""

    def __str__(self):
        return ' '.join(self.args)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        line = line.partition(' #')[0]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # precomputed tuple used for equality and hashing
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        req, = parse_requirements(s)
        return req
3051 def _always_object(classes):
3053 Ensure object appears in the mro even
3054 for old-style classes.
3056 if object not in classes:
3057 return classes + (object,)
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # walk the MRO (most specific first) and take the first registered type;
    # None when nothing in the MRO is registered
    mro = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    return next((registry[t] for t in mro if t in registry), None)
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    # py31compat.makedirs provides exist_ok semantics on old Pythons
    py31compat.makedirs(parent, exist_ok=True)
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    parent, leaf = split(path)
    if parent and leaf and not isdir(parent):
        # create ancestors first, then the immediate parent
        _bypass_ensure_directory(parent)
        mkdir(parent, 0o755)
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous section before starting a new one
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content
3111 def _mkstemp(*args, **kw):
3114 # temporarily bypass sandboxing
3116 return tempfile.mkstemp(*args, **kw)
3118 # and then put it back
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3129 # from jaraco.functools 1.3
3130 def _call_aside(f, *args, **kwargs):
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # re-export every public ResourceManager method at module level
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # NOTE: these local names are intentionally published to the module
    # namespace by the trailing globals().update(locals()) call, so they
    # must keep their public spellings.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    globals().update(locals())