diff --git a/.gitignore b/.gitignore index 7efeda1189b..71fa511b725 100644 --- a/.gitignore +++ b/.gitignore @@ -10,4 +10,4 @@ build/* *.pyo *.~ .tox - +env/* diff --git a/pip/baseparser.py b/pip/baseparser.py index 1803db202ae..7eac453c9db 100644 --- a/pip/baseparser.py +++ b/pip/baseparser.py @@ -2,7 +2,7 @@ import sys import optparse -import pkg_resources +from pip.compat import pkg_resources import os import textwrap from distutils.util import strtobool diff --git a/pip/commands/freeze.py b/pip/commands/freeze.py index 9c2ab7239ff..3bfaa1dbf5a 100644 --- a/pip/commands/freeze.py +++ b/pip/commands/freeze.py @@ -1,6 +1,6 @@ import re import sys -import pkg_resources +from pip.compat import pkg_resources import pip from pip.req import InstallRequirement from pip.log import logger diff --git a/pip/commands/search.py b/pip/commands/search.py index 892eddd12f7..27ae6c6a2f2 100644 --- a/pip/commands/search.py +++ b/pip/commands/search.py @@ -1,6 +1,6 @@ import sys import textwrap -import pkg_resources +from pip.compat import pkg_resources import pip.download from pip.basecommand import Command, SUCCESS from pip.util import get_terminal_size diff --git a/pip/commands/show.py b/pip/commands/show.py index f47f4b01e64..c6c35534c04 100644 --- a/pip/commands/show.py +++ b/pip/commands/show.py @@ -1,5 +1,5 @@ import os -import pkg_resources +from pip.compat import pkg_resources from pip.basecommand import Command from pip.log import logger diff --git a/pip/compat/__init__.py b/pip/compat/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/pip/compat/pkg_resources.py b/pip/compat/pkg_resources.py new file mode 100644 index 00000000000..de22a6a5070 --- /dev/null +++ b/pip/compat/pkg_resources.py @@ -0,0 +1,281 @@ +# This module is a shim to help migrate from the real pkg_resources +import logging +import os +import re +import sys + +from pip.log import logger +from pip.vendor.distlib import database +from pip.vendor.distlib.compat import string_types +from pip.vendor.distlib.database import (DistributionPath, + InstalledDistribution as DistInfoDistribution, + EggInfoDistribution) +from pip.vendor.distlib.markers import interpret +from pip.vendor.distlib.util import parse_requirement +from pip.vendor.distlib.version import _legacy_key as parse_version + +logger = logging.getLogger(__name__) + +PY_MAJOR = sys.version[:3] + +NON_ALPHAS = re.compile('[^A-Za-z0-9.]+') + +def init_logging(): + # Since we're minimising changes to pip, update logging here + logger.setLevel(logging.DEBUG) + if not logger.handlers: + fn = os.path.expanduser('~/pkg_resources.log') + h = logging.FileHandler(fn, 'a') + f = logging.Formatter('%(lineno)3d %(funcName)-10s %(message)s') + h.setFormatter(f) + logger.addHandler(h) + +def log_files(path): + logger.debug('log of files under %s', path) + for root, dirs, files in os.walk(path): + dirs[:] = sorted(dirs) + for fn in sorted(files): + p = os.path.join(root, fn) + logger.debug(' %s', p) + + +class Requirement(object): + + state_machine = { + # =>< + '<' : '--T', + '<=': 'T-T', + '>' : 'F+F', + '>=': 'T+F', + '==': 'T..', + '!=': 'F++', + } + + def __init__(self, *args, **kwargs): + init_logging() + logger.debug('%s %s', args, kwargs) + self.__dict__.update(kwargs) + self.unsafe_name = self.name + self.project_name = NON_ALPHAS.sub('-', self.name) + self.key = self.project_name.lower() + self.specs = self.constraints or [] + if self.extras is None: + self.extras = [] + self.extras = tuple(self.extras) + self.index = sorted([(parse_version(v), 
self.state_machine[op], + op, v) for op, v in self.specs]) + + @staticmethod + def parse(s, replacement=True): + r = parse_requirement(s) + logger.debug('%s -> %s', s, r.__dict__) + return Requirement(**r.__dict__) + + def __str__(self): + if not self.extras: + extras = '' + else: + extras = '[%s]' % ','.join(self.extras) + cons = ','.join([''.join(s) for s in self.specs]) + return '%s%s%s' % (self.name, extras, cons) + + # copied from pkg_resources + def __contains__(self, item): + init_logging() + if isinstance(item,Distribution): + if item.key != self.key: + logger.debug('%s %s', item.key, self.key) + return False + if self.index: item = item.parsed_version # only get if we need it + elif isinstance(item, string_types): + item = parse_version(item) + last = None + compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 + for parsed,trans, op, ver in self.index: + action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 + if action=='F': return False + elif action=='T': return True + elif action=='+': last = True + elif action=='-' or last is None: last = False + if last is None: last = True # no rules encountered + logger.debug('%s %s', item, last) + return last + +def parse_requirements(slist): + if isinstance(slist, string_types): + slist = [slist] + return [Requirement.parse(s) for s in slist] + +class Common(object): + def as_requirement(self): + init_logging() + result = Requirement.parse('%s==%s' % (self.project_name, self.version)) + logger.debug('%s', result) + return result + +class Distribution(EggInfoDistribution, Common): + def __init__(self, *args, **kwargs): + project_name = kwargs.pop('project_name', None) + version = kwargs.pop('version', None) + # if args is None, the code is being called for test mocking only, + # so we take a different path + if args: + super(Distribution, self).__init__(*args, **kwargs) + if project_name is None: + project_name = self.name + if version is not None: + self.version = version + self.project_name = project_name + # if args is None, the code is being called for test mocking only, + # so we take a different path + if not args: + self.key = self.project_name.lower() + return + self.location = self.path + if not self.location.endswith('.egg'): + self.location = os.path.dirname(self.location) + + def _metadata_path(self, name): + parts = name.split('/') + root = self.path + if root.endswith('.egg'): + path = os.path.join(root, 'EGG-INFO') + if os.path.isdir(path): + root = path + result = os.path.join(root, *parts) + logger.debug('%s %s -> %s', self.path, name, result) + return result + + def has_metadata(self, name): + path = self._metadata_path(name) + result = os.path.exists(path) + logger.debug('%s %s -> %s', self.path, name, result) + return result + + def get_metadata(self, name): + path = self._metadata_path(name) + assert os.path.exists(path) + with open(path, 'rb') as f: + result = f.read().decode('utf-8') + return result + + def get_metadata_lines(self, name): + lines = self.get_metadata(name).splitlines() + for line in lines: + line = line.strip() + if line and line[0] != '#': + yield line + + @property + def parsed_version(self): + try: + result = self._parsed_version + except AttributeError: + self._parsed_version = result = parse_version(self.version) + return result + + def egg_name(self): + s1 = self.name.replace('-', '_') + s2 = self.version.replace('-', '_') + return '%s-%s-py%s' % (s1, s2, PY_MAJOR) + + def requires(self, extras=None): + init_logging() + try: + reqs = EggInfoDistribution.run_requires.__get__(self, None) + 
logger.debug('%s', reqs) + if 'requires' in self.__dict__: + del self.__dict__['requires'] + result = [] + for r in reqs: + d = parse_requirement(r) + logger.debug('%s -> %s', r, d.__dict__) + result.append(Requirement(**d.__dict__)) + logger.warning('requires: %s -> %s', self, result) + return result + except: + logger.exception('failed') + raise + + +class NewDistribution(DistInfoDistribution, Common): + def __init__(self, *args, **kwargs): + super(NewDistribution, self).__init__(*args, **kwargs) + self.project_name = self.name + self.location = os.path.dirname(self.path) + + def requires(self, extras=None): + init_logging() + try: + reqs = set(self.run_requires) + result = [] + logger.debug('requires(%s): %s -> %s', extras, self, reqs) + marked = [] + for r in list(reqs): + if ';' in r: + reqs.remove(r) + marked.append(r.split(';', 1)) + if marked: + if extras: + e = extras + (None,) + else: + e = (None,) + for extra in e: + context = {'extra': extra} + for r, marker in marked: + if interpret(marker, context): + reqs.add(r) + for r in reqs: + d = parse_requirement(r) + logger.debug('%s -> %s', r, d.__dict__) + result.append(Requirement(**d.__dict__)) + logger.debug('requires(%s): %s -> %s', extras, self, result) + return result + except: + logger.exception('failed') + raise + + +database.old_dist_class = Distribution +database.new_dist_class = NewDistribution + +_installed_dists = DistributionPath(include_egg=True) +working_set = list(_installed_dists.get_distributions()) + +class DistributionNotFound(Exception): + """A requested distribution was not found""" + +class VersionConflict(Exception): + """An already-installed version conflicts with the requested version""" + +def get_distribution(req_or_name): + init_logging() + if isinstance(req_or_name, Requirement): + name = req_or_name.name + else: + name = req_or_name + result = _installed_dists.get_distribution(name) + logger.debug('%s -> %s', name, result) + if result is None: + raise DistributionNotFound(name) + if isinstance(req_or_name, Requirement) and result not in req_or_name: + raise VersionConflict(result, req_or_name) + return result + +def find_distributions(path_item, only=False): + init_logging() + logger.debug('%s (%s)', path_item, only) + try: + dp = DistributionPath([path_item], include_egg=True) + result = list(dp.get_distributions()) + except: + logger.exception('failed') + raise + logger.debug('%s', result) + return result + +# This is only here because pip's test infrastructure is unhelpful when it +# comes to logging :-( +def debug(s): + with open('/tmp/pkg_resources-debug.txt', 'a') as f: + f.write(s + '\n') diff --git a/pip/index.py b/pip/index.py index fdc2ee0b517..eb6ee337e18 100644 --- a/pip/index.py +++ b/pip/index.py @@ -6,7 +6,7 @@ import gzip import mimetypes import posixpath -import pkg_resources +from pip.compat import pkg_resources import random import socket import ssl diff --git a/pip/req.py b/pip/req.py index 31505082f93..e1e4146a769 100644 --- a/pip/req.py +++ b/pip/req.py @@ -1,7 +1,7 @@ from email.parser import FeedParser import os import imp -import pkg_resources +from pip.compat import pkg_resources import re import sys import shutil @@ -439,7 +439,6 @@ def uninstall(self, auto_confirm=False): if not self.check_if_exists(): raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,)) dist = self.satisfied_by or self.conflicts_with - paths_to_remove = UninstallPathSet(dist) pip_egg_info_path = os.path.join(dist.location, @@ -1114,7 +1113,8 @@ def prepare_files(self, 
finder, force_root_egg_info=False, bundle=False): elif is_wheel: req_to_install.source_dir = location req_to_install.url = url.url - dist = list(pkg_resources.find_distributions(location))[0] + dists = list(pkg_resources.find_distributions(location)) + dist = dists[0] if not req_to_install.req: req_to_install.req = dist.as_requirement() self.add_requirement(req_to_install) diff --git a/pip/util.py b/pip/util.py index c2e8ee9a20b..aaa5140fb37 100644 --- a/pip/util.py +++ b/pip/util.py @@ -4,7 +4,7 @@ import stat import re import posixpath -import pkg_resources +from pip.compat import pkg_resources import zipfile import tarfile import subprocess @@ -709,11 +709,11 @@ def is_prerelease(vers): Will return True if it is a pre-release and False if not. Versions are assumed to be a pre-release if they cannot be parsed. """ - normalized = version.suggest_normalized_version(vers) + normalized = version._suggest_normalized_version(vers) if normalized is None: # Cannot normalize, assume it is a pre-release return True - parsed = version.normalized_key(normalized) + parsed = version._normalized_key(normalized) return any([any([y in set(["a", "b", "c", "rc", "dev"]) for y in x]) for x in parsed]) diff --git a/pip/vendor/distlib/__init__.py b/pip/vendor/distlib/__init__.py index d6ae0f29540..38c3fedbcc1 100644 --- a/pip/vendor/distlib/__init__.py +++ b/pip/vendor/distlib/__init__.py @@ -6,7 +6,7 @@ # import logging -__version__ = '0.1.1' +__version__ = '0.1.3.dev0' class DistlibException(Exception): pass diff --git a/pip/vendor/distlib/_backport/sysconfig.cfg b/pip/vendor/distlib/_backport/sysconfig.cfg new file mode 100644 index 00000000000..1746bd01c1a --- /dev/null +++ b/pip/vendor/distlib/_backport/sysconfig.cfg @@ -0,0 +1,84 @@ +[posix_prefix] +# Configuration directories. Some of these come straight out of the +# configure script. They are for implementing the other variables, not to +# be used directly in [resource_locations]. 
+confdir = /etc +datadir = /usr/share +libdir = /usr/lib +statedir = /var +# User resource directory +local = ~/.local/{distribution.name} + +stdlib = {base}/lib/python{py_version_short} +platstdlib = {platbase}/lib/python{py_version_short} +purelib = {base}/lib/python{py_version_short}/site-packages +platlib = {platbase}/lib/python{py_version_short}/site-packages +include = {base}/include/python{py_version_short}{abiflags} +platinclude = {platbase}/include/python{py_version_short}{abiflags} +data = {base} + +[posix_home] +stdlib = {base}/lib/python +platstdlib = {base}/lib/python +purelib = {base}/lib/python +platlib = {base}/lib/python +include = {base}/include/python +platinclude = {base}/include/python +scripts = {base}/bin +data = {base} + +[nt] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2_home] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[nt_user] +stdlib = {userbase}/Python{py_version_nodot} +platstdlib = {userbase}/Python{py_version_nodot} +purelib = {userbase}/Python{py_version_nodot}/site-packages +platlib = {userbase}/Python{py_version_nodot}/site-packages +include = {userbase}/Python{py_version_nodot}/Include +scripts = {userbase}/Scripts +data = {userbase} + +[posix_user] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[osx_framework_user] +stdlib = {userbase}/lib/python +platstdlib = {userbase}/lib/python +purelib = {userbase}/lib/python/site-packages +platlib = {userbase}/lib/python/site-packages +include = {userbase}/include +scripts = {userbase}/bin +data = {userbase} diff --git a/pip/vendor/distlib/compat.py b/pip/vendor/distlib/compat.py index fd2c1cb8e2c..83235d32d94 100644 --- a/pip/vendor/distlib/compat.py +++ b/pip/vendor/distlib/compat.py @@ -139,6 +139,85 @@ def match_hostname(cert, hostname): from ssl import match_hostname, CertificateError + +try: + from types import SimpleNamespace as Container +except ImportError: + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. 
+ # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. ./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. + if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + # ZipFile is a context manager in 2.7, but not in 2.6 from zipfile import ZipFile as BaseZipFile @@ -752,3 +831,230 @@ def viewvalues(self): def viewitems(self): "od.viewitems() -> a set-like object providing a view on od's items" return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. 
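(Editorial illustration, not part of the patch.) The ConvertingXXX wrappers and BaseConfigurator below mirror the stdlib's logging.config machinery: string values carrying a recognised prefix are resolved lazily, on access. A minimal sketch of the behaviour, assuming the vendored module is importable as pip.vendor.distlib.compat:

    from pip.vendor.distlib.compat import BaseConfigurator

    cfg = BaseConfigurator({
        'root': {'level': 'DEBUG'},
        'alias': 'cfg://root.level',   # a cfg:// reference back into the config
    })
    # convert() resolves cfg:// paths through the wrapped config dict
    print(cfg.convert('cfg://root.level'))   # -> 'DEBUG'
    # ConvertingDict applies the same conversion transparently on access
    print(cfg.config['alias'])                # -> 'DEBUG'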
+ + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. + """ + + CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.'
+ frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. + """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/pip/vendor/distlib/database.py b/pip/vendor/distlib/database.py index eb6444a5eeb..1441246aeac 100644 --- a/pip/vendor/distlib/database.py +++ b/pip/vendor/distlib/database.py @@ -16,12 +16,12 @@ import zipimport from . 
import DistlibException -from .compat import StringIO, configparser, string_types +from .compat import StringIO, string_types from .version import get_scheme, UnsupportedVersionError from .markers import interpret -from .metadata import Metadata -from .util import (parse_requirement, cached_property, get_export_entry, - CSVReader, CSVWriter) +from .metadata import Metadata, METADATA_FILENAME +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) __all__ = ['Distribution', 'BaseInstalledDistribution', @@ -31,8 +31,10 @@ logger = logging.getLogger(__name__) -DIST_FILES = ('INSTALLER', 'METADATA', 'RECORD', 'REQUESTED', 'RESOURCES', - 'EXPORTS', 'SHARED') +EXPORTS_FILENAME = 'EXPORTS' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') DISTINFO_EXT = '.dist-info' @@ -239,18 +241,12 @@ def provides_distribution(self, name, version=None): provided = dist.provides for p in provided: - p_components = p.rsplit(' ', 1) - if len(p_components) == 1 or matcher is None: - if name == p_components[0]: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: yield dist break else: - p_name, p_ver = p_components - if len(p_ver) < 2 or p_ver[0] != '(' or p_ver[-1] != ')': - raise DistlibException( - 'distribution %r has invalid Provides field: %r' % - (dist.name, p)) - p_ver = p_ver[1:-1] # trim off the parenthesis if p_name == name and matcher.match(p_ver): yield dist break @@ -313,13 +309,16 @@ def __init__(self, metadata): self.locator = None self.md5_digest = None self.extras = None # additional features requested during installation + self.context = None # environment marker overrides @property - def download_url(self): + def source_url(self): """ - The download URL for this distribution. + The source archive download URL for this distribution. """ - return self.metadata.download_url + return self.metadata.source_url + + download_url = source_url # Backward compatibility @property def name_and_version(self): @@ -334,56 +333,36 @@ def provides(self): A set of distribution names and versions provided by this distribution. :return: A set of "name (version)" strings. 
""" - plist = self.metadata['Provides-Dist'] + plist = self.metadata.provides s = '%s (%s)' % (self.name, self.version) if s not in plist: plist.append(s) - return self.filter_requirements(plist) + return plist + + def _get_requirements(self, req_attr): + reqts = getattr(self.metadata, req_attr) + return set(self.metadata.get_requirements(reqts, extras=self.extras, + env=self.context)) @property - def requires(self): - rlist = self.metadata['Requires-Dist'] - return self.filter_requirements(rlist) + def run_requires(self): + return self._get_requirements('run_requires') @property - def setup_requires(self): - rlist = self.metadata['Setup-Requires-Dist'] - return self.filter_requirements(rlist) + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') @property def test_requires(self): - rlist = self.metadata['Requires-Dist'] - return self.filter_requirements(rlist, extras=['test']) + return self._get_requirements('test_requires') @property - def doc_requires(self): - rlist = self.metadata['Requires-Dist'] - return self.filter_requirements(rlist, extras=['doc']) - - def filter_requirements(self, rlist, context=None, extras=None): - result = set() - marked = [] - for req in rlist: - if ';' not in req: - result.add(req) - else: - marked.append(req.split(';', 1)) - if marked: - if context is None: - context = {} - if extras is None: - extras = self.extras - if not extras: - extras = [None] - else: - extras = list(extras) # leave original alone - extras.append(None) - for extra in extras: - context['extra'] = extra - for r, marker in marked: - if interpret(marker, context): - result.add(r.strip()) - return result + def dev_requires(self): + return self._get_requirements('dev_requires') def matches_requirement(self, req): """ @@ -392,9 +371,12 @@ def matches_requirement(self, req): :rtype req: str :return: True if it matches, else False. """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) scheme = get_scheme(self.metadata.scheme) try: - matcher = scheme.matcher(req) + matcher = scheme.matcher(r.requirement) except UnsupportedVersionError: # XXX compat-mode if cannot read the version logger.warning('could not read version %r - using name only', @@ -405,15 +387,12 @@ def matches_requirement(self, req): name = matcher.key # case-insensitive result = False - # Note this is similar to code in make_graph - to be refactored for p in self.provides: - vm = scheme.matcher(p) - if vm.key != name: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: continue - version = vm.exact_version - assert version try: - result = matcher.match(version) + result = matcher.match(p_ver) break except UnsupportedVersionError: pass @@ -423,8 +402,8 @@ def __repr__(self): """ Return a textual representation of this instance, """ - if self.download_url: - suffix = ' [%s]' % self.download_url + if self.source_url: + suffix = ' [%s]' % self.source_url else: suffix = '' return '' % (self.name, self.version, suffix) @@ -434,7 +413,7 @@ def __eq__(self, other): See if this distribution is the same as another. :param other: The distribution to compare with. To be equal to one another. distributions must have the same type, name, - version and download_url. + version and source_url. :return: True if it is the same, else False. 
""" if type(other) is not type(self): @@ -442,14 +421,14 @@ def __eq__(self, other): else: result = (self.name == other.name and self.version == other.version and - self.download_url == other.download_url) + self.source_url == other.source_url) return result def __hash__(self): """ Compute hash in a way which matches the equality test. """ - return hash(self.name) + hash(self.version) + hash(self.download_url) + return hash(self.name) + hash(self.version) + hash(self.source_url) class BaseInstalledDistribution(Distribution): @@ -508,7 +487,7 @@ def get_hash(self, data, hasher=None): class InstalledDistribution(BaseInstalledDistribution): """Created with the *path* of the ``.dist-info`` directory provided to the - constructor. It reads the metadata contained in ``METADATA`` when it is + constructor. It reads the metadata contained in ``pydist.json`` when it is instantiated., or uses a passed in Metadata instance (useful for when dry-run mode is being used).""" @@ -518,7 +497,13 @@ def __init__(self, path, metadata=None, env=None): if env and env._cache_enabled and path in env._cache.path: metadata = env._cache.path[path].metadata elif metadata is None: - metadata_path = os.path.join(path, 'METADATA') + metadata_path = os.path.join(path, METADATA_FILENAME) + # Temporary - for legacy support + if not os.path.exists(metadata_path): + metadata_path = os.path.join(path, 'METADATA') + if not os.path.exists(metadata_path): + raise ValueError('no %s found in %s' % (METADATA_FILENAME, + path)) metadata = Metadata(path=metadata_path, scheme='legacy') super(InstalledDistribution, self).__init__(metadata, path, env) @@ -567,7 +552,7 @@ def exports(self): export entries. """ result = {} - rf = self.get_distinfo_file('EXPORTS') + rf = self.get_distinfo_file(EXPORTS_FILENAME) if os.path.exists(rf): result = self.read_exports(rf) return result @@ -583,18 +568,10 @@ def read_exports(self, filename=None): export entries. """ result = {} - rf = filename or self.get_distinfo_file('EXPORTS') + rf = filename or self.get_distinfo_file(EXPORTS_FILENAME) if os.path.exists(rf): - cp = configparser.ConfigParser() - cp.read(rf) - for key in cp.sections(): - result[key] = entries = {} - for name, value in cp.items(key): - s = '%s = %s' % (name, value) - entry = get_export_entry(s) - assert entry is not None - entry.dist = self - entries[name] = entry + with open(rf, 'r') as f: + result = read_exports(f) return result def write_exports(self, exports, filename=None): @@ -607,21 +584,9 @@ def write_exports(self, exports, filename=None): specified, the EXPORTS file in the .dist-info directory is written to. 
""" - rf = filename or self.get_distinfo_file('EXPORTS') - cp = configparser.ConfigParser() - for k, v in exports.items(): - # TODO check k, v for valid values - cp.add_section(k) - for entry in v.values(): - if entry.suffix is None: - s = entry.prefix - else: - s = '%s:%s' % (entry.prefix, entry.suffix) - if entry.flags: - s = '%s [%s]' % (s, ', '.join(entry.flags)) - cp.set(k, entry.name, s) + rf = filename or self.get_distinfo_file(EXPORTS_FILENAME) with open(rf, 'w') as f: - cp.write(f) + write_exports(exports, f) def get_resource_path(self, relative_path): """ @@ -666,7 +631,7 @@ def write_installed_files(self, paths, prefix, dry_run=False): record_path = os.path.join(self.path, 'RECORD') logger.info('creating %s', record_path) if dry_run: - return + return None with CSVWriter(record_path) as writer: for path in paths: if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): @@ -685,6 +650,7 @@ def write_installed_files(self, paths, prefix, dry_run=False): if record_path.startswith(base): record_path = os.path.relpath(record_path, base) writer.writerow((record_path, '', '')) + return record_path def check_installed_files(self): """ @@ -760,7 +726,7 @@ def write_shared_locations(self, paths, dry_run=False): shared_path = os.path.join(self.path, 'SHARED') logger.info('creating %s', shared_path) if dry_run: - return + return None lines = [] for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): path = paths[key] @@ -797,8 +763,8 @@ def get_distinfo_file(self, path): # The file must be relative if path not in DIST_FILES: - raise DistlibException('invalid path for a dist-info file: %r' % - path) + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) return os.path.join(self.path, path) @@ -845,12 +811,12 @@ def set_name_and_version(s, n, v): self.dist_path = env if env and env._cache_enabled and path in env._cache_egg.path: metadata = env._cache_egg.path[path].metadata - set_name_and_version(self, metadata['Name'], metadata['Version']) + set_name_and_version(self, metadata.name, metadata.version) else: metadata = self._get_metadata(path) # Need to be set before caching - set_name_and_version(self, metadata['Name'], metadata['Version']) + set_name_and_version(self, metadata.name, metadata.version) if env and env._cache_enabled: env._cache_egg.add(self) @@ -859,19 +825,13 @@ def set_name_and_version(s, n, v): def _get_metadata(self, path): requires = None - def parse_requires(req_path): + def parse_requires_data(data): """Create a list of dependencies from a requires.txt file. - *req_path* must be the path to a setuptools-produced requires.txt file. + *data*: the contents of a setuptools-produced requires.txt file. """ - reqs = [] - try: - with open(req_path, 'r') as fp: - lines = fp.read().splitlines() - except IOError: - return reqs - + lines = data.splitlines() for line in lines: line = line.strip() if line.startswith('['): @@ -892,12 +852,26 @@ def parse_requires(req_path): reqs.append('%s (%s)' % (r.name, cons)) return reqs + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. 
+ """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + if path.endswith('.egg'): if os.path.isdir(path): meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO') metadata = Metadata(path=meta_path, scheme='legacy') req_path = os.path.join(path, 'EGG-INFO', 'requires.txt') - requires = parse_requires(req_path) + requires = parse_requires_path(req_path) else: # FIXME handle the case where zipfile is not available zipf = zipimport.zipimporter(path) @@ -905,26 +879,22 @@ def parse_requires(req_path): zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) metadata = Metadata(fileobj=fileobj, scheme='legacy') try: - requires = zipf.get_data('EGG-INFO/requires.txt') + data = zipf.get_data('EGG-INFO/requires.txt') + requires = parse_requires_data(data.decode('utf-8')) except IOError: requires = None elif path.endswith('.egg-info'): if os.path.isdir(path): path = os.path.join(path, 'PKG-INFO') req_path = os.path.join(path, 'requires.txt') - requires = parse_requires(req_path) + requires = parse_requires_path(req_path) metadata = Metadata(path=path, scheme='legacy') else: raise DistlibException('path must end with .egg-info or .egg, ' 'got %r' % path) if requires: - if metadata['Metadata-Version'] == '1.1': - # we can't have 1.1 metadata *and* Setuptools requires - for field in ('Obsoletes', 'Requires', 'Provides'): - if field in metadata: - del metadata[field] - metadata['Requires-Dist'] += requires + metadata.add_requirements(requires) return metadata def __repr__(self): @@ -1191,28 +1161,14 @@ def make_graph(dists, scheme='default'): graph.add_distribution(dist) for p in dist.provides: - comps = p.strip().rsplit(" ", 1) - name = comps[0] - version = None - if len(comps) == 2: - version = comps[1] - if len(version) < 3 or version[0] != '(' or version[-1] != ')': - logger.warning('distribution %r has ill-formed ' - 'provides field: %r', dist.name, p) - continue - # don't raise an exception. Legacy installed distributions - # could have all manner of metadata - #raise DistlibException('distribution %r has ill-formed ' - # 'provides field: %r' % (dist.name, p)) - version = version[1:-1] # trim off parenthesis - # Add name in lower case for case-insensitivity - name = name.lower() + name, version = parse_name_and_version(p) logger.debug('Add to provided: %s, %s, %s', name, version, dist) provided.setdefault(name, []).append((version, dist)) # now make the edges for dist in dists: - requires = (dist.requires | dist.setup_requires) + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) for req in requires: try: matcher = scheme.matcher(req) @@ -1295,7 +1251,9 @@ def make_dist(name, version, **kwargs): """ A convenience method for making a dist given just a name and version. """ + summary = kwargs.pop('summary', 'Placeholder for summary') md = Metadata(**kwargs) - md['Name'] = name - md['Version'] = version + md.name = name + md.version = version + md.summary = summary or 'Plaeholder for summary' return Distribution(md) diff --git a/pip/vendor/distlib/index.py b/pip/vendor/distlib/index.py index 282f6b74554..5e52d78556c 100644 --- a/pip/vendor/distlib/index.py +++ b/pip/vendor/distlib/index.py @@ -130,10 +130,8 @@ def register(self, metadata): request. 
""" self.check_credentials() - missing, warnings = metadata.check(True) # strict check - logger.debug('result of check: missing: %s, warnings: %s', - missing, warnings) - d = metadata.todict(True) + metadata.validate() + d = metadata.todict() d[':action'] = 'verify' request = self.encode_request(d.items(), []) response = self.send_request(request) @@ -259,10 +257,8 @@ def upload_file(self, metadata, filename, signer=None, sign_password=None, self.check_credentials() if not os.path.exists(filename): raise DistlibException('not found: %s' % filename) - missing, warnings = metadata.check(True) # strict check - logger.debug('result of check: missing: %s, warnings: %s', - missing, warnings) - d = metadata.todict(True) + metadata.validate() + d = metadata.todict() sig_file = None if signer: if not self.gpg: @@ -309,9 +305,7 @@ def upload_documentation(self, metadata, doc_dir): fn = os.path.join(doc_dir, 'index.html') if not os.path.exists(fn): raise DistlibException('not found: %r' % fn) - missing, warnings = metadata.check(True) # strict check - logger.debug('result of check: missing: %s, warnings: %s', - missing, warnings) + metadata.validate() name, version = metadata.name, metadata.version zip_data = zip_dir(doc_dir).getvalue() fields = [(':action', 'doc_upload'), @@ -382,12 +376,14 @@ def download_file(self, url, destfile, digest=None, reporthook=None): """ if digest is None: digester = None + logger.debug('No digest specified') else: if isinstance(digest, (list, tuple)): hasher, digest = digest else: hasher = 'md5' digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) # The following code is equivalent to urlretrieve. # We need to do it this way so that we can compute the # digest of the file as we go. @@ -431,6 +427,7 @@ def download_file(self, url, destfile, digest=None, reporthook=None): raise DistlibException('MD5 digest mismatch for %s: expected ' '%s, got %s' % (destfile, digest, actual)) + logger.debug('Digest verified: %s', digest) def send_request(self, req): """ diff --git a/pip/vendor/distlib/locators.py b/pip/vendor/distlib/locators.py index 3dab8a2fcbd..6e2696f09c9 100644 --- a/pip/vendor/distlib/locators.py +++ b/pip/vendor/distlib/locators.py @@ -24,7 +24,7 @@ from .metadata import Metadata from .util import (cached_property, parse_credentials, ensure_slash, split_filename, get_project_data, parse_requirement, - ServerProxy) + parse_name_and_version, ServerProxy) from .version import get_scheme, UnsupportedVersionError from .wheel import Wheel, is_compatible @@ -106,6 +106,10 @@ def __init__(self, scheme='default'): # Because of bugs in some of the handlers on some of the platforms, # we use our own opener rather than just using urlopen. self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. + self.matcher = None def clear_cache(self): self._cache.clear() @@ -124,6 +128,9 @@ def _get_project(self, name): instances. This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. """ raise NotImplementedError('Please implement in the subclass') @@ -167,10 +174,8 @@ def prefer_url(self, url1, url2): The current implement favours http:// URLs over https://, archives from PyPI over those from other locations and then the archive name. 
""" - if url1 == 'UNKNOWN': - result = url2 - else: - result = url2 + result = url2 + if url1: s1 = self.score_url(url1) s2 = self.score_url(url2) if s1 > s2: @@ -278,11 +283,8 @@ def _update_version_data(self, result, info): dist = make_dist(name, version, scheme=self.scheme) md = dist.metadata dist.md5_digest = info.get('md5_digest') - if 'python-version' in info: - md['Requires-Python'] = info['python-version'] - if md['Download-URL'] != info['url']: - md['Download-URL'] = self.prefer_url(md['Download-URL'], - info['url']) + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, info['url']) dist.locator = self result[version] = dist @@ -300,20 +302,17 @@ def locate(self, requirement, prereleases=False): distribution could be located. """ result = None - scheme = get_scheme(self.scheme) r = parse_requirement(requirement) if r is None: raise DistlibException('Not a valid requirement: %r' % requirement) - if r.extras: - # lose the extras part of the requirement - requirement = r.requirement - matcher = scheme.matcher(requirement) - vcls = matcher.version_class + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) - versions = self.get_project(matcher.name) + versions = self.get_project(r.name) if versions: # sometimes, versions are invalid slist = [] + vcls = matcher.version_class for k in versions: try: if not matcher.match(k): @@ -322,7 +321,8 @@ def locate(self, requirement, prereleases=False): if prereleases or not vcls(k).is_prerelease: slist.append(k) else: - logger.debug('skipping pre-release version %s', k) + logger.debug('skipping pre-release ' + 'version %s of %s', k, matcher.name) except Exception: logger.warning('error matching %s with %r', matcher, k) pass # slist.append(k) @@ -333,6 +333,7 @@ def locate(self, requirement, prereleases=False): result = versions[slist[-1]] if result and r.extras: result.extras = r.extras + self.matcher = None return result @@ -365,11 +366,15 @@ def _get_project(self, name): urls = self.client.release_urls(name, v) data = self.client.release_data(name, v) metadata = Metadata(scheme=self.scheme) - metadata.update(data) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') dist = Distribution(metadata) if urls: info = urls[0] - metadata['Download-URL'] = info['url'] + metadata.source_url = info['url'] dist.md5_digest = info.get('md5_digest') dist.locator = self result[v] = dist @@ -398,12 +403,17 @@ def _get_project(self, name): data = resp.read().decode() # for now d = json.loads(data) md = Metadata(scheme=self.scheme) - md.update(d['info']) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') dist = Distribution(md) urls = d['urls'] if urls: info = urls[0] - md['Download-URL'] = info['url'] + md.source_url = info['url'] dist.md5_digest = info.get('md5_digest') dist.locator = self result[md.version] = dist @@ -791,12 +801,17 @@ def _get_project(self, name): for info in data.get('files', []): if info['ptype'] != 'sdist' or info['pyversion'] != 'source': continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution dist = make_dist(data['name'], info['version'], + 
summary=data.get('summary', + 'Placeholder for summary'), scheme=self.scheme) md = dist.metadata - md['Download-URL'] = info['url'] + md.source_url = info['url'] dist.md5_digest = info.get('digest') - md.dependencies = info.get('requirements', {}) + md.dependencies = info.get('new-requirements', {}) dist.exports = info.get('exports', {}) result[dist.version] = dist return result @@ -860,13 +875,32 @@ def _set_scheme(self, value): def _get_project(self, name): result = {} for locator in self.locators: - r = locator.get_project(name) - if r: + d = locator.get_project(name) + if d: if self.merge: - result.update(r) + result.update(d) else: - result = r - break + # See issue #18. If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break return result def get_distribution_names(self): @@ -882,13 +916,19 @@ def get_distribution_names(self): return result +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 426 / PEP 440. default_locator = AggregatingLocator( JSONLocator(), SimpleScrapingLocator('https://pypi.python.org/simple/', - timeout=3.0)) + timeout=3.0), + scheme='legacy') locate = default_locator.locate +NAME_VERSION_RE = re.compile(r'(?P<name>[\w-]+)\s*' + r'\(\s*(==\s*)?(?P<ver>[^)]+)\)$') + class DependencyFinder(object): """ Locate dependencies for distributions. @@ -902,25 +942,6 @@ def __init__(self, locator=None): self.locator = locator or default_locator self.scheme = get_scheme(self.locator.scheme) - def _get_name_and_version(self, p): - """ - A utility method used to get name and version from e.g. a Provides-Dist - value. - - :param p: A value in a form foo (1.0) - :return: The name and version as a tuple. - """ - comps = p.strip().rsplit(' ', 1) - name = comps[0] - version = None - if len(comps) == 2: - version = comps[1] - if len(version) < 3 or version[0] != '(' or version[-1] != ')': - raise DistlibException('Ill-formed provides field: %r' % p) - version = version[1:-1] # trim off parentheses - # Name in lower case for case-insensitivity - return name.lower(), version - def add_distribution(self, dist): """ Add a distribution to the finder.
This will update internal information @@ -932,7 +953,7 @@ def add_distribution(self, dist): self.dists_by_name[name] = dist self.dists[(name, dist.version)] = dist for p in dist.provides: - name, version = self._get_name_and_version(p) + name, version = parse_name_and_version(p) logger.debug('Add to provided: %s, %s, %s', name, version, dist) self.provided.setdefault(name, set()).add((version, dist)) @@ -947,7 +968,7 @@ def remove_distribution(self, dist): del self.dists_by_name[name] del self.dists[(name, dist.version)] for p in dist.provides: - name, version = self._get_name_and_version(p) + name, version = parse_name_and_version(p) logger.debug('Remove from provided: %s, %s, %s', name, version, dist) s = self.provided[name] s.remove((version, dist)) @@ -1033,15 +1054,17 @@ def try_to_replace(self, provider, other, problems): result = True return result - def find(self, requirement, tests=False, prereleases=False): + def find(self, requirement, meta_extras=None, prereleases=False): """ - Find a distribution matching requirement and all distributions - it depends on. Use the ``tests`` argument to determine whether - distributions used only for testing should be included in the - results. Allow ``requirement`` to be either a :class:`Distribution` - instance or a string expressing a requirement. If ``prereleases`` - is True, allow pre-release versions to be returned - otherwise, - don't. + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. + :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. Return a set of :class:`Distribution` instances and a set of problems. @@ -1062,6 +1085,7 @@ def find(self, requirement, tests=False, prereleases=False): self.dists_by_name = {} self.reqts = {} + meta_extras = set(meta_extras or []) if isinstance(requirement, Distribution): dist = odist = requirement logger.debug('passed %s as requirement', odist) @@ -1077,7 +1101,7 @@ def find(self, requirement, tests=False, prereleases=False): install_dists = set([odist]) while todo: dist = todo.pop() - name = dist.key # case-insensitive + name = dist.key # case-insensitive if name not in self.dists_by_name: self.add_distribution(dist) else: @@ -1086,19 +1110,24 @@ def find(self, requirement, tests=False, prereleases=False): if other != dist: self.try_to_replace(dist, other, problems) - ireqts = dist.requires - sreqts = dist.setup_requires + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires ereqts = set() - if not tests or dist not in install_dists: - treqts = set() - else: - treqts = dist.test_requires - all_reqts = ireqts | sreqts | treqts | ereqts + if dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: + ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts for r in all_reqts: providers = self.find_providers(r) if not providers: logger.debug('No providers found for %r', r) provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. 
+ if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) if provider is None: logger.debug('Cannot satisfy %r', r) problems.add(('unsatisfied', r)) diff --git a/pip/vendor/distlib/markers.py b/pip/vendor/distlib/markers.py index 182474008a9..623f45697d4 100644 --- a/pip/vendor/distlib/markers.py +++ b/pip/vendor/distlib/markers.py @@ -34,16 +34,17 @@ class Evaluator(object): } allowed_values = { - 'sys.platform': sys.platform, + 'sys_platform': sys.platform, 'python_version': '%s.%s' % sys.version_info[:2], # parsing sys.platform is not reliable, but there is no other # way to get e.g. 2.7.2+, and the PEP is defined with sys.version 'python_full_version': sys.version.split(' ', 1)[0], - 'os.name': os.name, - 'platform.in_venv': str(in_venv()), - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'platform.python_implementation': platform.python_implementation(), + 'os_name': os.name, + 'platform_in_venv': str(in_venv()), + 'platform_release': platform.release(), + 'platform_version': platform.version(), + 'platform_machine': platform.machine(), + 'platform_python_implementation': platform.python_implementation(), } def __init__(self, context=None): @@ -133,11 +134,11 @@ def sanity_check(lhsnode, rhsnode): valid = True if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str): valid = False - elif (isinstance(lhsnode, ast.Attribute) - and isinstance(rhsnode, ast.Attribute)): - klhs = self.get_attr_key(lhsnode) - krhs = self.get_attr_key(rhsnode) - valid = klhs != krhs + #elif (isinstance(lhsnode, ast.Attribute) + # and isinstance(rhsnode, ast.Attribute)): + # klhs = self.get_attr_key(lhsnode) + # krhs = self.get_attr_key(rhsnode) + # valid = klhs != krhs if not valid: s = self.get_fragment(node.col_offset) raise SyntaxError('Invalid comparison: %s' % s) diff --git a/pip/vendor/distlib/metadata.py b/pip/vendor/distlib/metadata.py index 51dbe853f68..ab24fcd25d7 100644 --- a/pip/vendor/distlib/metadata.py +++ b/pip/vendor/distlib/metadata.py @@ -11,17 +11,20 @@ import codecs from email import message_from_file +import json import logging import re -from . import DistlibException +from . import DistlibException, __version__ from .compat import StringIO, string_types from .markers import interpret -from .version import get_scheme +from .util import extract_by_key +from .version import get_scheme, PEP426_VERSION_RE logger = logging.getLogger(__name__) + class MetadataMissingError(DistlibException): """A required metadata is missing""" @@ -34,31 +37,8 @@ class MetadataUnrecognizedVersionError(DistlibException): """Unknown metadata version number.""" -try: - # docutils is installed - from docutils.utils import Reporter - from docutils.parsers.rst import Parser - from docutils import frontend - from docutils import nodes - - class SilentReporter(Reporter, object): - - def __init__(self, source, report_level, halt_level, stream=None, - debug=0, encoding='ascii', error_handler='replace'): - self.messages = [] - super(SilentReporter, self).__init__( - source, report_level, halt_level, stream, - debug, encoding, error_handler) - - def system_message(self, level, message, *children, **kwargs): - self.messages.append((level, message, children, kwargs)) - return nodes.system_message(message, level=level, type=self. 
- levels[level], *children, **kwargs) - - _HAS_DOCUTILS = True -except ImportError: - # docutils is not installed - _HAS_DOCUTILS = False +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" # public API of this module __all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] @@ -118,6 +98,7 @@ def system_message(self, level, message, *children, **kwargs): EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + def _version2fieldlist(version): if version == '1.0': return _241_FIELDS @@ -238,32 +219,38 @@ def _has_marker(keys, markers): _FILESAFE = re.compile('[^A-Za-z0-9.]+') -class Metadata(object): - """The metadata of a release. +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + +class LegacyMetadata(object): + """The legacy metadata of a release. Supports versions 1.0, 1.1 and 1.2 (auto-detected). You can instantiate the class with one of these arguments (or none): - - *path*, the path to a METADATA file - - *fileobj* give a file-like object with METADATA as content + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content - *mapping* is a dict-like object - *scheme* is a version scheme name """ - # TODO document that execution_context and platform_dependent are used - # to filter on query, not when setting a key - # also document the mapping API and UNKNOWN default key + # TODO document the mapping API and UNKNOWN default key - def __init__(self, path=None, platform_dependent=False, - execution_context=None, fileobj=None, mapping=None, + def __init__(self, path=None, fileobj=None, mapping=None, scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') self._fields = {} self.requires_files = [] - self.docutils_support = _HAS_DOCUTILS - self.platform_dependent = platform_dependent - self.execution_context = execution_context self._dependencies = None self.scheme = scheme - if [path, fileobj, mapping].count(None) < 2: - raise TypeError('path, fileobj and mapping are exclusive') if path is not None: self.read(path) elif fileobj is not None: @@ -275,8 +262,8 @@ def __init__(self, path=None, platform_dependent=False, def set_metadata_version(self): self._fields['Metadata-Version'] = _best_version(self._fields) - def _write_field(self, file, name, value): - file.write('%s: %s\n' % (name, value)) + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) def __getitem__(self, name): return self.get(name) @@ -306,38 +293,6 @@ def _default_value(self, name): return [] return 'UNKNOWN' - def _check_rst_data(self, data): - """Return warnings when the provided data has syntax errors.""" - source_path = StringIO() - parser = Parser() - settings = frontend.OptionParser().get_default_values() - settings.tab_width = 4 - settings.pep_references = None - settings.rfc_references = None - reporter = SilentReporter(source_path, - settings.report_level, - settings.halt_level, - stream=settings.warning_stream, - debug=settings.debug, - encoding=settings.error_encoding, - 
error_handler=settings.error_encoding_error_handler) - - document = nodes.document(settings, reporter, source=source_path) - document.note_source(source_path, -1) - try: - parser.parse(data, document) - except AttributeError: - reporter.messages.append((-1, 'Could not finish the parsing.', - '', {})) - - return reporter.messages - - def _platform(self, value): - if not self.platform_dependent or ';' not in value: - return True, value - value, marker = value.split(';') - return interpret(marker, self.execution_context), value - def _remove_line_prefix(self, value): return _LINE_PREFIX.sub('\n', value) @@ -346,69 +301,62 @@ def __getattr__(self, name): return self[name] raise AttributeError(name) - def _get_dependencies(self): - def handle_req(req, rlist, extras): - if ';' not in req: - rlist.append(req) - else: - r, marker = req.split(';') - m = EXTRA_RE.search(marker) - if m: - extra = m.groups()[0][1:-1] - extras.setdefault(extra, []).append(r) - - result = self._dependencies - if result is None: - self._dependencies = result = {} - extras = {} - setup_reqs = self['Setup-Requires-Dist'] - if setup_reqs: - result['setup'] = setup_reqs - install_reqs = [] - for req in self['Requires-Dist']: - handle_req(req, install_reqs, extras) - if install_reqs: - result['install'] = install_reqs - if extras: - result['extras'] = extras - return result - - def _set_dependencies(self, value): - if 'test' in value: - value = dict(value) # don't change value passed in - value.setdefault('extras', {})['test'] = value.pop('test') - self._dependencies = value - setup_reqs = value.get('setup', []) - install_reqs = value.get('install', []) - klist = [] - for k, rlist in value.get('extras', {}).items(): - klist.append(k) - for r in rlist: - install_reqs.append('%s; extra == "%s"' % (r, k)) - if setup_reqs: - self['Setup-Requires-Dist'] = setup_reqs - if install_reqs: - self['Requires-Dist'] = install_reqs - if klist: - self['Provides-Extra'] = klist +# def _get_dependencies(self): +# def handle_req(req, rlist, extras): +# if ';' not in req: +# rlist.append(req) +# else: +# r, marker = req.split(';') +# m = EXTRA_RE.search(marker) +# if m: +# extra = m.groups()[0][1:-1] +# extras.setdefault(extra, []).append(r) + +# result = self._dependencies +# if result is None: +# self._dependencies = result = {} +# extras = {} +# setup_reqs = self['Setup-Requires-Dist'] +# if setup_reqs: +# result['setup'] = setup_reqs +# install_reqs = [] +# for req in self['Requires-Dist']: +# handle_req(req, install_reqs, extras) +# if install_reqs: +# result['install'] = install_reqs +# if extras: +# result['extras'] = extras +# return result + +# def _set_dependencies(self, value): +# if 'test' in value: +# value = dict(value) # don't change value passed in +# value.setdefault('extras', {})['test'] = value.pop('test') +# self._dependencies = value +# setup_reqs = value.get('setup', []) +# install_reqs = value.get('install', []) +# klist = [] +# for k, rlist in value.get('extras', {}).items(): +# klist.append(k) +# for r in rlist: +# install_reqs.append('%s; extra == "%s"' % (r, k)) +# if setup_reqs: +# self['Setup-Requires-Dist'] = setup_reqs +# if install_reqs: +# self['Requires-Dist'] = install_reqs +# if klist: +# self['Provides-Extra'] = klist # # Public API # - dependencies = property(_get_dependencies, _set_dependencies) +# dependencies = property(_get_dependencies, _set_dependencies) def get_fullname(self, filesafe=False): """Return the distribution name with version. 
If filesafe is true, return a filename-escaped form.""" - name, version = self['Name'], self['Version'] - if filesafe: - # For both name and version any runs of non-alphanumeric or '.' - # characters are replaced with a single '-'. Additionally any - # spaces in the version string become '.' - name = _FILESAFE.sub('-', name) - version = _FILESAFE.sub('-', version.replace(' ', '.')) - return '%s-%s' % (name, version) + return _get_name_and_version(self['Name'], self['Version'], filesafe) def is_field(self, name): """return True if name is a valid metadata key""" @@ -432,7 +380,10 @@ def read_file(self, fileob): msg = message_from_file(fileob) self._fields['Metadata-Version'] = msg['metadata-version'] - for field in _version2fieldlist(self['Metadata-Version']): + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue if field in _LISTFIELDS: # we can have multiple lines values = msg.get_all(field) @@ -564,9 +515,6 @@ def get(self, name, default=_MISSING): return [] res = [] for val in value: - valid, val = self._platform(val) - if not valid: - continue if name not in _LISTTUPLEFIELDS: res.append(val) else: @@ -575,17 +523,12 @@ def get(self, name, default=_MISSING): return res elif name in _ELEMENTSFIELD: - valid, value = self._platform(self._fields[name]) - if not valid: - return [] + value = self._fields[name] if isinstance(value, string_types): return value.split(',') - valid, value = self._platform(self._fields[name]) - if not valid: - return None - return value + return self._fields[name] - def check(self, strict=False, restructuredtext=False): + def check(self, strict=False): """Check if the metadata is compliant. If strict is True then raise if no Name or Version are provided""" self.set_metadata_version() @@ -605,9 +548,6 @@ def check(self, strict=False): if attr not in self: missing.append(attr) - if _HAS_DOCUTILS and restructuredtext: - warnings.extend(self._check_rst_data(self['Description'])) - # checking metadata 1.2 (XXX needs to check 1.1, 1.0) if self['Metadata-Version'] != '1.2': return missing, warnings @@ -669,6 +609,8 @@ def todict(self, skip_missing=False): ('provides_dist', 'Provides-Dist'), ('obsoletes_dist', 'Obsoletes-Dist'), ('project_url', 'Project-URL'), + ('maintainer', 'Maintainer'), + ('maintainer_email', 'Maintainer-email'), ) for key, field_name in mapping_1_2: if not skip_missing or field_name in self._fields: @@ -683,11 +625,20 @@ def todict(self, skip_missing=False): ('requires', 'Requires'), ('obsoletes', 'Obsoletes'), ) - if not skip_missing or field_name in self._fields: - data[key] = self[field_name] + for key, field_name in mapping_1_1: + if not skip_missing or field_name in self._fields: + data[key] = self[field_name] return data + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + # Mapping API # TODO could add iter* variants @@ -705,4 +656,390 @@ def items(self): return [(key, self[key]) for key in self.keys()] def __repr__(self): - return '<Metadata %s %s>' % (self.name, self.version) + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible.
If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. + """ + + METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP426_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = 'name version license summary description' + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with codecs.open(path, 'r', 'utf-8') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). 
+ # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': (None, list), + 'test_requires': (None, list), + 'meta_requires': (None, list), + 'extras': ('Provides-Extra', list), + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + result = self._data.setdefault(key, value) + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError('%r is an invalid value for ' + 'the %r property' % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + else: + self._data[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. 
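+ For example, a single entry might look like + {'requires': ['foo (1.0)'], 'extra': 'test', 'environment': marker} + (an illustrative sketch of the dict shape this method consumes); it is + included only if 'test' is in ``extras`` and the marker evaluates true.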
+ """ + if self._legacy: + result = reqts + else: + result = [] + extras = set(extras or []) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + 'license': 'License', + 'summary': 'Summary', + 'description': 'Description', + 'classifiers': 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == %r' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy 
+ result = LegacyMetadata() + nmd = self._data + for nk, ok in self.LEGACY_MAPPING.items(): + result[ok] = nmd[nk] + extras = set() + rlist = set() + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: other fields such as contacts + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + self._data.setdefault('run_requires', []).extend(requirements) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/pip/vendor/distlib/resources.py b/pip/vendor/distlib/resources.py index 5f2d212b17e..f5e38ef6b24 100644 --- a/pip/vendor/distlib/resources.py +++ b/pip/vendor/distlib/resources.py @@ -113,26 +113,26 @@ class Resource(ResourceBase): :class:`ResourceFinder` which manages the resource. """ is_container = False # Backwards compatibility - + def as_stream(self): "Get the resource as a stream. Not a property, as not idempotent." 
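# Note that each call returns a fresh stream, so callers are expected to close it themselves, e.g. via contextlib.closing (an illustrative usage, not shown here).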
return self.finder.get_stream(self) - + @cached_property def file_path(self): return cache.get(self) - + @cached_property def bytes(self): return self.finder.get_bytes(self) - + @cached_property def size(self): return self.finder.get_size(self) - + class ResourceContainer(ResourceBase): is_container = True # Backwards compatibility - + @cached_property def resources(self): return self.finder.get_resources(self) @@ -186,7 +186,7 @@ def allowed(f): def is_container(self, resource): return self._is_directory(resource.path) - + _is_directory = staticmethod(os.path.isdir) class ZipResourceFinder(ResourceFinder): diff --git a/pip/vendor/distlib/scripts.py b/pip/vendor/distlib/scripts.py index be923f61b49..e89ca38ebbc 100644 --- a/pip/vendor/distlib/scripts.py +++ b/pip/vendor/distlib/scripts.py @@ -17,6 +17,24 @@ logger = logging.getLogger(__name__) +_DEFAULT_MANIFEST = ''' +<?xml version="1.0" encoding="UTF-8" standalone="yes"?> +<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> + <assemblyIdentity version="1.0.0.0" + processorArchitecture="X86" + name="%s" + type="win32"/> + + <!-- Identify the application security requirements. --> + <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> + <security> + <requestedPrivileges> + <requestedExecutionLevel level="asInvoker" uiAccess="false"/> + </requestedPrivileges> + </security> + </trustInfo> +</assembly>'''.strip() + # check if Python is called on the first line with this expression FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') SCRIPT_TEMPLATE = '''%(shebang)s @@ -59,7 +77,9 @@ def __init__(self, source_dir, target_dir, add_launchers=True, self.target_dir = target_dir self.add_launchers = add_launchers self.force = False + self.clobber = False self.set_mode = False + self.variants = set(('', 'X.Y')) self._fileop = fileop or FileOperator(dry_run) def _get_alternate_executable(self, executable, flags): @@ -115,27 +135,49 @@ def _get_script_text(self, shebang, entry): module=entry.prefix, func=entry.suffix) + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + def _make_script(self, entry, filenames): shebang = self._get_shebang('utf-8', flags=entry.flags).decode('utf-8') script = self._get_script_text(shebang, entry) - outname = os.path.join(self.target_dir, entry.name) - use_launcher = self.add_launchers and os.name == 'nt' - if use_launcher: - exename = '%s.exe' % outname - if 'gui' in entry.flags: - ext = 'pyw' - launcher = self._get_launcher('w') - else: - ext = 'py' - launcher = self._get_launcher('t') - outname = '%s-script.%s' % (outname, ext) - self._fileop.write_text_file(outname, script, 'utf-8') - if self.set_mode: - self._fileop.set_executable_mode([outname]) - filenames.append(outname) - if use_launcher: - self._fileop.write_binary_file(exename, launcher) - filenames.append(exename) + name = entry.name + scriptnames = set() + if '' in self.variants: + scriptnames.add(name) + if 'X' in self.variants: + scriptnames.add('%s%s' % (name, sys.version[0])) + if 'X.Y' in self.variants: + scriptnames.add('%s-%s' % (name, sys.version[:3])) + for name in scriptnames: + outname = os.path.join(self.target_dir, name) + use_launcher = self.add_launchers and os.name == 'nt' + if use_launcher: + exename = '%s.exe' % outname + if 'gui' in entry.flags: + ext = 'pyw' + launcher = self._get_launcher('w') + else: + ext = 'py' + launcher = self._get_launcher('t') + outname = '%s-script.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_text_file(outname, script, 'utf-8') + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + if use_launcher: + self._fileop.write_binary_file(exename, launcher) + filenames.append(exename) + manifest = self.get_manifest(exename) + manifestname = exename + '.manifest' + self._fileop.write_text_file(manifestname, manifest, 'utf-8') +
filenames.append(manifestname) def _copy_script(self, script, filenames): adjust = False diff --git a/pip/vendor/distlib/t32.exe b/pip/vendor/distlib/t32.exe new file mode 100644 index 00000000000..c6ccc0c66b1 Binary files /dev/null and b/pip/vendor/distlib/t32.exe differ diff --git a/pip/vendor/distlib/t64.exe b/pip/vendor/distlib/t64.exe new file mode 100644 index 00000000000..034c0969bae Binary files /dev/null and b/pip/vendor/distlib/t64.exe differ diff --git a/pip/vendor/distlib/util.py b/pip/vendor/distlib/util.py index f44e8c35281..ab890ea2784 100644 --- a/pip/vendor/distlib/util.py +++ b/pip/vendor/distlib/util.py @@ -20,23 +20,16 @@ import tarfile import tempfile import time -import zipfile from . import DistlibException from .compat import (string_types, text_type, shutil, raw_input, cache_from_source, urlopen, httplib, xmlrpclib, splittype, HTTPHandler, HTTPSHandler as BaseHTTPSHandler, - URLError, match_hostname, CertificateError) + BaseConfigurator, valid_ident, Container, configparser, + URLError, match_hostname, CertificateError, ZipFile) logger = logging.getLogger(__name__) -class Container(object): - """ - A generic container for when multiple values need to be returned - """ - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - # # Requirement parsing code for name + optional constraints + optional extras # @@ -50,21 +43,26 @@ def __init__(self, **kwargs): COMMA_RE = re.compile(COMMA) IDENT = r'(\w|[.-])+' -RELOP = '([<>=!]=)|[<>]' +EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')' +VERSPEC = IDENT + r'\*?' + +RELOP = '([<>=!~]=)|[<>]' # -# The first relop is optional - if absent, will be taken as '==' +# The first relop is optional - if absent, will be taken as '~=' # -BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + IDENT + ')(' + COMMA + '(' + - RELOP + r')\s*(' + IDENT + '))*') +BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' + + RELOP + r')\s*(' + VERSPEC + '))*') + +DIRECT_REF = '(from\s+(?P<diref>.*))' # # Either the bare constraints or the bare constraints in parentheses # -CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + r')\s*\)|(?P<c2>' + - BARE_CONSTRAINTS + '\s*)') +CONSTRAINTS = (r'\(\s*(?P<c1>' + BARE_CONSTRAINTS + '|' + DIRECT_REF + + r')\s*\)|(?P<c2>' + BARE_CONSTRAINTS + '\s*)') -EXTRA_LIST = IDENT + '(' + COMMA + IDENT + ')*' +EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*' EXTRAS = r'\[\s*(?P<ex>' + EXTRA_LIST + r')?\s*\]' REQUIREMENT = ('(?P<dn>' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' + CONSTRAINTS + ')?$') @@ -73,7 +71,7 @@ def __init__(self, **kwargs): # # Used to scan through the constraints # -RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + IDENT + ')' +RELOP_IDENT = '(?P<op>' + RELOP + r')\s*(?P<vn>' + VERSPEC + ')' RELOP_IDENT_RE = re.compile(RELOP_IDENT) def parse_requirement(s): @@ -88,13 +86,19 @@ def get_constraint(m): d = m.groupdict() name = d['dn'] cons = d['c1'] or d['c2'] + if not d['diref']: + url = None + else: + # direct reference + cons = None + url = d['diref'].strip() if not cons: cons = None constr = '' rs = d['dn'] else: if cons[0] not in '<>!=': - cons = '==' + cons + cons = '~=' + cons iterator = RELOP_IDENT_RE.finditer(cons) cons = [get_constraint(m) for m in iterator] rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons])) @@ -103,7 +107,7 @@ def get_constraint(m): else: extras = COMMA_RE.split(d['ex']) result = Container(name=name, constraints=cons, extras=extras, - requirement=rs, source=s) + requirement=rs, source=s, url=url) return result @@ -168,6 +172,50 @@ def proceed(prompt, allowed_chars,
error_prompt=None, default=None): p = '%c: %s\n%s' % (c, error_prompt, prompt) return c + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + cp = configparser.ConfigParser() + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + @contextlib.contextmanager def tempdir(): td = tempfile.mkdtemp() @@ -280,8 +328,7 @@ def copy_file(self, infile, outfile): logger.info('Copying %s to %s', infile, outfile) if not self.dry_run: shutil.copyfile(infile, outfile) - if self.record: - self.files_written.add(outfile) + self.record_as_written(outfile) def copy_stream(self, instream, outfile, encoding=None): assert not os.path.isdir(outfile) @@ -296,24 +343,21 @@ def copy_stream(self, instream, outfile, encoding=None): shutil.copyfileobj(instream, outstream) finally: outstream.close() - if self.record: - self.files_written.add(outfile) + self.record_as_written(outfile) def write_binary_file(self, path, data): self.ensure_dir(os.path.dirname(path)) if not self.dry_run: with open(path, 'wb') as f: f.write(data) - if self.record: - self.files_written.add(path) + self.record_as_written(path) def write_text_file(self, path, data, encoding): self.ensure_dir(os.path.dirname(path)) if not self.dry_run: with open(path, 'wb') as f: f.write(data.encode(encoding)) - if self.record: - self.files_written.add(path) + self.record_as_written(path) def set_mode(self, bits, mask, files): if os.name == 'posix': @@ -352,8 +396,7 @@ def byte_compile(self, path, optimize=False, force=False, prefix=None): assert path.startswith(prefix) diagpath = path[len(prefix):] py_compile.compile(path, dpath, diagpath, True) # raise on error - if self.record: - self.files_written.add(dpath) + self.record_as_written(dpath) return dpath def ensure_removed(self, path): @@ -604,6 +647,25 @@ def split_filename(filename, project_name=None): result = m.group(1), m.group(3), pyver return result +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*' + r'\(\s*(?P<ver>[^\s)]+)\)$') + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple.
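+ + For example (illustrative): parse_name_and_version('Twisted Core (12.3.0)') + returns ('twisted core', '12.3.0'); the name is stripped and lower-cased.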
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + # # Extended metadata functionality # @@ -634,35 +696,10 @@ def get_project_data(name): result = _get_external_data(url) return result -def get_package_data(dist): - name, version = dist.name, dist.version +def get_package_data(name, version): url = ('https://www.red-dove.com/pypi/projects/' '%s/%s/package-%s.json' % (name[0].upper(), name, version)) - result = _get_external_data(url) - if 'metadata' in result and dist.metadata: - update_metadata(dist.metadata, result) - return result - -RENAMES = { # Temporary - 'classifiers': 'Classifier', - 'use_2to3': None, - 'use_2to3_fixers': None, - 'test_suite': None, -} - -def update_metadata(metadata, pkginfo): - # update dist's metadata from received package data - assert metadata - assert 'metadata' in pkginfo - for k, v in pkginfo['metadata'].items(): - k = k.replace('-', '_') - k = RENAMES.get(k, k) - if k is not None: - metadata[k] = v - metadata.set_metadata_version() - if 'requirements' in pkginfo: - metadata.dependencies = pkginfo['requirements'] - + return _get_external_data(url) # # Simple event pub/sub @@ -897,7 +934,7 @@ def check_path(path): raise ValueError('Unknown format for %r' % archive_filename) try: if format == 'zip': - archive = zipfile.ZipFile(archive_filename, 'r') + archive = ZipFile(archive_filename, 'r') if check: names = archive.namelist() for name in names: @@ -927,7 +964,7 @@ def zip_dir(directory): """zip a directory tree into a BytesIO object""" result = io.BytesIO() dlen = len(directory) - with zipfile.ZipFile(result, "w") as zf: + with ZipFile(result, "w") as zf: for root, dirs, files in os.walk(directory): for name in files: full = os.path.join(root, name) @@ -1120,6 +1157,7 @@ def connect(self): if self.ca_certs and self.check_domain: try: match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) except CertificateError: self.sock.shutdown(socket.SHUT_RDWR) self.sock.close() @@ -1311,3 +1349,61 @@ def writerow(self, row): r.append(item) row = r self.writer.writerow(row) + +# +# Configurator functionality +# + +class Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if 
not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result diff --git a/pip/vendor/distlib/version.py b/pip/vendor/distlib/version.py index 1e45e311829..6dd288ca2db 100644 --- a/pip/vendor/distlib/version.py +++ b/pip/vendor/distlib/version.py @@ -15,37 +15,14 @@ __all__ = ['NormalizedVersion', 'NormalizedMatcher', 'LegacyVersion', 'LegacyMatcher', 'SemanticVersion', 'SemanticMatcher', - 'AdaptiveVersion', 'AdaptiveMatcher', - 'UnsupportedVersionError', 'HugeMajorVersionError', - 'suggest_normalized_version', 'suggest_semantic_version', - 'suggest_adaptive_version', - 'normalized_key', 'legacy_key', 'semantic_key', 'adaptive_key', - 'get_scheme'] - -class UnsupportedVersionError(Exception): - """This is an unsupported version.""" - pass - + 'UnsupportedVersionError', 'get_scheme'] -class HugeMajorVersionError(UnsupportedVersionError): - """An irrational version because the major version number is huge - (often because a year or date was used). - - See `error_on_huge_major_num` option in `NormalizedVersion` for details. - This guard can be disabled by setting that option False. - """ +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" pass -class _Common(object): - def __repr__(self): - return "%s(%r)" % (self.__class__.__name__, self._string) - - def __str__(self): - return self._string - - -class Version(_Common): +class Version(object): def __init__(self, s): self._string = s = s.strip() self._parts = parts = self.parse(s) @@ -83,30 +60,41 @@ def __ge__(self, other): def __hash__(self): return hash(self._parts) + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + @property def is_prerelease(self): raise NotImplementedError('Please implement in subclasses.') -class Matcher(_Common): + +class Matcher(object): version_class = None - predicate_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?") - constraint_re = re.compile(r'^(<=|>=|<|>|!=|==)?\s*([^\s,]+)$') + dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?") + comp_re = re.compile(r'^(<=|>=|<|>|!=|==|~=)?\s*([^\s,]+)$') + num_re = re.compile(r'^\d+(\.\d+)*$') + # value is either a callable or the name of a method _operators = { - "<": lambda x, y: x < y, - ">": lambda x, y: x > y, - "<=": lambda x, y: x == y or x < y, - ">=": lambda x, y: x == y or x > y, - "==": lambda x, y: x == y, - "!=": lambda x, y: x != y, + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda v, c, p: v == c, + # by default, compatible => >=. 
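+ # (here '~=' simply behaves like '>='; schemes with true compatible-release + # semantics override it, as NormalizedMatcher does via _match_compatible below)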
+ '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, } def __init__(self, s): if self.version_class is None: raise ValueError('Please specify a version class') self._string = s = s.strip() - m = self.predicate_re.match(s) + m = self.dist_re.match(s) if not m: raise ValueError('Not valid: %r' % s) groups = m.groups('') @@ -116,19 +104,42 @@ def __init__(self, s): if groups[2]: constraints = [c.strip() for c in groups[2].split(',')] for c in constraints: - m = self.constraint_re.match(c) + m = self.comp_re.match(c) if not m: raise ValueError('Invalid %r in %r' % (c, s)) - groups = m.groups('==') - clist.append((groups[0], self.version_class(groups[1]))) + groups = m.groups() + op = groups[0] or '~=' + s = groups[1] + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + if not self.num_re.match(vn): + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) self._parts = tuple(clist) def match(self, version): """Check if the provided version matches the constraints.""" if isinstance(version, string_types): version = self.version_class(version) - for operator, constraint in self._parts: - if not self._operators[operator](version, constraint): + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): return False return True @@ -154,6 +165,12 @@ def __ne__(self, other): def __hash__(self): return hash(self.key) + hash(self._parts) + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + # A marker used in the second and third parts of the `parts` tuple, for # versions that don't have those segments, to sort properly. An example # of versions in sort order ('highest' last): @@ -205,7 +222,7 @@ def _parse_numdots(s, full_ver, drop_zeroes=False, min_length=0): result.pop() return result -def pep386_key(s, fail_on_huge_major_ver=True): +def _pep386_key(s): """Parses a string version into parts using PEP-386 logic.""" match = _VERSION_RE.search(s) @@ -242,16 +259,13 @@ def pep386_key(s, fail_on_huge_major_ver=True): parts.append(tuple(postdev)) else: parts.append(_FINAL_MARKER) - if fail_on_huge_major_ver and parts[0][0] > 1980: - raise HugeMajorVersionError("huge major version number, %r, " - "which might cause future problems: %r" % (parts[0][0], s)) return tuple(parts) PEP426_VERSION_RE = re.compile('^(\d+\.\d+(\.\d+)*)((a|b|c|rc)(\d+))?' '(\.(post)(\d+))?(\.(dev)(\d+))?$') -def pep426_key(s, _=None): +def _pep426_key(s): s = s.strip() m = PEP426_VERSION_RE.match(s) if not m: @@ -293,7 +307,7 @@ def pep426_key(s, _=None): return nums, pre, post, dev -normalized_key = pep426_key +_normalized_key = _pep426_key class NormalizedVersion(Version): """A rational version. 
@@ -313,7 +327,16 @@ class NormalizedVersion(Version): 1.2a # release level must have a release serial 1.2.3b """ - def parse(self, s): return normalized_key(s) + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP426_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[0].split('.')) + return result PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) @@ -321,16 +344,13 @@ def parse(self, s): return normalized_key(s) def is_prerelease(self): return any(t[0] in self.PREREL_TAGS for t in self._parts) -class UnlimitedMajorVersion(Version): - def parse(self, s): return normalized_key(s, False) - # We want '2.5' to match '2.5.4' but not '2.50'. -def _match_at_front(x, y): - if x == y: - return True +def _match_prefix(x, y): x = str(x) y = str(y) + if x == y: + return True if not x.startswith(y): return False n = len(y) @@ -339,13 +359,61 @@ def _match_at_front(x, y): class NormalizedMatcher(Matcher): version_class = NormalizedVersion - _operators = dict(Matcher._operators) - _operators.update({ - "<=": lambda x, y: _match_at_front(x, y) or x < y, - ">=": lambda x, y: _match_at_front(x, y) or x > y, - "==": lambda x, y: _match_at_front(x, y), - "!=": lambda x, y: not _match_at_front(x, y), - }) + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '!=': '_match_ne', + } + + def _match_lt(self, version, constraint, prefix): + if version >= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_gt(self, version, constraint, prefix): + if version <= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_le(self, version, constraint, prefix): + return version <= constraint + + def _match_ge(self, version, constraint, prefix): + return version >= constraint + + def _match_eq(self, version, constraint, prefix): + if not prefix: + result = (version == constraint) + else: + result = _match_prefix(version, constraint) + return result + + def _match_ne(self, version, constraint, prefix): + if not prefix: + result = (version != constraint) + else: + result = not _match_prefix(version, constraint) + return result + + def _match_compatible(self, version, constraint, prefix): + if version == constraint: + return True + if version < constraint: + return False + release_clause = constraint._release_clause + if len(release_clause) > 1: + release_clause = release_clause[:-1] + pfx = '.'.join([str(i) for i in release_clause]) + return _match_prefix(version, pfx) _REPLACEMENTS = ( (re.compile('[.+-]$'), ''), # remove trailing puncts @@ -371,10 +439,10 @@ class NormalizedMatcher(Matcher): _NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') -def suggest_semantic_version(s): +def _suggest_semantic_version(s): """ Try to suggest a semantic form for a version for which - suggest_normalized_version couldn't come up with anything. + _suggest_normalized_version couldn't come up with anything. 
""" result = s.strip().lower() for pat, repl in _REPLACEMENTS: @@ -417,7 +485,7 @@ def suggest_semantic_version(s): return result -def suggest_normalized_version(s): +def _suggest_normalized_version(s): """Suggest a normalized version close to the given version string. If you have a version string that isn't rational (i.e. NormalizedVersion @@ -435,7 +503,7 @@ def suggest_normalized_version(s): @returns A rational version string, or None, if couldn't determine one. """ try: - normalized_key(s) + _normalized_key(s) return s # already rational except UnsupportedVersionError: pass @@ -522,14 +590,11 @@ def suggest_normalized_version(s): rs = re.sub(r"p(\d+)$", r".post\1", rs) try: - normalized_key(rs) + _normalized_key(rs) except UnsupportedVersionError: rs = None return rs -def suggest_adaptive_version(s): - return suggest_normalized_version(s) or suggest_semantic_version(s) - # # Legacy version processing (distribute-compatible) # @@ -546,7 +611,7 @@ def suggest_adaptive_version(s): } -def legacy_key(s): +def _legacy_key(s): def get_parts(s): result = [] for p in _VERSION_PART.split(s.lower()): @@ -572,7 +637,7 @@ def get_parts(s): return tuple(result) class LegacyVersion(Version): - def parse(self, s): return legacy_key(s) + def parse(self, s): return _legacy_key(s) PREREL_TAGS = set( ['*a', '*alpha', '*b', '*beta', '*c', '*rc', '*r', '*@', '*pre'] @@ -585,6 +650,24 @@ def is_prerelease(self): class LegacyMatcher(Matcher): version_class = LegacyVersion + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile('^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + # # Semantic versioning # @@ -596,7 +679,7 @@ class LegacyMatcher(Matcher): def is_semver(s): return _SEMVER_RE.match(s) -def semantic_key(s): +def _semantic_key(s): def make_tuple(s, absent): if s is None: result = (absent,) @@ -619,7 +702,7 @@ def make_tuple(s, absent): class SemanticVersion(Version): - def parse(self, s): return semantic_key(s) + def parse(self, s): return _semantic_key(s) @property def is_prerelease(self): @@ -629,42 +712,6 @@ def is_prerelease(self): class SemanticMatcher(Matcher): version_class = SemanticVersion -# -# Adaptive versioning. When handed a legacy version string, tries to -# determine a suggested normalized version, and work with that. -# - -def adaptive_key(s): - try: - result = normalized_key(s, False) - except UnsupportedVersionError: - ss = suggest_normalized_version(s) - if ss is not None: - result = normalized_key(ss) # "guaranteed" to work - else: - ss = s # suggest_semantic_version(s) or s - result = semantic_key(ss) # let's hope ... 
- return result - - -class AdaptiveVersion(NormalizedVersion): - def parse(self, s): return adaptive_key(s) - - @property - def is_prerelease(self): - try: - normalized_key(self._string) - not_sem = True - except UnsupportedVersionError: - ss = suggest_normalized_version(self._string) - not_sem = ss is not None - if not_sem: - return any(t[0] in self.PREREL_TAGS for t in self._parts) - return self._parts[1][0] != '|' - -class AdaptiveMatcher(NormalizedMatcher): - version_class = AdaptiveVersion - class VersionScheme(object): def __init__(self, key, matcher, suggester=None): @@ -702,16 +749,14 @@ def suggest(self, s): return result _SCHEMES = { - 'normalized': VersionScheme(normalized_key, NormalizedMatcher, - suggest_normalized_version), - 'legacy': VersionScheme(legacy_key, LegacyMatcher, lambda self, s: s), - 'semantic': VersionScheme(semantic_key, SemanticMatcher, - suggest_semantic_version), - 'adaptive': VersionScheme(adaptive_key, AdaptiveMatcher, - suggest_adaptive_version), + 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, + _suggest_normalized_version), + 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), + 'semantic': VersionScheme(_semantic_key, SemanticMatcher, + _suggest_semantic_version), } -_SCHEMES['default'] = _SCHEMES['adaptive'] +_SCHEMES['default'] = _SCHEMES['normalized'] def get_scheme(name): if name not in _SCHEMES: diff --git a/pip/vendor/distlib/w32.exe b/pip/vendor/distlib/w32.exe new file mode 100644 index 00000000000..04fa9aac44a Binary files /dev/null and b/pip/vendor/distlib/w32.exe differ diff --git a/pip/vendor/distlib/w64.exe b/pip/vendor/distlib/w64.exe new file mode 100644 index 00000000000..acf9e6952f5 Binary files /dev/null and b/pip/vendor/distlib/w64.exe differ diff --git a/pip/vendor/distlib/wheel.py b/pip/vendor/distlib/wheel.py index 8274732adcc..5dcc43a603a 100644 --- a/pip/vendor/distlib/wheel.py +++ b/pip/vendor/distlib/wheel.py @@ -23,10 +23,10 @@ import tempfile import zipfile -from . import DistlibException +from . 
import __version__, DistlibException from .compat import sysconfig, ZipFile, fsdecode, text_type, filter from .database import DistributionPath, InstalledDistribution -from .metadata import Metadata +from .metadata import Metadata, METADATA_FILENAME from .scripts import ScriptMaker from .util import (FileOperator, convert_path, CSVReader, CSVWriter, cached_property, get_cache_base) @@ -112,7 +112,9 @@ def load_module(self, fullname): raise ImportError('unable to find extension for %s' % fullname) result = imp.load_dynamic(fullname, self.libs[fullname]) result.__loader__ = self - result.__package__, _ = fullname.rsplit('.', 1) + parts = fullname.rsplit('.', 1) + if len(parts) > 1: + result.__package__ = parts[0] return result _hook = Mounter() @@ -193,13 +195,16 @@ def metadata(self): pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) info_dir = '%s.dist-info' % name_ver - metadata_filename = posixpath.join(info_dir, 'METADATA') wrapper = codecs.getreader('utf-8') + metadata_filename = posixpath.join(info_dir, METADATA_FILENAME) with ZipFile(pathname, 'r') as zf: - with zf.open(metadata_filename) as bf: - wf = wrapper(bf) - result = Metadata() - result.read_file(wf) + try: + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + except KeyError: + raise ValueError('Invalid wheel, because %s is ' + 'missing' % METADATA_FILENAME) return result @cached_property @@ -334,11 +339,9 @@ def build(self, paths, tags=None): ap = to_posix(os.path.join(info_dir, fn)) archive_paths.append((ap, p)) - import distlib - wheel_metadata = [ 'Wheel-Version: %d.%d' % self.wheel_version, - 'Generator: distlib %s' % distlib.__version__, + 'Generator: distlib %s' % __version__, 'Root-Is-Purelib: %s' % is_pure, ] for pyver, abi, arch in self.tags: @@ -372,7 +375,8 @@ def build(self, paths, tags=None): zf.write(p, ap) return pathname - def install(self, paths, dry_run=False, executable=None, warner=None): + def install(self, paths, dry_run=False, executable=None, warner=None, + lib_only=False): """ Install a wheel to the specified paths. If ``executable`` is specified, it should be the Unicode absolute path to the executable written @@ -381,6 +385,11 @@ def install(self, paths, dry_run=False, executable=None, warner=None): tuples indicating the wheel version of this software and the wheel version in the file, if there is a discrepancy in the versions. This can be used to issue any warnings or raise any exceptions. + If ``lib_only`` is True, only the purelib/platlib files are installed, + and the headers, scripts, data and dist-info metadata are not written. + + The return value is a :class:`InstalledDistribution` instance unless + ``lib_only`` is True, in which case the return value is ``None``.
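+ + An illustrative sketch (``paths`` is assumed to map keys such as + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'prefix' to + target directories): + Wheel('foo-0.1-py27-none-any.whl').install(paths, lib_only=True) + unpacks just the library files and returns ``None``.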
""" pathname = os.path.join(self.dirname, self.filename) name_ver = '%s-%s' % (self.name, self.version) @@ -413,6 +422,7 @@ def install(self, paths, dry_run=False, executable=None, warner=None): records[p] = row data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') script_pfx = posixpath.join(data_dir, 'scripts', '') fileop = FileOperator(dry_run=dry_run) @@ -437,6 +447,10 @@ def install(self, paths, dry_run=False, executable=None, warner=None): u_arcname = arcname else: u_arcname = arcname.decode('utf-8') + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + if u_arcname.endswith('/RECORD.jws'): + continue row = records[u_arcname] if row[2] and str(zinfo.file_size) != row[2]: raise DistlibException('size mismatch for ' @@ -450,6 +464,9 @@ def install(self, paths, dry_run=False, executable=None, warner=None): raise DistlibException('digest mismatch for ' '%s' % arcname) + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue is_script = (u_arcname.startswith(script_pfx) and not u_arcname.endswith('.exe')) @@ -495,20 +512,25 @@ def install(self, paths, dry_run=False, executable=None, warner=None): fileop.set_executable_mode(filenames) outfiles.extend(filenames) - p = os.path.join(libdir, info_dir) - dist = InstalledDistribution(p) - - # Write SHARED - paths = dict(paths) # don't change passed in dict - del paths['purelib'] - del paths['platlib'] - paths['lib'] = libdir - p = dist.write_shared_locations(paths, dry_run) - outfiles.append(p) - - # Write RECORD - dist.write_installed_files(outfiles, paths['prefix'], - dry_run) + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) return dist except Exception as e: # pragma: no cover logger.exception('installation failed.') @@ -518,7 +540,7 @@ def install(self, paths, dry_run=False, executable=None, warner=None): shutil.rmtree(workdir) def _get_dylib_cache(self): - result = os.path.join(get_cache_base(), 'dylib-cache') + result = os.path.join(get_cache_base(), 'dylib-cache', sys.version[:3]) if not os.path.isdir(result): os.makedirs(result) return result diff --git a/pip/wheel.py b/pip/wheel.py index 5401f0114d8..957e92b1033 100644 --- a/pip/wheel.py +++ b/pip/wheel.py @@ -7,7 +7,7 @@ import functools import hashlib import os -import pkg_resources +from pip.compat import pkg_resources import re import shutil import sys diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index cf68e0404fa..99c39597b80 100644 --- a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -12,7 +12,7 @@ def test_show(script): assert len(lines) == 6 assert lines[0] == '---', lines[0] assert lines[1] == 'Name: pip', lines[1] - assert lines[2] == 'Version: %s' % __version__, lines[2] + assert lines[2] == 'Version: %s' % __version__, '%r should be %r' % (lines[2], 'Version: %s' % __version__) assert lines[3].startswith('Location: '), lines[3] assert lines[4] == 'Requires: ' @@ -27,7 +27,7 @@ def test_show_with_files_not_found(script): assert len(lines) == 8 assert lines[0] == '---', lines[0] assert 
lines[1] == 'Name: pip', lines[1] - assert lines[2] == 'Version: %s' % __version__, lines[2] + assert lines[2] == 'Version: %s' % __version__, '%r should be %r' % (lines[2], 'Version: %s' % __version__) assert lines[3].startswith('Location: '), lines[3] assert lines[4] == 'Requires: ' assert lines[5] == 'Files:', lines[4] diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index 0bcd271e054..b75cb522933 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -5,6 +5,7 @@ import sys from os.path import join, abspath, normpath from tempfile import mkdtemp +from nose import SkipTest from mock import patch from tests.lib import assert_all_changes, pyversion from tests.lib.local_repos import local_repo, local_checkout @@ -199,7 +200,7 @@ def test_uninstallpathset_no_paths(mock_logger): """ from pip.req import UninstallPathSet - from pkg_resources import get_distribution + from pip.compat.pkg_resources import get_distribution test_dist = get_distribution('pip') # ensure that the distribution is "local" with patch("pip.req.dist_is_local") as mock_dist_is_local: @@ -217,7 +218,7 @@ def test_uninstallpathset_non_local(mock_logger): """ nonlocal_path = os.path.abspath("/nonlocal") from pip.req import UninstallPathSet - from pkg_resources import get_distribution + from pip.compat.pkg_resources import get_distribution test_dist = get_distribution('pip') test_dist.location = nonlocal_path # ensure that the distribution is "non-local" diff --git a/tests/packages/LocalExtras/LocalExtras.egg-info/PKG-INFO b/tests/packages/LocalExtras/LocalExtras.egg-info/PKG-INFO new file mode 100644 index 00000000000..0f15a4ba837 --- /dev/null +++ b/tests/packages/LocalExtras/LocalExtras.egg-info/PKG-INFO @@ -0,0 +1,10 @@ +Metadata-Version: 1.0 +Name: LocalExtras +Version: 0.0.1 +Summary: UNKNOWN +Home-page: UNKNOWN +Author: UNKNOWN +Author-email: UNKNOWN +License: UNKNOWN +Description: UNKNOWN +Platform: UNKNOWN diff --git a/tests/packages/LocalExtras/LocalExtras.egg-info/SOURCES.txt b/tests/packages/LocalExtras/LocalExtras.egg-info/SOURCES.txt new file mode 100644 index 00000000000..2753ff598fb --- /dev/null +++ b/tests/packages/LocalExtras/LocalExtras.egg-info/SOURCES.txt @@ -0,0 +1,6 @@ +LocalExtras.egg-info/PKG-INFO +LocalExtras.egg-info/SOURCES.txt +LocalExtras.egg-info/dependency_links.txt +LocalExtras.egg-info/requires.txt +LocalExtras.egg-info/top_level.txt +localextras/__init__.py \ No newline at end of file diff --git a/tests/packages/LocalExtras/LocalExtras.egg-info/dependency_links.txt b/tests/packages/LocalExtras/LocalExtras.egg-info/dependency_links.txt new file mode 100644 index 00000000000..d427d3750d3 --- /dev/null +++ b/tests/packages/LocalExtras/LocalExtras.egg-info/dependency_links.txt @@ -0,0 +1 @@ +file:///home/vinay/projects/pip-distlib/tests/indexes/simple/simple diff --git a/tests/packages/LocalExtras/LocalExtras.egg-info/requires.txt b/tests/packages/LocalExtras/LocalExtras.egg-info/requires.txt new file mode 100644 index 00000000000..df5275fb7c7 --- /dev/null +++ b/tests/packages/LocalExtras/LocalExtras.egg-info/requires.txt @@ -0,0 +1,4 @@ + + +[bar] +simple \ No newline at end of file diff --git a/tests/packages/LocalExtras/LocalExtras.egg-info/top_level.txt b/tests/packages/LocalExtras/LocalExtras.egg-info/top_level.txt new file mode 100644 index 00000000000..ae3864d84e4 --- /dev/null +++ b/tests/packages/LocalExtras/LocalExtras.egg-info/top_level.txt @@ -0,0 +1 @@ +localextras diff --git a/tests/unit/test_util.py 
b/tests/unit/test_util.py index 11e2aa7cea5..733fff9d01c 100644 --- a/tests/unit/test_util.py +++ b/tests/unit/test_util.py @@ -172,7 +172,7 @@ def dist_is_local(self, dist): @patch('pip.util.dist_is_local') @patch('pip.util.dist_is_editable') - @patch('pkg_resources.working_set', workingset) + @patch('pip.compat.pkg_resources.working_set', workingset) def test_editables_only(self, mock_dist_is_editable, mock_dist_is_local): mock_dist_is_editable.side_effect = self.dist_is_editable mock_dist_is_local.side_effect = self.dist_is_local @@ -183,7 +183,7 @@ def test_editables_only(self, mock_dist_is_editable, mock_dist_is_local): @patch('pip.util.dist_is_local') @patch('pip.util.dist_is_editable') - @patch('pkg_resources.working_set', workingset) + @patch('pip.compat.pkg_resources.working_set', workingset) def test_exclude_editables(self, mock_dist_is_editable, mock_dist_is_local): mock_dist_is_editable.side_effect = self.dist_is_editable mock_dist_is_local.side_effect = self.dist_is_local @@ -194,7 +194,7 @@ def test_exclude_editables(self, mock_dist_is_editable, mock_dist_is_local): @patch('pip.util.dist_is_local') @patch('pip.util.dist_is_editable') - @patch('pkg_resources.working_set', workingset) + @patch('pip.compat.pkg_resources.working_set', workingset) def test_include_globals(self, mock_dist_is_editable, mock_dist_is_local): mock_dist_is_editable.side_effect = self.dist_is_editable mock_dist_is_local.side_effect = self.dist_is_local
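# An illustrative sketch of the revised matching semantics introduced in version.py above (assuming pip.vendor.distlib is importable): # from pip.vendor.distlib.version import NormalizedMatcher # m = NormalizedMatcher('foo (~= 1.4.1)') # m.match('1.4.2') -> True: same 1.4 release series and >= 1.4.1 # m.match('1.5.0') -> False: outside the compatible-release prefix # NormalizedMatcher('foo (== 1.4.*)').match('1.4.9') -> True: '.*' prefix match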