Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Revert "Merge pull request #1519 from dstufft/remove-dependency-links" #1955

Merged
merged 1 commit into from
Aug 4, 2014
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 0 additions & 7 deletions CHANGES.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,6 @@
until their removal in pip v1.8. For more information please see
https://pip.pypa.io/en/latest/reference/pip_install.html#caching

* Removed the deprecated support for dependency links and the
``--process-dependency-links`` flag that turned them on. For alternatives to
dependency links please see http://www.pip-installer.org/en/latest/dependency_links.html

* `wsgiref` and `argparse` (for >py26) are now excluded from `pip list` and `pip
freeze` (:pull:`1606`, :pull:`1369`)

Expand Down Expand Up @@ -167,9 +163,6 @@
* **BACKWARD INCOMPATIBLE** pip no longer respects dependency links by default.
Users may opt into respecting them again using ``--process-dependency-links``.

* **DEPRECATION** ``pip install --process-dependency-links`` and the ability to
use dependency links at all has been deprecated and will be removed in 1.6.

* **DEPRECATION** ``pip install --no-install`` and ``pip install
--no-download`` are now formally deprecated. See :issue:`906` for discussion on
possible alternatives, or lack thereof, in future releases.
Expand Down
64 changes: 0 additions & 64 deletions docs/dependency_links.rst

This file was deleted.

35 changes: 33 additions & 2 deletions pip/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ def __init__(self, name, req, editable, comments=()):
_date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

@classmethod
def from_dist(cls, dist, find_tags=False):
def from_dist(cls, dist, dependency_links, find_tags=False):
location = os.path.normcase(os.path.abspath(dist.location))
comments = []
from pip.vcs import vcs, get_src_requirement
Expand Down Expand Up @@ -227,7 +227,38 @@ def from_dist(cls, dist, find_tags=False):
req = dist.as_requirement()
specs = req.specs
assert len(specs) == 1 and specs[0][0] == '=='

version = specs[0][1]
ver_match = cls._rev_re.search(version)
date_match = cls._date_re.search(version)
if ver_match or date_match:
svn_backend = vcs.get_backend('svn')
if svn_backend:
svn_location = svn_backend().get_location(
dist,
dependency_links,
)
if not svn_location:
logger.warn(
'Warning: cannot find svn location for %s' % req)
comments.append(
'## FIXME: could not find svn URL in dependency_links '
'for this package:'
)
else:
comments.append(
'# Installing as editable to satisfy requirement %s:' %
req
)
if ver_match:
rev = ver_match.group(1)
else:
rev = '{%s}' % date_match.group(1)
editable = True
req = '%s@%s#egg=%s' % (
svn_location,
rev,
cls.egg_name(dist)
)
return cls(dist.project_name, req, editable, comments)

@staticmethod
Expand Down
10 changes: 10 additions & 0 deletions pip/cmdoptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -279,6 +279,15 @@ def make(self):
help=SUPPRESS_HELP
)

# Deprecated opt-in flag: re-enables processing of setuptools
# dependency_links (off by default). Passing it triggers a one-time
# deprecation warning in PackageFinder.add_dependency_links.
# Remove after 1.5
process_dependency_links = OptionMaker(
    "--process-dependency-links",
    dest="process_dependency_links",
    action="store_true",
    default=False,
    help="Enable the processing of dependency links.",
)

requirements = OptionMaker(
'-r', '--requirement',
dest='requirements',
Expand Down Expand Up @@ -431,5 +440,6 @@ def make(self):
no_allow_external,
allow_unsafe,
no_allow_unsafe,
process_dependency_links,
]
}
17 changes: 16 additions & 1 deletion pip/commands/freeze.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from pip.log import logger
from pip.basecommand import Command
from pip.util import get_installed_distributions
from pip._vendor import pkg_resources

# packages to exclude from freeze output
freeze_excludes = stdlib_pkgs + ['setuptools', 'pip', 'distribute']
Expand Down Expand Up @@ -67,14 +68,28 @@ def run(self, options, args):
if skip_regex:
skip_match = re.compile(skip_regex)

dependency_links = []

f = sys.stdout

for dist in pkg_resources.working_set:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt')
)
for link in find_links:
if '#egg=' in link:
dependency_links.append(link)
for link in find_links:
f.write('-f %s\n' % link)
installations = {}
for dist in get_installed_distributions(local_only=local_only,
skip=freeze_excludes):
req = pip.FrozenRequirement.from_dist(dist, find_tags=find_tags)
req = pip.FrozenRequirement.from_dist(
dist,
dependency_links,
find_tags=find_tags,
)
installations[req.name] = req
if requirement:
req_f = open(requirement)
Expand Down
1 change: 1 addition & 0 deletions pip/commands/install.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,7 @@ def _build_package_finder(self, options, index_urls, session):
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)

Expand Down
9 changes: 9 additions & 0 deletions pip/commands/list.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,7 @@ def _build_package_finder(self, options, index_urls, session):
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)

Expand Down Expand Up @@ -115,8 +116,16 @@ def find_packages_latests_versions(self, options):
)
index_urls += options.mirrors

dependency_links = []
for dist in get_installed_distributions(local_only=options.local):
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
)

with self._build_session(options) as session:
finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)

installed_packages = get_installed_distributions(
local_only=options.local,
Expand Down
1 change: 1 addition & 0 deletions pip/commands/wheel.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,6 +162,7 @@ def run(self, options, args):
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)

Expand Down
39 changes: 37 additions & 2 deletions pip/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ class PackageFinder(object):
def __init__(self, find_links, index_urls,
use_wheel=True, allow_external=[], allow_unverified=[],
allow_all_external=False, allow_all_prereleases=False,
session=None):
process_dependency_links=False, session=None):
if session is None:
raise TypeError(
"PackageFinder() missing 1 required keyword argument: "
Expand All @@ -47,6 +47,7 @@ def __init__(self, find_links, index_urls,

self.find_links = find_links
self.index_urls = index_urls
self.dependency_links = []

# These are boring links that have already been logged somehow:
self.logged_links = set()
Expand Down Expand Up @@ -78,9 +79,28 @@ def __init__(self, find_links, index_urls,
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases

# Do we process dependency links?
self.process_dependency_links = process_dependency_links
self._have_warned_dependency_links = False

# The Session we'll use to make requests
self.session = session

def add_dependency_links(self, links):
    """Register *links* as dependency links on this finder.

    Does nothing unless dependency-link processing was enabled
    (``--process-dependency-links``); the first time links are
    actually accepted, a deprecation warning is emitted.
    """
    # FIXME: this shouldn't be a global list; it should only apply to
    # requirements of the package that specifies the dependency_links value
    # FIXME: also, we should track comes_from (i.e., use Link)
    if not self.process_dependency_links:
        return
    if not self._have_warned_dependency_links:
        logger.deprecated(
            "1.6",
            "Dependency Links processing has been deprecated with an "
            "accelerated time schedule and will be removed in pip 1.6",
        )
        self._have_warned_dependency_links = True
    self.dependency_links.extend(links)

def _sort_locations(self, locations):
"""
Sort locations into "files" (archives) and "urls", and return
Expand Down Expand Up @@ -207,11 +227,16 @@ def mkurl_pypi_url(url):
posixpath.join(main_index_url.url, version)] + locations

file_locations, url_locations = self._sort_locations(locations)
_flocations, _ulocations = self._sort_locations(self.dependency_links)
file_locations.extend(_flocations)

# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
locations = [Link(url, trusted=True) for url in url_locations]

# We explicitly do not trust links that came from dependency_links
locations.extend([Link(url) for url in _ulocations])

logger.debug('URLs to search for versions for %s:' % req)
for location in locations:
logger.debug('* %s' % location)
Expand Down Expand Up @@ -260,6 +285,15 @@ def mkurl_pypi_url(url):
)
finally:
logger.indent -= 2
dependency_versions = list(self._package_versions(
[Link(url) for url in self.dependency_links], req.name.lower()))
if dependency_versions:
logger.info(
'dependency_links found: %s' %
', '.join([
link.url for p, link, version in dependency_versions
])
)
file_versions = list(
self._package_versions(
[Link(url) for url in file_locations],
Expand All @@ -268,6 +302,7 @@ def mkurl_pypi_url(url):
)
if (not found_versions
and not page_versions
and not dependency_versions
and not file_versions):
logger.fatal(
'Could not find any downloads that satisfy the requirement'
Expand Down Expand Up @@ -307,7 +342,7 @@ def mkurl_pypi_url(url):
)
# this is an intentional priority ordering
all_versions = installed_version + file_versions + found_versions \
+ page_versions
+ page_versions + dependency_versions
applicable_versions = []
for (parsed_version, link, version) in all_versions:
if version not in req.req:
Expand Down
4 changes: 4 additions & 0 deletions pip/req/req_install.py
Original file line number Diff line number Diff line change
Expand Up @@ -448,6 +448,10 @@ def pkg_info(self):
p.feed(data or '')
return p.close()

@property
def dependency_links(self):
    """Dependency link URLs for this requirement, read (via
    ``egg_info_lines``) from its ``dependency_links.txt`` egg-info file."""
    return self.egg_info_lines('dependency_links.txt')

_requirements_section_re = re.compile(r'\[(.*?)\]')

def requirements(self, extras=()):
Expand Down
4 changes: 4 additions & 0 deletions pip/req/req_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -425,6 +425,10 @@ def prepare_files(self, finder):

# sdists
else:
# FIXME: shouldn't be globally added:
finder.add_dependency_links(
req_to_install.dependency_links
)
if (req_to_install.extras):
logger.notify(
"Installing extra requirements: %r" %
Expand Down
17 changes: 17 additions & 0 deletions pip/vcs/subversion.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import os
import re

from pip.compat import urlparse
from pip.index import Link
from pip.util import rmtree, display_path, call_subprocess
from pip.log import logger
from pip.vcs import vcs, VersionControl
Expand Down Expand Up @@ -34,6 +36,7 @@ def get_info(self, location):
'Cannot determine URL of svn checkout %s' %
display_path(location)
)
logger.info('Output that cannot be parsed: \n%s' % output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
Expand Down Expand Up @@ -84,6 +87,20 @@ def obtain(self, dest):
call_subprocess(
[self.cmd, 'checkout', '-q'] + rev_options + [url, dest])

def get_location(self, dist, dependency_links):
    """Return the URL (``#``-fragment stripped) of the first entry in
    *dependency_links* whose ``#egg=`` fragment names *dist*, else None.
    """
    for candidate in dependency_links:
        fragment = Link(candidate).egg_fragment
        if not fragment:
            continue
        # FIXME: will this work when a package has - in the name?
        # A trailing '-<version>' segment is stripped before comparing;
        # a fragment without '-' is compared as-is (not lower-cased).
        project = ('-'.join(fragment.split('-')[:-1]).lower()
                   if '-' in fragment else fragment)
        if project == dist.key:
            return candidate.split('#', 1)[0]
    return None

def get_revision(self, location):
"""
Return the maximum revision for all files under a given location
Expand Down
1 change: 1 addition & 0 deletions tests/data/packages/LocalExtras/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
/LocalExtras.egg-info
Empty file.
Loading