Ensure resolver doesn't compare editable specifiers #3660

Closed
wants to merge 11 commits
1 change: 1 addition & 0 deletions news/3647.bugfix.rst
@@ -0,0 +1 @@
Pipenv will no longer inadvertently set ``editable=True`` on all vcs dependencies.
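
For context, a hypothetical sketch of the kind of VCS entry affected (names and URLs are illustrative, not taken from this PR):

# A non-editable VCS requirement from a Pipfile, expressed as a plain dict.
pipfile_entry = {"git": "https://github.com/requests/requests.git", "ref": "master"}
# After this fix the locked entry keeps the same shape; previously the resolver could
# inadvertently add "editable": True even though the Pipfile never requested it.
locked_entry = {"git": "https://github.com/requests/requests.git", "ref": "master"}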
2 changes: 2 additions & 0 deletions news/3656.bugfix.rst
@@ -0,0 +1,2 @@
The ``--keep-outdated`` argument to ``pipenv install`` and ``pipenv lock`` will now drop specifier constraints when encountering editable dependencies.
- In addition, ``--keep-outdated`` will retain specifiers that would otherwise be dropped from any entries that have not been updated.
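
A rough illustration of the two behaviours described above, using hypothetical entries (not taken from this PR):

# Editable dependencies carry no comparable pin, so --keep-outdated now drops the
# specifier constraint instead of trying to compare against it.
editable_entry = {"editable": True, "git": "https://github.com/requests/requests.git"}
# An entry that this run did not update keeps whatever specifier it already had locked.
unchanged_entry = {"version": "==2.21.0"}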
25 changes: 15 additions & 10 deletions pipenv/core.py
@@ -1411,15 +1411,20 @@ def pip_install(
name = requirement.name
if requirement.extras:
name = "{0}{1}".format(name, requirement.extras_as_pip)
line = "-e {0}#egg={1}".format(vistir.path.path_to_url(repo.checkout_directory), requirement.name)
line = "{0}{1}#egg={2}".format(
line,
vistir.path.path_to_url(repo.checkout_directory),
requirement.name
)
if repo.subdirectory:
line = "{0}&subdirectory={1}".format(line, repo.subdirectory)
else:
line = requirement.as_line(**line_kwargs)
click.echo(
"Writing requirement line to temporary file: {0!r}".format(line),
err=True
)
if environments.is_verbose():
click.echo(
"Writing requirement line to temporary file: {0!r}".format(line),
err=True
)
f.write(vistir.misc.to_bytes(line))
r = f.name
f.close()
@@ -1436,10 +1441,11 @@ def pip_install(
ignore_hashes = True if not requirement.hashes else ignore_hashes
line = requirement.as_line(include_hashes=not ignore_hashes)
line = "{0} {1}".format(line, " ".join(src))
click.echo(
"Writing requirement line to temporary file: {0!r}".format(line),
err=True
)
if environments.is_verbose():
click.echo(
"Writing first requirement line to temporary file: {0!r}".format(line),
err=True
)
f.write(vistir.misc.to_bytes(line))
r = f.name
f.close()
@@ -1630,7 +1636,6 @@ def system_which(command, mult=False):
return result



def format_help(help):
"""Formats the help string."""
help = help.replace("Options:", str(crayons.normal("Options:", bold=True)))
2 changes: 1 addition & 1 deletion pipenv/patched/piptools/utils.py
@@ -70,7 +70,7 @@ def clean_requires_python(candidates):
all_candidates = []
py_version = parse_version(os.environ.get('PIP_PYTHON_VERSION', '.'.join(map(str, sys.version_info[:3]))))
for c in candidates:
if c.requires_python:
if getattr(c, "requires_python", None):
# Old specifications had people setting this to single digits
# which is effectively the same as '>=digit,<digit+1'
if c.requires_python.isdigit():
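
The switch to ``getattr`` above guards against candidate objects that do not expose a ``requires_python`` attribute at all; a minimal sketch with a hypothetical stand-in class:

class FakeCandidate(object):
    # Hypothetical candidate with no requires_python attribute.
    pass

c = FakeCandidate()
# Accessing c.requires_python directly would raise AttributeError here, whereas
# getattr(c, "requires_python", None) returns None and the check is skipped safely.
if getattr(c, "requires_python", None):
    print("candidate declares a python requirement")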
152 changes: 130 additions & 22 deletions pipenv/resolver.py
@@ -1,3 +1,6 @@
# -*- coding: utf-8 -*-

from __future__ import absolute_import, print_function
import json
import logging
import os
@@ -106,9 +109,14 @@ def __init__(self, name, entry_dict, project, resolver, reverse_deps=None, dev=F
self.pipfile = project.parsed_pipfile.get(pipfile_section, {})
self.lockfile = project.lockfile_content.get(section, {})
self.pipfile_dict = self.pipfile.get(self.pipfile_name, {})
self.lockfile_dict = self.lockfile.get(name, entry_dict)
if self.dev and self.name in project.lockfile_content.get("default", {}):
self.lockfile_dict = project.lockfile_content["default"][name]
else:
self.lockfile_dict = self.lockfile.get(name, entry_dict)
self.resolver = resolver
self.reverse_deps = reverse_deps
self._original_markers = None
self._markers = None
self._entry = None
self._lockfile_entry = None
self._pipfile_entry = None
@@ -133,13 +141,100 @@ def clean_initial_dict(cls, entry_dict):
del entry_dict["name"]
return entry_dict

@classmethod
def parse_pyparsing_exprs(cls, expr_iterable):
from pipenv.vendor.pyparsing import Literal, MatchFirst
keys = []
expr_list = []
expr = expr_iterable.copy()
if isinstance(expr, Literal) or (
expr.__class__.__name__ == Literal.__name__
):
keys.append(expr.match)
elif isinstance(expr, MatchFirst) or (
expr.__class__.__name__ == MatchFirst.__name__
):
expr_list = expr.exprs
elif isinstance(expr, list):
expr_list = expr
if expr_list:
for part in expr_list:
keys.extend(cls.parse_pyparsing_exprs(part))
return keys

@classmethod
def get_markers_from_dict(cls, entry_dict):
from pipenv.vendor.packaging import markers as packaging_markers
from pipenv.vendor.requirementslib.models.markers import normalize_marker_str
marker_keys = cls.parse_pyparsing_exprs(packaging_markers.VARIABLE)
markers = set()
keys_in_dict = [k for k in marker_keys if k in entry_dict]
markers = {
normalize_marker_str("{k} {v}".format(k=k, v=entry_dict.pop(k)))
for k in keys_in_dict
}
if "markers" in entry_dict:
markers.add(normalize_marker_str(entry_dict["markers"]))
if markers:
entry_dict["markers"] = " and ".join(list(markers))
return markers, entry_dict

@property
def markers(self):
self._markers, self.entry_dict = self.get_markers_from_dict(self.entry_dict)
return self._markers

@markers.setter
def markers(self, markers):
if not markers:
pass
marker_str = self.marker_to_str(markers)
self._entry = self.entry.merge_markers(marker_str)
self._markers = self.marker_to_str(self._entry.markers)
entry_dict = self.entry_dict.copy()
entry_dict["markers"] = self.marker_to_str(self._entry.markers)
self.entry_dict = entry_dict

@property
def original_markers(self):
original_markers, lockfile_dict = self.get_markers_from_dict(
self.lockfile_dict
)
self.lockfile_dict = lockfile_dict
self._original_markers = self.marker_to_str(original_markers)
return self._original_markers

@staticmethod
def marker_to_str(marker):
from pipenv.vendor.requirementslib.models.markers import normalize_marker_str
if not marker:
return None
from pipenv.vendor import six
from pipenv.vendor.vistir.compat import Mapping
marker_str = None
if isinstance(marker, Mapping):
marker_dict, _ = Entry.get_markers_from_dict(marker)
if marker_dict:
marker_str = "{0}".format(marker_dict.popitem()[1])
elif isinstance(marker, (list, set, tuple)):
marker_str = " and ".join([normalize_marker_str(m) for m in marker if m])
elif isinstance(marker, six.string_types):
marker_str = "{0}".format(normalize_marker_str(marker))
if isinstance(marker_str, six.string_types):
return marker_str
return None

def get_cleaned_dict(self):
if self.is_updated:
self.validate_constraints()
self.ensure_least_updates_possible()
if self.entry.extras != self.lockfile_entry.extras:
self._entry.req.extras.extend(self.lockfile_entry.req.extras)
self.entry_dict["extras"] = self.entry.extras
if self.original_markers and not self.markers:
original_markers = self.marker_to_str(self.original_markers)
self.markers = original_markers
self.entry_dict["markers"] = self.marker_to_str(original_markers)
entry_hashes = set(self.entry.hashes)
locked_hashes = set(self.lockfile_entry.hashes)
if entry_hashes != locked_hashes and not self.is_updated:
@@ -154,6 +249,10 @@ def lockfile_entry(self):
self._lockfile_entry = self.make_requirement(self.name, self.lockfile_dict)
return self._lockfile_entry

@lockfile_entry.setter
def lockfile_entry(self, entry):
self._lockfile_entry = entry

@property
def pipfile_entry(self):
if self._pipfile_entry is None:
@@ -265,6 +364,7 @@ def updated_version(self):

@property
def updated_specifier(self):
# type: () -> str
return self.entry.specifiers

@property
@@ -279,7 +379,7 @@ def original_version(self):
return None

def validate_specifiers(self):
if self.is_in_pipfile:
if self.is_in_pipfile and not self.pipfile_entry.editable:
return self.pipfile_entry.requirement.specifier.contains(self.updated_version)
return True

@@ -373,7 +473,7 @@ def get_constraints(self):
if c and c.name == self.entry.name
}
pipfile_constraint = self.get_pipfile_constraint()
if pipfile_constraint:
if pipfile_constraint and not self.pipfile_entry.editable:
constraints.add(pipfile_constraint)
return constraints

@@ -446,8 +546,11 @@ def validate_constraints(self):
constraint.check_if_exists(False)
except Exception:
from pipenv.exceptions import DependencyConflict
from pipenv.environments import is_verbose
if is_verbose():
print("Tried constraint: {0!r}".format(constraint), file=sys.stderr)
msg = (
"Cannot resolve conflicting version {0}{1} while {1}{2} is "
"Cannot resolve conflicting version {0}{1} while {2}{3} is "
"locked.".format(
self.name, self.updated_specifier, self.old_name, self.old_specifiers
)
@@ -502,6 +605,7 @@ def __getattribute__(self, key):

def clean_outdated(results, resolver, project, dev=False):
from pipenv.vendor.requirementslib.models.requirements import Requirement
from pipenv.environments import is_verbose
if not project.lockfile_exists:
return results
lockfile = project.lockfile_content
@@ -520,23 +624,28 @@ def clean_outdated(results, resolver, project, dev=False):
# TODO: Should this be the case for all locking?
if entry.was_editable and not entry.is_editable:
continue
# if the entry has not changed versions since the previous lock,
# don't introduce new markers since that is more restrictive
if entry.has_markers and not entry.had_markers and not entry.is_updated:
del entry.entry_dict["markers"]
entry._entry.req.req.marker = None
entry._entry.markers = ""
# do make sure we retain the original markers for entries that are not changed
elif entry.had_markers and not entry.has_markers and not entry.is_updated:
if entry._entry and entry._entry.req and entry._entry.req.req and (
entry.lockfile_entry and entry.lockfile_entry.req and
entry.lockfile_entry.req.req and entry.lockfile_entry.req.req.marker
):
entry._entry.req.req.marker = entry.lockfile_entry.req.req.marker
if entry.lockfile_entry and entry.lockfile_entry.markers:
entry._entry.markers = entry.lockfile_entry.markers
if entry.lockfile_dict and "markers" in entry.lockfile_dict:
entry.entry_dict["markers"] = entry.lockfile_dict["markers"]
lockfile_entry = lockfile[section].get(name, None)
if not lockfile_entry:
alternate_section = "develop" if not dev else "default"
if name in lockfile[alternate_section]:
lockfile_entry = lockfile[alternate_section][name]
if lockfile_entry and not entry.is_updated:
old_markers = next(iter(m for m in (
entry.lockfile_entry.markers, lockfile_entry.get("markers", None)
) if m is not None), None)
new_markers = entry_dict.get("markers", None)
if old_markers:
old_markers = Entry.marker_to_str(old_markers)
if old_markers and not new_markers:
entry.markers = old_markers
elif new_markers and not old_markers:
del entry.entry_dict["markers"]
entry._entry.req.req.marker = None
entry._entry.markers = ""
# if the entry has not changed versions since the previous lock,
# don't introduce new markers since that is more restrictive
# if entry.has_markers and not entry.had_markers and not entry.is_updated:
# do make sure we retain the original markers for entries that are not changed
entry_dict = entry.get_cleaned_dict()
new_results.append(entry_dict)
return new_results
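
In rough terms, for entries that this run did not update, the loop above restores a previously locked marker that the fresh resolution dropped, and discards a marker that the fresh resolution introduced but the old lockfile never had. A condensed, hypothetical illustration:

# Old lockfile entry had a marker, the new resolution lost it: the old marker is re-applied.
old_entry = {"version": "==1.0", "markers": "python_version >= '2.7'"}
new_entry = {"version": "==1.0"}  # -> markers restored from old_entry
# New resolution introduced a marker the old lockfile never had: it is dropped again.
old_entry = {"version": "==1.0"}
new_entry = {"version": "==1.0", "markers": "os_name == 'nt'"}  # -> marker removed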
@@ -557,7 +666,6 @@ def parse_packages(packages, pre, clear, system, requirements_dir=None):
sys.path.insert(0, req.req.setup_info.base_dir)
req.req._setup_info.get_info()
req.update_name_from_path(req.req.setup_info.base_dir)
print(os.listdir(req.req.setup_info.base_dir))
try:
name, entry = req.pipfile_entry
except Exception:
46 changes: 24 additions & 22 deletions pipenv/utils.py
@@ -30,7 +30,7 @@
import parse

from . import environments
from .exceptions import PipenvUsageError, ResolutionFailure, RequirementError, PipenvCmdError
from .exceptions import PipenvUsageError, RequirementError, PipenvCmdError
from .pep508checker import lookup
from .vendor.urllib3 import util as urllib3_util

@@ -375,7 +375,6 @@ def parse_line(
):
# type: (...) -> Tuple[Requirement, Dict[str, str], Dict[str, str]]
from .vendor.requirementslib.models.requirements import Requirement
from .exceptions import ResolutionFailure
if index_lookup is None:
index_lookup = {}
if markers_lookup is None:
@@ -858,8 +857,8 @@ def resolve(cmd, sp):
_out = decode_output("{0}".format(_out))
out += _out
sp.text = to_native_string("{0}".format(_out[:100]))
if environments.is_verbose():
sp.hide_and_write(_out.rstrip())
if environments.is_verbose():
sp.hide_and_write(_out.rstrip())
if result is None:
break
c.block()
@@ -1609,27 +1608,28 @@ def translate_markers(pipfile_entry):
raise TypeError("Entry is not a pipfile formatted mapping.")
from .vendor.distlib.markers import DEFAULT_CONTEXT as marker_context
from .vendor.packaging.markers import Marker
from .vendor.requirementslib.models.markers import normalize_marker_str
from .vendor.vistir.misc import dedup

allowed_marker_keys = ["markers"] + [k for k in marker_context.keys()]
provided_keys = list(pipfile_entry.keys()) if hasattr(pipfile_entry, "keys") else []
pipfile_markers = [k for k in provided_keys if k in allowed_marker_keys]
new_pipfile = dict(pipfile_entry).copy()
marker_set = set()
marker_list = []
if "markers" in new_pipfile:
marker = str(Marker(new_pipfile.pop("markers")))
if 'extra' not in marker:
marker_set.add(marker)
marker = new_pipfile.pop("markers")
if marker is not None and marker.strip() and "extra == " not in marker:
marker_list.append(normalize_marker_str(str(Marker(marker))))
for m in pipfile_markers:
entry = "{0}".format(pipfile_entry[m])
if m != "markers":
marker_set.add(str(Marker("{0}{1}".format(m, entry))))
new_pipfile.pop(m)
if marker_set:
new_pipfile["markers"] = str(Marker(" or ".join(
"{0}".format(s) if " and " in s else s
for s in sorted(dedup(marker_set))
))).replace('"', "'")
entry = "{0}".format(pipfile_entry.pop(m, None))
if m != "markers" and entry and entry.strip():
marker_list.append(normalize_marker_str(str(Marker(
"{0} {1}".format(m, entry)
))))
markers_to_add = " and ".join(dedup([m for m in marker_list if m]))
if markers_to_add:
markers_to_add = normalize_marker_str(str(Marker(markers_to_add)))
new_pipfile["markers"] = markers_to_add.replace('"', "'")
return new_pipfile
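
To make the reworked joining behaviour concrete, a hypothetical input and the kind of output ``translate_markers`` is now expected to produce (values are illustrative):

# Loose marker keys and an explicit markers string in a single Pipfile entry.
pipfile_entry = {"version": "*", "os_name": "== 'posix'", "markers": "python_version >= '3.6'"}
# translate_markers() folds everything into one normalized marker string, combined
# with "and" rather than "or", roughly:
# {"version": "*", "markers": "os_name == 'posix' and python_version >= '3.6'"}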


@@ -1669,13 +1669,15 @@ def clean_resolved_dep(dep, is_top_level=False, pipfile_entry=None):

# If a package is **PRESENT** in the pipfile but has no markers, make sure we
# **NEVER** include markers in the lockfile
if "markers" in dep:
if "markers" in dep and dep.get("markers", "").strip():
# First, handle the case where there is no top level dependency in the pipfile
if not is_top_level:
try:
lockfile["markers"] = translate_markers(dep)["markers"]
except TypeError:
pass
translated = translate_markers(dep).get("markers", "").strip()
if translated:
try:
lockfile["markers"] = translated
except TypeError:
pass
# otherwise make sure we are prioritizing whatever the pipfile says about the markers
# If the pipfile says nothing, then we should put nothing in the lockfile
else:
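
Per the comments above, a condensed hypothetical example of the intended precedence: when a package is a top-level Pipfile entry declared without markers, resolver-supplied markers must not leak into the lockfile.

pipfile_entry = {"version": "*"}  # top-level entry, no markers declared
resolved_dep = {"name": "example", "version": "==1.0", "markers": "python_version >= '3.6'"}
# Expected locked entry: {"version": "==1.0"} -- the resolver-supplied marker is omitted.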