diff --git a/README.md b/README.md
index 7980f016c3..9b307f0f7d 100644
--- a/README.md
+++ b/README.md
@@ -78,10 +78,6 @@ For most users, we recommend installing Pipenv using `pip`:
pip install --user pipenv
-Or, if you're using Fedora:
-
- sudo dnf install pipenv
-
Or, if you're using FreeBSD:
pkg install py39-pipenv
diff --git a/docs/conf.py b/docs/conf.py
index 70f2b6fbde..aef08afa82 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -17,6 +17,11 @@
#
import os
+import pipenv.vendor.click
+
+# Hackery to get the CLI docs to generate
+from pipenv.vendor import click
+
# Path hackery to get current version number.
here = os.path.abspath(os.path.dirname(__file__))
@@ -24,11 +29,6 @@
with open(os.path.join(here, "..", "pipenv", "__version__.py")) as f:
exec(f.read(), about)
-# Hackery to get the CLI docs to generate
-import click
-
-import pipenv.vendor.click
-
click.Command = pipenv.vendor.click.Command
click.Group = pipenv.vendor.click.Group
click.BaseCommand = pipenv.vendor.click.BaseCommand
@@ -80,7 +80,10 @@
# General information about the project.
project = "pipenv"
-copyright = '2020. A project founded by Kenneth Reitz and maintained by Python Packaging Authority (PyPA).'
+copyright = (
+ "2020. A project founded by Kenneth Reitz and maintained by "
+ 'Python Packaging Authority (PyPA).'
+)
author = "Python Packaging Authority"
# The version info for the project you're documenting, acts as replacement for
diff --git a/news/6276.bugfix.rst b/news/6276.bugfix.rst
new file mode 100644
index 0000000000..691e5b730d
--- /dev/null
+++ b/news/6276.bugfix.rst
@@ -0,0 +1,13 @@
+Features & Bug Fixes
+--------------------
+- Refactored and simplified install routines, improving maintainability and reliability (#6276)
+ - Split install logic into smaller, focused functions.
+ - Eliminated Pipfile caching for now to prevent bugs and reduce complexity.
+ - Fixed edge cases with package category selection.
+ - Improved handling of VCS dependencies during updates, fixing when ref is a revision and not a branch.
+
+- Enhanced VCS URL handling with better environment variable support (#6276)
+ - More reliable expansion of environment variables in Git URLs.
+ - Better handling of authentication components in VCS URLs.
+ - Improved error messaging for missing environment variables.
+ - Fixed issue where Git reference could be dropped during relock.
diff --git a/pipenv/environment.py b/pipenv/environment.py
index a93817303d..58fbf1506d 100644
--- a/pipenv/environment.py
+++ b/pipenv/environment.py
@@ -101,9 +101,12 @@ def safe_import(self, name: str) -> ModuleType:
def python_version(self) -> str | None:
with self.activated() as active:
if active:
- sysconfig = self.safe_import("sysconfig")
- py_version = sysconfig.get_python_version()
- return py_version
+ from pipenv.patched.pip._vendor.packaging.version import Version
+
+ # Extract version parts
+ version_str = f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
+ python_version = Version(version_str) # Create PEP 440 compliant version
+ return str(python_version) # Return the string representation
else:
return None
diff --git a/pipenv/project.py b/pipenv/project.py
index ec692fa6d0..d3d9c2b5e7 100644
--- a/pipenv/project.py
+++ b/pipenv/project.py
@@ -134,11 +134,6 @@ def preferred_newlines(f):
return DEFAULT_NEWLINES
-# (path, file contents) => TOMLFile
-# keeps track of pipfiles that we've seen so we do not need to re-parse 'em
-_pipfile_cache = {}
-
-
class SourceNotFound(KeyError):
pass
@@ -670,16 +665,9 @@ def requirements_location(self) -> str | None:
@property
def parsed_pipfile(self) -> tomlkit.toml_document.TOMLDocument | TPipfile:
- """Parse Pipfile into a TOMLFile and cache it
-
- (call clear_pipfile_cache() afterwards if mutating)"""
+ """Parse Pipfile into a TOMLFile"""
contents = self.read_pipfile()
- # use full contents to get around str/bytes 2/3 issues
- cache_key = (self.pipfile_location, contents)
- if cache_key not in _pipfile_cache:
- parsed = self._parse_pipfile(contents)
- _pipfile_cache[cache_key] = parsed
- return _pipfile_cache[cache_key]
+ return self._parse_pipfile(contents)
def read_pipfile(self) -> str:
# Open the pipfile, read it into memory.
@@ -691,10 +679,6 @@ def read_pipfile(self) -> str:
return contents
- def clear_pipfile_cache(self) -> None:
- """Clear pipfile cache (e.g., so we can mutate parsed pipfile)"""
- _pipfile_cache.clear()
-
def _parse_pipfile(
self, contents: str
) -> tomlkit.toml_document.TOMLDocument | TPipfile:
@@ -991,8 +975,6 @@ def write_toml(self, data, path=None):
formatted_data = cleanup_toml(formatted_data)
with open(path, "w", newline=newlines) as f:
f.write(formatted_data)
- # pipfile is mutated!
- self.clear_pipfile_cache()
def write_lockfile(self, content):
"""Write out the lockfile."""
@@ -1088,7 +1070,6 @@ def find_source(sources, name=None, url=None):
sources = (self.sources, self.pipfile_sources())
if refresh:
- self.clear_pipfile_cache()
sources = reversed(sources)
found = next(
iter(find_source(source, name=name, url=url) for source in sources), None
@@ -1099,15 +1080,16 @@ def find_source(sources, name=None, url=None):
return found
def get_package_name_in_pipfile(self, package_name, category):
- """Get the equivalent package name in pipfile"""
- section = self.parsed_pipfile.get(category)
- if section is None:
- section = {}
- package_name = pep423_name(package_name)
+ section = self.parsed_pipfile.get(category, {})
+ normalized_name = pep423_name(package_name)
for name in section:
- if pep423_name(name) == package_name:
+ if pep423_name(name) == normalized_name:
return name
- return None
+ return package_name # Return original name if not found
+
+ def get_pipfile_entry(self, package_name, category):
+ name = self.get_package_name_in_pipfile(package_name, category)
+ return self.parsed_pipfile.get(category, {}).get(name)
def _sort_category(self, category) -> Table:
# copy table or create table from dict-like object
@@ -1244,26 +1226,28 @@ def add_pipfile_entry_to_pipfile(self, name, normalized_name, entry, category=No
newly_added = False
# Read and append Pipfile.
- p = self.parsed_pipfile
+ parsed_pipfile = self.parsed_pipfile
# Set empty group if it doesn't exist yet.
- if category not in p:
- p[category] = {}
+ if category not in parsed_pipfile:
+ parsed_pipfile[category] = {}
- if name and name != normalized_name:
- self.remove_package_from_pipfile(name, category=category)
+ section = parsed_pipfile.get(category, {})
+ for entry_name in section.copy().keys():
+ if entry_name.lower() == normalized_name.lower():
+ del parsed_pipfile[category][entry_name]
# Add the package to the group.
- if normalized_name not in p[category]:
+ if normalized_name not in parsed_pipfile[category]:
newly_added = True
- p[category][normalized_name] = entry
+ parsed_pipfile[category][normalized_name] = entry
if self.settings.get("sort_pipfile"):
- p[category] = self._sort_category(p[category])
+ parsed_pipfile[category] = self._sort_category(parsed_pipfile[category])
# Write Pipfile.
- self.write_toml(p)
+ self.write_toml(parsed_pipfile)
return newly_added, category, normalized_name
def src_name_from_url(self, index_url):
diff --git a/pipenv/resolver.py b/pipenv/resolver.py
index 02bf248d87..d7cd22a319 100644
--- a/pipenv/resolver.py
+++ b/pipenv/resolver.py
@@ -139,7 +139,6 @@ def make_requirement(name=None, entry=None):
def clean_initial_dict(cls, entry_dict):
from pipenv.patched.pip._vendor.packaging.requirements import Requirement
- entry_dict.get("version", "")
version = entry_dict.get("version", "")
if isinstance(version, Requirement):
version = str(version.specifier)
@@ -250,6 +249,8 @@ def marker_to_str(marker):
@cached_property
def get_cleaned_dict(self):
+ from pipenv.utils.constants import VCS_LIST
+
self.validate_constraints()
if self.entry.extras != self.lockfile_entry.extras:
entry_extras = list(self.entry.extras)
@@ -268,6 +269,12 @@ def get_cleaned_dict(self):
_, self.entry_dict = self.get_markers_from_dict(self.entry_dict)
if self.resolver.index_lookup.get(self.name):
self.entry_dict["index"] = self.resolver.index_lookup[self.name]
+
+ # Handle VCS entries
+ for key in VCS_LIST:
+ if key in self.lockfile_dict:
+ self.entry_dict[key] = self.lockfile_dict[key]
+ self.entry_dict.pop("version", None)
return self.entry_dict
@property
@@ -290,9 +297,7 @@ def pipfile_entry(self):
@property
def entry(self):
- if self._entry is None:
- self._entry = self.make_requirement(self.name, self.entry_dict)
- return self._entry
+ return self.make_requirement(self.name, self.lockfile_dict)
@property
def normalized_name(self):
@@ -548,18 +553,13 @@ def __getattribute__(self, key):
def clean_results(results, resolver, project, category):
from pipenv.utils.dependencies import (
- get_lockfile_section_using_pipfile_category,
translate_markers,
)
if not project.lockfile_exists:
return results
- lockfile = project.lockfile_content
- lockfile_section = get_lockfile_section_using_pipfile_category(category)
reverse_deps = project.environment.reverse_dependencies()
- new_results = [
- r for r in results if r["name"] not in lockfile.get(lockfile_section, {})
- ]
+ new_results = []
for result in results:
name = result.get("name")
entry_dict = result.copy()
diff --git a/pipenv/routines/check.py b/pipenv/routines/check.py
index 4f2ab27fc1..c6da10b1ef 100644
--- a/pipenv/routines/check.py
+++ b/pipenv/routines/check.py
@@ -1,19 +1,21 @@
import io
-import json as simplejson
+import json
import logging
import os
import sys
import tempfile
+from contextlib import redirect_stderr, redirect_stdout
from pathlib import Path
-from pipenv import exceptions, pep508checker
+from pipenv import pep508checker
+from pipenv.patched.safety.cli import cli
from pipenv.utils.processes import run_command
from pipenv.utils.project import ensure_project
-from pipenv.utils.shell import cmd_list_to_shell, project_python
+from pipenv.utils.shell import project_python
from pipenv.vendor import click, plette
-def build_options(
+def build_safety_options(
audit_and_monitor=True,
exit_code=True,
output="screen",
@@ -47,6 +49,133 @@ def build_options(
return options
+def run_pep508_check(project, system, python):
+ pep508checker_path = pep508checker.__file__.rstrip("cdo")
+ cmd = [project_python(project, system=system), Path(pep508checker_path).as_posix()]
+ c = run_command(cmd, is_verbose=project.s.is_verbose())
+
+ if c.returncode is not None:
+ try:
+ return json.loads(c.stdout.strip())
+ except json.JSONDecodeError:
+ click.echo(
+ f"Failed parsing pep508 results:\n{c.stdout.strip()}\n{c.stderr.strip()}",
+ err=True,
+ )
+ sys.exit(1)
+ return {}
+
+
+def check_pep508_requirements(project, results, quiet):
+ p = plette.Pipfile.load(open(project.pipfile_location))
+ p = plette.Lockfile.with_meta_from(p)
+ failed = False
+
+ for marker, specifier in p._data["_meta"]["requires"].items():
+ if marker in results:
+ if results[marker] != specifier:
+ failed = True
+ click.echo(
+ "Specifier {} does not match {} ({})."
+ "".format(
+ click.style(marker, fg="green"),
+ click.style(specifier, fg="cyan"),
+ click.style(results[marker], fg="yellow"),
+ ),
+ err=True,
+ )
+
+ if failed:
+ click.secho("Failed!", fg="red", err=True)
+ sys.exit(1)
+ elif not quiet and not project.s.is_quiet():
+ click.secho("Passed!", fg="green")
+
+
+def get_requirements(project, use_installed, categories):
+ _cmd = [project_python(project, system=False)]
+ if use_installed:
+ return run_command(
+ _cmd + ["-m", "pip", "list", "--format=freeze"],
+ is_verbose=project.s.is_verbose(),
+ )
+ elif categories:
+ return run_command(
+ ["pipenv", "requirements", "--categories", categories],
+ is_verbose=project.s.is_verbose(),
+ )
+ else:
+ return run_command(["pipenv", "requirements"], is_verbose=project.s.is_verbose())
+
+
+def create_temp_requirements(project, requirements):
+ temp_requirements = tempfile.NamedTemporaryFile(
+ mode="w+",
+ prefix=f"{project.virtualenv_name}",
+ suffix="_requirements.txt",
+ delete=False,
+ )
+ temp_requirements.write(requirements.stdout.strip())
+ temp_requirements.close()
+ return temp_requirements
+
+
+def run_safety_check(cmd, quiet):
+ sys.argv = cmd[1:]
+
+ if quiet:
+ out = io.StringIO()
+ err = io.StringIO()
+ exit_code = 0
+ with redirect_stdout(out), redirect_stderr(err):
+ try:
+ cli(prog_name="pipenv")
+ except SystemExit as exit_signal:
+ exit_code = exit_signal.code
+ return out.getvalue(), err.getvalue(), exit_code
+ else:
+ cli(prog_name="pipenv")
+
+
+def parse_safety_output(output, quiet):
+ try:
+ json_report = json.loads(output)
+ meta = json_report.get("report_meta", {})
+ vulnerabilities_found = meta.get("vulnerabilities_found", 0)
+ db_type = "commercial" if meta.get("api_key", False) else "free"
+
+ if quiet:
+ click.secho(
+ f"{vulnerabilities_found} vulnerabilities found.",
+ fg="red" if vulnerabilities_found else "green",
+ )
+ else:
+ fg = "red" if vulnerabilities_found else "green"
+ message = f"Scan complete using Safety's {db_type} vulnerability database."
+ click.echo()
+ click.secho(f"{vulnerabilities_found} vulnerabilities found.", fg=fg)
+ click.echo()
+
+ for vuln in json_report.get("vulnerabilities", []):
+ click.echo(
+ "{}: {} {} open to vulnerability {} ({}). More info: {}".format(
+ click.style(vuln["vulnerability_id"], bold=True, fg="red"),
+ click.style(vuln["package_name"], fg="green"),
+ click.style(vuln["analyzed_version"], fg="yellow", bold=True),
+ click.style(vuln["vulnerability_id"], bold=True),
+ click.style(vuln["vulnerable_spec"], fg="yellow", bold=False),
+ click.style(vuln["more_info_url"], bold=True),
+ )
+ )
+ click.echo(f"{vuln['advisory']}")
+ click.echo()
+
+ click.secho(message, fg="white", bold=True)
+
+ except json.JSONDecodeError:
+ click.echo("Failed to parse Safety output.")
+
+
def do_check(
project,
python=False,
@@ -66,13 +195,10 @@ def do_check(
use_installed=False,
categories="",
):
- import json
-
if not verbose:
- logging.getLogger("pipenv").setLevel(logging.WARN)
+ logging.getLogger("pipenv").setLevel(logging.ERROR if quiet else logging.WARN)
if not system:
- # Ensure that virtualenv is available.
ensure_project(
project,
python=python,
@@ -80,62 +206,16 @@ def do_check(
warn=False,
pypi_mirror=pypi_mirror,
)
+
if not quiet and not project.s.is_quiet():
click.secho("Checking PEP 508 requirements...", bold=True)
- pep508checker_path = pep508checker.__file__.rstrip("cdo")
- safety_path = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), "patched", "safety"
- )
- _cmd = [project_python(project, system=system)]
- # Run the PEP 508 checker in the virtualenv.
- cmd = _cmd + [Path(pep508checker_path).as_posix()]
- c = run_command(cmd, is_verbose=project.s.is_verbose())
- results = []
- if c.returncode is not None:
- try:
- results = simplejson.loads(c.stdout.strip())
- except json.JSONDecodeError:
- click.echo(
- "{}\n{}\n{}".format(
- click.style(
- "Failed parsing pep508 results: ",
- fg="white",
- bold=True,
- ),
- c.stdout.strip(),
- c.stderr.strip(),
- )
- )
- sys.exit(1)
- # Load the pipfile.
- p = plette.Pipfile.load(open(project.pipfile_location))
- p = plette.Lockfile.with_meta_from(p)
- failed = False
- # Assert each specified requirement.
- for marker, specifier in p._data["_meta"]["requires"].items():
- if marker in results:
- try:
- assert results[marker] == specifier
- except AssertionError:
- failed = True
- click.echo(
- "Specifier {} does not match {} ({})."
- "".format(
- click.style(marker, fg="green"),
- click.style(specifier, fg="cyan"),
- click.style(results[marker], fg="yellow"),
- ),
- err=True,
- )
- if failed:
- click.secho("Failed!", fg="red", err=True)
- sys.exit(1)
- else:
- if not quiet and not project.s.is_quiet():
- click.secho("Passed!", fg="green")
- # Check for lockfile exists
+
+ results = run_pep508_check(project, system, python)
+ check_pep508_requirements(project, results, quiet)
+
if not project.lockfile_exists:
return
+
if not quiet and not project.s.is_quiet():
if use_installed:
click.secho(
@@ -147,46 +227,22 @@ def do_check(
"Checking Pipfile.lock packages for vulnerabilities...",
bold=True,
)
+
if ignore:
- if not isinstance(ignore, (tuple, list)):
- ignore = [ignore]
- ignored = [["--ignore", cve] for cve in ignore]
+ ignore = [ignore] if not isinstance(ignore, (tuple, list)) else ignore
if not quiet and not project.s.is_quiet():
click.echo(
"Notice: Ignoring Vulnerabilit{} {}".format(
- "ies" if len(ignored) > 1 else "y",
+ "ies" if len(ignore) > 1 else "y",
click.style(", ".join(ignore), fg="yellow"),
),
err=True,
)
- else:
- ignored = []
-
- if use_installed:
- target_venv_packages = run_command(
- _cmd + ["-m", "pip", "list", "--format=freeze"],
- is_verbose=project.s.is_verbose(),
- )
- elif categories:
- target_venv_packages = run_command(
- ["pipenv", "requirements", "--categories", categories],
- is_verbose=project.s.is_verbose(),
- )
- else:
- target_venv_packages = run_command(
- ["pipenv", "requirements"], is_verbose=project.s.is_verbose()
- )
- temp_requirements = tempfile.NamedTemporaryFile(
- mode="w+",
- prefix=f"{project.virtualenv_name}",
- suffix="_requirements.txt",
- delete=False,
- )
- temp_requirements.write(target_venv_packages.stdout.strip())
- temp_requirements.close()
+ requirements = get_requirements(project, use_installed, categories)
+ temp_requirements = create_temp_requirements(project, requirements)
- options = build_options(
+ options = build_safety_options(
audit_and_monitor=audit_and_monitor,
exit_code=exit_code,
output=output,
@@ -196,14 +252,17 @@ def do_check(
temp_requirements_name=temp_requirements.name,
)
- cmd = _cmd + [safety_path, "check"] + options
+ safety_path = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), "patched", "safety"
+ )
+ cmd = [project_python(project, system=system), safety_path, "check"] + options
if db:
if not quiet and not project.s.is_quiet():
click.echo(f"Using {db} database")
cmd.append(f"--db={db}")
elif key or project.s.PIPENV_PYUP_API_KEY:
- cmd = cmd + [f"--key={key or project.s.PIPENV_PYUP_API_KEY}"]
+ cmd.append(f"--key={key or project.s.PIPENV_PYUP_API_KEY}")
else:
PIPENV_SAFETY_DB = (
"https://d2qjmgddvqvu75.cloudfront.net/aws/safety/pipenv/1.0.0/"
@@ -211,74 +270,21 @@ def do_check(
os.environ["SAFETY_ANNOUNCEMENTS_URL"] = f"{PIPENV_SAFETY_DB}announcements.json"
cmd.append(f"--db={PIPENV_SAFETY_DB}")
- if ignored:
- for cve in ignored:
- cmd += cve
+ if ignore:
+ for cve in ignore:
+ cmd.extend(["--ignore", cve])
os.environ["SAFETY_CUSTOM_INTEGRATION"] = "True"
os.environ["SAFETY_SOURCE"] = "pipenv"
os.environ["SAFETY_PURE_YAML"] = "True"
- from pipenv.patched.safety.cli import cli
+ output, error, exit_code = run_safety_check(cmd, quiet)
- sys.argv = cmd[1:]
-
- if output == "minimal":
- from contextlib import redirect_stderr, redirect_stdout
-
- code = 0
-
- with redirect_stdout(io.StringIO()) as out, redirect_stderr(io.StringIO()) as err:
- try:
- cli(prog_name="pipenv")
- except SystemExit as exit_signal:
- code = exit_signal.code
-
- report = out.getvalue()
- error = err.getvalue()
-
- try:
- json_report = simplejson.loads(report)
- except Exception:
- raise exceptions.PipenvCmdError(
- cmd_list_to_shell(cmd), report, error, exit_code=code
- )
- meta = json_report.get("report_meta")
- vulnerabilities_found = meta.get("vulnerabilities_found")
-
- fg = "green"
- message = "All good!"
- db_type = "commercial" if meta.get("api_key", False) else "free"
-
- if vulnerabilities_found >= 0:
- fg = "red"
- message = (
- f"Scan was complete using Safety’s {db_type} vulnerability database."
- )
-
- click.echo()
- click.secho(f"{vulnerabilities_found} vulnerabilities found.", fg=fg)
- click.echo()
-
- vulnerabilities = json_report.get("vulnerabilities", [])
-
- for vuln in vulnerabilities:
- click.echo(
- "{}: {} {} open to vulnerability {} ({}). More info: {}".format(
- click.style(vuln["vulnerability_id"], bold=True, fg="red"),
- click.style(vuln["package_name"], fg="green"),
- click.style(vuln["analyzed_version"], fg="yellow", bold=True),
- click.style(vuln["vulnerability_id"], bold=True),
- click.style(vuln["vulnerable_spec"], fg="yellow", bold=False),
- click.style(vuln["more_info_url"], bold=True),
- )
- )
- click.echo(f"{vuln['advisory']}")
- click.echo()
-
- click.secho(message, fg="white", bold=True)
- sys.exit(code)
-
- cli(prog_name="pipenv")
+ if quiet:
+ parse_safety_output(output, quiet)
+ else:
+ sys.stdout.write(output)
+ sys.stderr.write(error)
- temp_requirements.remove()
+ temp_requirements.unlink()
+ sys.exit(exit_code)
diff --git a/pipenv/routines/install.py b/pipenv/routines/install.py
index 6f6281be5d..5d1e49195d 100644
--- a/pipenv/routines/install.py
+++ b/pipenv/routines/install.py
@@ -24,122 +24,48 @@
from pipenv.utils.project import ensure_project
from pipenv.utils.requirements import add_index_to_pipfile, import_requirements
from pipenv.utils.shell import temp_environ
-from pipenv.utils.virtualenv import cleanup_virtualenv, do_create_virtualenv
-def do_install(
+def handle_new_packages(
project,
- packages=False,
- editable_packages=False,
- index=False,
- dev=False,
- python=False,
- pypi_mirror=None,
- system=False,
- ignore_pipfile=False,
- requirementstxt=False,
- pre=False,
- deploy=False,
- site_packages=None,
- extra_pip_args=None,
- categories=None,
- skip_lock=False,
+ packages,
+ editable_packages,
+ dev,
+ pre,
+ system,
+ pypi_mirror,
+ extra_pip_args,
+ categories,
+ skip_lock,
+ index=None,
):
- requirements_directory = fileutils.create_tracked_tempdir(
- suffix="-requirements", prefix="pipenv-"
- )
- warnings.filterwarnings("ignore", category=ResourceWarning)
- packages = packages if packages else []
- editable_packages = editable_packages if editable_packages else []
- package_args = [p for p in packages if p] + [p for p in editable_packages if p]
- skip_requirements = False
- # Don't search for requirements.txt files if the user provides one
- if requirementstxt or package_args or project.pipfile_exists:
- skip_requirements = True
- # Ensure that virtualenv is available and pipfile are available
- ensure_project(
- project,
- python=python,
- system=system,
- warn=True,
- deploy=deploy,
- skip_requirements=skip_requirements,
- pypi_mirror=pypi_mirror,
- site_packages=site_packages,
- categories=categories,
- )
-
- do_install_validations(
- project,
- package_args,
- requirements_directory,
- dev=dev,
- system=system,
- ignore_pipfile=ignore_pipfile,
- requirementstxt=requirementstxt,
- pre=pre,
- deploy=deploy,
- categories=categories,
- skip_lock=skip_lock,
- )
-
- # Install all dependencies, if none was provided.
- # This basically ensures that we have a pipfile and lockfile, then it locks and
- # installs from the lockfile
new_packages = []
- if not packages and not editable_packages:
- if pre:
- project.update_settings({"allow_prereleases": pre})
- do_init(
- project,
- allow_global=system,
- ignore_pipfile=ignore_pipfile,
- system=system,
- deploy=deploy,
- pre=pre,
- requirements_dir=requirements_directory,
- pypi_mirror=pypi_mirror,
- extra_pip_args=extra_pip_args,
- categories=categories,
- skip_lock=skip_lock,
- )
+ if packages or editable_packages:
+ from pipenv.routines.update import do_update
- # This is for if the user passed in dependencies; handle with the update routine
- else:
pkg_list = packages + [f"-e {pkg}" for pkg in editable_packages]
- if not system and not project.virtualenv_exists:
- do_init(
- project,
- system=system,
- allow_global=system,
- requirements_dir=requirements_directory,
- deploy=deploy,
- pypi_mirror=pypi_mirror,
- extra_pip_args=extra_pip_args,
- categories=categories,
- skip_lock=skip_lock,
- packages=packages,
- editable_packages=editable_packages,
- )
for pkg_line in pkg_list:
- console.print(
- f"Installing {pkg_line}...",
- style="bold green",
- )
- # pip install:
- with temp_environ(), console.status(
- "Installing...", spinner=project.s.PIPENV_SPINNER
- ) as st:
+ console.print(f"Installing {pkg_line}...", style="bold green")
+ with temp_environ():
if not system:
os.environ["PIP_USER"] = "0"
if "PYTHONHOME" in os.environ:
del os.environ["PYTHONHOME"]
- st.console.print(f"Resolving {pkg_line}...", markup=False)
+
try:
pkg_requirement, _ = expansive_install_req_from_line(
pkg_line, expand_env=True
)
+ if index:
+ source = project.get_index_by_name(index)
+ default_index = project.get_default_index()["name"]
+ if not source:
+ index_name = add_index_to_pipfile(project, index)
+ if index_name != default_index:
+ pkg_requirement.index = index_name
+ elif source["name"] != default_index:
+ pkg_requirement.index = source["name"]
except ValueError as e:
err.print(f"[red]WARNING[/red]: {e}")
err.print(
@@ -148,25 +74,7 @@ def do_install(
)
)
sys.exit(1)
- st.update(f"Installing {pkg_requirement.name}...")
- if categories:
- pipfile_sections = ""
- for c in categories:
- pipfile_sections += f"[{c}]"
- elif dev:
- pipfile_sections = "[dev-packages]"
- else:
- pipfile_sections = "[packages]"
- # Add the package to the Pipfile.
- if index:
- source = project.get_index_by_name(index)
- default_index = project.get_default_index()["name"]
- if not source:
- index_name = add_index_to_pipfile(project, index)
- if index_name != default_index:
- pkg_requirement.index = index_name
- elif source["name"] != default_index:
- pkg_requirement.index = source["name"]
+
try:
if categories:
for category in categories:
@@ -175,20 +83,12 @@ def do_install(
)
if added:
new_packages.append((normalized_name, cat))
- st.console.print(
- f"[bold]Added [green]{normalized_name}[/green][/bold] to Pipfile's "
- f"[yellow]\\{pipfile_sections}[/yellow] ..."
- )
else:
added, cat, normalized_name = project.add_package_to_pipfile(
pkg_requirement, pkg_line, dev
)
if added:
new_packages.append((normalized_name, cat))
- st.console.print(
- f"[bold]Added [green]{normalized_name}[/green][/bold] to Pipfile's "
- f"[yellow]\\{pipfile_sections}[/yellow] ..."
- )
except ValueError:
import traceback
@@ -198,25 +98,210 @@ def do_install(
"Failed adding package to Pipfile"
)
)
- # ok has a nice v in front, should do something similar with rich
- st.console.print(
+
+ console.print(
environments.PIPENV_SPINNER_OK_TEXT.format("Installation Succeeded")
)
- # Update project settings with pre-release preference.
- if pre:
- project.update_settings({"allow_prereleases": pre})
- try:
- do_init(
+
+ # Update project settings with pre-release preference.
+ if pre:
+ project.update_settings({"allow_prereleases": pre})
+
+ # Use the update routine for new packages
+ if not skip_lock:
+ try:
+ do_update(
+ project,
+ dev=dev,
+ pre=pre,
+ packages=packages,
+ editable_packages=editable_packages,
+ pypi_mirror=pypi_mirror,
+ index_url=index,
+ extra_pip_args=extra_pip_args,
+ categories=categories,
+ )
+ except Exception:
+ for pkg_name, category in new_packages:
+ project.remove_package_from_pipfile(pkg_name, category)
+ raise
+
+ return new_packages
+
+
+def handle_lockfile(
+ project,
+ packages,
+ ignore_pipfile,
+ skip_lock,
+ system,
+ allow_global,
+ deploy,
+ pre,
+ pypi_mirror,
+ categories,
+):
+ if (project.lockfile_exists and not ignore_pipfile) and not skip_lock:
+ old_hash = project.get_lockfile_hash()
+ new_hash = project.calculate_pipfile_hash()
+ if new_hash != old_hash:
+ handle_outdated_lockfile(
+ project,
+ packages,
+ old_hash=old_hash,
+ new_hash=new_hash,
+ system=system,
+ allow_global=allow_global,
+ deploy=deploy,
+ pre=pre,
+ pypi_mirror=pypi_mirror,
+ categories=categories,
+ )
+ elif not project.lockfile_exists and not skip_lock:
+ handle_missing_lockfile(
+ project, system, allow_global, pre, pypi_mirror, categories
+ )
+
+
+def handle_outdated_lockfile(
+ project,
+ packages,
+ old_hash,
+ new_hash,
+ system,
+ allow_global,
+ deploy,
+ pre,
+ pypi_mirror,
+ categories,
+):
+ from pipenv.routines.update import do_update
+
+ if deploy:
+ console.print(
+ f"Your Pipfile.lock ({old_hash}) is out of date. Expected: ({new_hash}).",
+ style="red",
+ )
+ raise exceptions.DeployException
+ if (system or allow_global) and not (project.s.PIPENV_VIRTUALENV):
+ err.print(
+ f"Pipfile.lock ({old_hash}) out of date, but installation uses --system so"
+ f" re-building lockfile must happen in isolation."
+ f" Please rebuild lockfile in a virtualenv. Continuing anyway...",
+ style="yellow",
+ )
+ else:
+ if old_hash:
+ msg = (
+ "Pipfile.lock ({0}) out of date: run `pipenv lock` to update to ({1})..."
+ )
+ else:
+ msg = "Pipfile.lock is corrupt, replaced with ({1})..."
+ err.print(
+ msg.format(old_hash, new_hash),
+ style="bold yellow",
+ )
+ do_update(
project,
+ packages=packages,
+ pre=pre,
system=system,
- allow_global=system,
- requirements_dir=requirements_directory,
- deploy=deploy,
pypi_mirror=pypi_mirror,
- extra_pip_args=extra_pip_args,
categories=categories,
- skip_lock=skip_lock,
)
+
+
+def handle_missing_lockfile(project, system, allow_global, pre, pypi_mirror, categories):
+ if (system or allow_global) and not project.s.PIPENV_VIRTUALENV:
+ raise exceptions.PipenvOptionsError(
+ "--system",
+ "--system is intended to be used for Pipfile installation, "
+ "not installation of specific packages. Aborting.\n"
+ "See also: --deploy flag.",
+ )
+ else:
+ err.print(
+ "Pipfile.lock not found, creating...",
+ style="bold",
+ )
+ do_lock(
+ project,
+ system=system,
+ pre=pre,
+ write=True,
+ pypi_mirror=pypi_mirror,
+ categories=categories,
+ )
+
+
+def do_install(
+ project,
+ packages=False,
+ editable_packages=False,
+ index=False,
+ dev=False,
+ python=False,
+ pypi_mirror=None,
+ system=False,
+ ignore_pipfile=False,
+ requirementstxt=False,
+ pre=False,
+ deploy=False,
+ site_packages=None,
+ extra_pip_args=None,
+ categories=None,
+ skip_lock=False,
+):
+ requirements_directory = fileutils.create_tracked_tempdir(
+ suffix="-requirements", prefix="pipenv-"
+ )
+ warnings.filterwarnings("ignore", category=ResourceWarning)
+ packages = packages if packages else []
+ editable_packages = editable_packages if editable_packages else []
+ package_args = [p for p in packages if p] + [p for p in editable_packages if p]
+
+ do_init(
+ project,
+ package_args,
+ system=system,
+ allow_global=system,
+ deploy=deploy,
+ pypi_mirror=pypi_mirror,
+ categories=categories,
+ skip_lock=skip_lock,
+ site_packages=site_packages,
+ python=python,
+ )
+
+ do_install_validations(
+ project,
+ package_args,
+ requirements_directory,
+ dev=dev,
+ system=system,
+ ignore_pipfile=ignore_pipfile,
+ requirementstxt=requirementstxt,
+ pre=pre,
+ deploy=deploy,
+ categories=categories,
+ skip_lock=skip_lock,
+ )
+
+ new_packages = handle_new_packages(
+ project,
+ packages,
+ editable_packages,
+ dev=dev,
+ pre=pre,
+ system=system,
+ pypi_mirror=pypi_mirror,
+ extra_pip_args=extra_pip_args,
+ categories=categories,
+ skip_lock=skip_lock,
+ index=index,
+ )
+
+ try:
do_install_dependencies(
project,
dev=dev,
@@ -232,6 +317,7 @@ def do_install(
for pkg_name, category in new_packages:
project.remove_package_from_pipfile(pkg_name, category)
raise e
+
sys.exit(0)
@@ -355,14 +441,14 @@ def do_install_dependencies(
else:
categories = ["packages"]
- for category in categories:
+ for pipfile_category in categories:
lockfile = None
pipfile = None
if skip_lock:
ignore_hashes = True
if not bare:
console.print("Installing dependencies from Pipfile...", style="bold")
- pipfile = project.get_pipfile_section(category)
+ pipfile = project.get_pipfile_section(pipfile_category)
else:
lockfile = project.get_or_create_lockfile(categories=categories)
if not bare:
@@ -385,8 +471,13 @@ def do_install_dependencies(
)
)
else:
+ lockfile_category = get_lockfile_section_using_pipfile_category(
+ pipfile_category
+ )
deps_list = list(
- lockfile.get_requirements(dev=dev, only=False, categories=[category])
+ lockfile.get_requirements(
+ dev=dev, only=False, categories=[lockfile_category]
+ )
)
editable_or_vcs_deps = [
(dep, pip_line) for dep, pip_line in deps_list if (dep.link and dep.editable)
@@ -408,7 +499,9 @@ def do_install_dependencies(
if skip_lock:
lockfile_section = pipfile
else:
- lockfile_category = get_lockfile_section_using_pipfile_category(category)
+ lockfile_category = get_lockfile_section_using_pipfile_category(
+ pipfile_category
+ )
lockfile_section = lockfile[lockfile_category]
batch_install(
project,
@@ -558,104 +651,46 @@ def _cleanup_procs(project, procs):
def do_init(
project,
+ packages=None,
allow_global=False,
ignore_pipfile=False,
system=False,
deploy=False,
pre=False,
- requirements_dir=None,
pypi_mirror=None,
- extra_pip_args=None,
categories=None,
skip_lock=False,
- packages=None,
- editable_packages=None,
+ site_packages=None,
+ python=None,
):
- from pipenv.routines.update import do_update
-
- python = None
- if project.s.PIPENV_PYTHON is not None:
- python = project.s.PIPENV_PYTHON
- elif project.s.PIPENV_DEFAULT_PYTHON_VERSION is not None:
- python = project.s.PIPENV_DEFAULT_PYTHON_VERSION
- if categories is None:
- categories = []
+ ensure_project(
+ project,
+ python=python,
+ system=system,
+ warn=True,
+ deploy=deploy,
+ skip_requirements=False,
+ pypi_mirror=pypi_mirror,
+ site_packages=site_packages,
+ categories=categories,
+ )
- if not system and not project.s.PIPENV_USE_SYSTEM and not project.virtualenv_exists:
- try:
- do_create_virtualenv(project, python=python, pypi_mirror=pypi_mirror)
- except KeyboardInterrupt:
- cleanup_virtualenv(project, bare=False)
- sys.exit(1)
- # Ensure the Pipfile exists.
if not deploy:
ensure_pipfile(project, system=system)
- if not requirements_dir:
- requirements_dir = fileutils.create_tracked_tempdir(
- suffix="-requirements", prefix="pipenv-"
- )
- # Write out the lockfile if it doesn't exist, but not if the Pipfile is being ignored
- if (project.lockfile_exists and not ignore_pipfile) and not skip_lock:
- old_hash = project.get_lockfile_hash()
- new_hash = project.calculate_pipfile_hash()
- if new_hash != old_hash:
- if deploy:
- console.print(
- f"Your Pipfile.lock ({old_hash[-6:]}) is out of date. Expected: ({new_hash[-6:]}).",
- style="red",
- )
- raise exceptions.DeployException
- if (system or allow_global) and not (project.s.PIPENV_VIRTUALENV):
- err.print(
- f"Pipfile.lock ({old_hash[-6:]}) out of date, but installation uses --system so"
- f" re-building lockfile must happen in isolation."
- f" Please rebuild lockfile in a virtualenv. Continuing anyway...",
- style="yellow",
- )
- else:
- if old_hash:
- msg = "Pipfile.lock ({0}) out of date: run `pipenv lock` to update to ({1})..."
- else:
- msg = "Pipfile.lock is corrupt, replaced with ({1})..."
- err.print(
- msg.format(old_hash[-6:], new_hash[-6:]),
- style="bold yellow",
- )
- do_update(
- project,
- pre=pre,
- system=system,
- pypi_mirror=pypi_mirror,
- categories=categories,
- packages=packages,
- editable_packages=editable_packages,
- )
- # Write out the lockfile if it doesn't exist and skip_lock is False
- if not project.lockfile_exists and not skip_lock:
- # Unless we're in a virtualenv not managed by pipenv, abort if we're
- # using the system's python.
- if (system or allow_global) and not project.s.PIPENV_VIRTUALENV:
- raise exceptions.PipenvOptionsError(
- "--system",
- "--system is intended to be used for Pipfile installation, "
- "not installation of specific packages. Aborting.\n"
- "See also: --deploy flag.",
- )
- else:
- err.print(
- "Pipfile.lock not found, creating...",
- style="bold",
- )
- do_lock(
- project,
- system=system,
- pre=pre,
- write=True,
- pypi_mirror=pypi_mirror,
- categories=categories,
- )
- # Hint the user what to do to activate the virtualenv.
+ handle_lockfile(
+ project,
+ packages,
+ ignore_pipfile=ignore_pipfile,
+ skip_lock=skip_lock,
+ system=system,
+ allow_global=allow_global,
+ deploy=deploy,
+ pre=pre,
+ pypi_mirror=pypi_mirror,
+ categories=categories,
+ )
+
if not allow_global and not deploy and "PIPENV_ACTIVE" not in os.environ:
console.print(
"To activate this project's virtualenv, run [yellow]pipenv shell[/yellow].\n"
diff --git a/pipenv/routines/outdated.py b/pipenv/routines/outdated.py
index 9f9abdbb65..b34843f99c 100644
--- a/pipenv/routines/outdated.py
+++ b/pipenv/routines/outdated.py
@@ -66,11 +66,10 @@ def do_outdated(project, pypi_mirror=None, pre=False, clear=False):
name_in_pipfile = project.get_package_name_in_pipfile(
package, category=category
)
- if name_in_pipfile:
+ pipfile_section = project.get_pipfile_section(category)
+ if name_in_pipfile and name_in_pipfile in pipfile_section:
required = ""
- version = get_version(
- project.get_pipfile_section(category)[name_in_pipfile]
- )
+ version = get_version(pipfile_section[name_in_pipfile])
rdeps = reverse_deps.get(canonicalize_name(package))
if isinstance(rdeps, Mapping) and "required" in rdeps:
required = " {rdeps['required']} required"
diff --git a/pipenv/routines/sync.py b/pipenv/routines/sync.py
index b34176612a..84c66f0576 100644
--- a/pipenv/routines/sync.py
+++ b/pipenv/routines/sync.py
@@ -45,13 +45,11 @@ def do_sync(
do_init(
project,
allow_global=system,
- requirements_dir=requirements_dir,
ignore_pipfile=True, # Don't check if Pipfile and lock match.
skip_lock=True, # Don't re-lock
pypi_mirror=pypi_mirror,
deploy=deploy,
system=system,
- extra_pip_args=extra_pip_args,
categories=categories,
)
do_install_dependencies(
diff --git a/pipenv/routines/update.py b/pipenv/routines/update.py
index 1a0e8217aa..42762d7df2 100644
--- a/pipenv/routines/update.py
+++ b/pipenv/routines/update.py
@@ -117,9 +117,9 @@ def upgrade(
lockfile = project.lockfile()
if not pre:
pre = project.settings.get("allow_prereleases")
- if dev:
+ if dev or "dev-packages" in categories:
categories = ["develop"]
- elif not categories:
+ elif not categories or "packages" in categories:
categories = ["default"]
index_name = None
@@ -140,6 +140,7 @@ def upgrade(
install_req, _ = expansive_install_req_from_line(package, expand_env=True)
if index_name:
install_req.index = index_name
+
name, normalized_name, pipfile_entry = project.generate_package_pipfile_entry(
install_req, package, category=pipfile_category
)
@@ -149,11 +150,6 @@ def upgrade(
requested_packages[pipfile_category][normalized_name] = pipfile_entry
requested_install_reqs[pipfile_category][normalized_name] = install_req
- if project.pipfile_exists:
- packages = project.parsed_pipfile.get(pipfile_category, {})
- else:
- packages = project.get_pipfile_section(pipfile_category)
-
if not package_args:
click.echo("Nothing to upgrade!")
sys.exit(0)
@@ -164,7 +160,7 @@ def upgrade(
which=project._which,
project=project,
lockfile={},
- category="default",
+ category=pipfile_category,
pre=pre,
allow_global=system,
pypi_mirror=pypi_mirror,
@@ -173,14 +169,18 @@ def upgrade(
click.echo("Nothing to upgrade!")
sys.exit(0)
- for package_name, pipfile_entry in requested_packages[pipfile_category].items():
- if package_name not in packages:
- packages.append(package_name, pipfile_entry)
+ complete_packages = project.parsed_pipfile.get(pipfile_category, {})
+ for package_name in requested_packages[pipfile_category].keys():
+ pipfile_entry = project.get_pipfile_entry(
+ package_name, category=pipfile_category
+ )
+ if package_name not in complete_packages:
+ complete_packages.append(package_name, pipfile_entry)
else:
- packages[package_name] = pipfile_entry
+ complete_packages[package_name] = pipfile_entry
full_lock_resolution = venv_resolve_deps(
- packages,
+ complete_packages,
which=project._which,
project=project,
lockfile={},
@@ -193,6 +193,8 @@ def upgrade(
for package_name in upgrade_lock_data:
correct_package_lock = full_lock_resolution.get(package_name)
if correct_package_lock:
+ if category not in lockfile:
+ lockfile[category] = {}
lockfile[category][package_name] = correct_package_lock
lockfile.update({"_meta": project.get_lockfile_meta()})
diff --git a/pipenv/utils/dependencies.py b/pipenv/utils/dependencies.py
index 37b3c0552a..7fd12d2fa6 100644
--- a/pipenv/utils/dependencies.py
+++ b/pipenv/utils/dependencies.py
@@ -10,9 +10,10 @@
from functools import lru_cache
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
-from typing import Any, AnyStr, Dict, List, Mapping, Optional, Sequence, Union
+from typing import Any, AnyStr, Dict, List, Mapping, Optional, Sequence, Tuple, Union
from urllib.parse import urlparse, urlsplit, urlunparse, urlunsplit
+from pipenv.exceptions import PipenvUsageError
from pipenv.patched.pip._internal.models.link import Link
from pipenv.patched.pip._internal.network.download import Downloader
from pipenv.patched.pip._internal.req.constructors import (
@@ -1083,13 +1084,72 @@ def normalize_vcs_url(vcs_url):
return vcs_url, vcs_ref
-def install_req_from_pipfile(name, pipfile):
- """Creates an InstallRequirement from a name and a pipfile entry.
- Handles VCS, local & remote paths, and regular named requirements.
- "file" and "path" entries are treated the same.
+class VCSURLProcessor:
+ """Handles processing and environment variable expansion in VCS URLs."""
+
+ ENV_VAR_PATTERN = re.compile(r"\${([^}]+)}|\$([a-zA-Z_][a-zA-Z0-9_]*)")
+
+ @classmethod
+ def expand_env_vars(cls, value: str) -> str:
+ """
+ Expands environment variables in a string, with detailed error handling.
+ Supports both ${VAR} and $VAR syntax.
+ """
+
+ def _replace_var(match):
+ var_name = match.group(1) or match.group(2)
+ if var_name not in os.environ:
+ raise PipenvUsageError(
+ f"Environment variable '${var_name}' not found. "
+ "Please ensure all required environment variables are set."
+ )
+ return os.environ[var_name]
+
+ try:
+ return cls.ENV_VAR_PATTERN.sub(_replace_var, value)
+ except Exception as e:
+ raise PipenvUsageError(f"Error expanding environment variables: {str(e)}")
+
+ @classmethod
+ def process_vcs_url(cls, url: str) -> str:
+ """
+ Processes a VCS URL, expanding environment variables in individual components.
+ Handles URLs of the form: vcs+protocol://username:password@hostname/path
+ """
+ parsed = urlparse(url)
+
+ # Process each component separately
+ netloc_parts = parsed.netloc.split("@")
+ if len(netloc_parts) > 1:
+ # Handle auth information
+ auth, host = netloc_parts
+ if ":" in auth:
+ username, password = auth.split(":")
+ username = cls.expand_env_vars(username)
+ password = cls.expand_env_vars(password)
+ auth = f"{username}:{password}"
+ else:
+ auth = cls.expand_env_vars(auth)
+ netloc = f"{auth}@{host}"
+ else:
+ netloc = cls.expand_env_vars(parsed.netloc)
+
+ # Reconstruct URL with processed components
+ processed_parts = list(parsed)
+ processed_parts[1] = netloc # Update netloc
+ processed_parts[2] = cls.expand_env_vars(parsed.path) # Update path
+
+ return urlunparse(tuple(processed_parts))
+
+
+def install_req_from_pipfile(name: str, pipfile: Dict[str, Any]) -> Tuple[Any, Any, str]:
+ """
+ Creates an InstallRequirement from a name and a pipfile entry.
+ Enhanced to handle environment variables within VCS URLs.
"""
_pipfile = {}
vcs = None
+
if hasattr(pipfile, "keys"):
_pipfile = dict(pipfile).copy()
else:
@@ -1098,40 +1158,41 @@ def install_req_from_pipfile(name, pipfile):
_pipfile[vcs] = pipfile
extras = _pipfile.get("extras", [])
- extras_str = ""
- if extras:
- extras_str = f"[{','.join(extras)}]"
+ extras_str = f"[{','.join(extras)}]" if extras else ""
+
if not vcs:
vcs = next(iter([vcs for vcs in VCS_LIST if vcs in _pipfile]), None)
if vcs:
- vcs_url = _pipfile[vcs]
- subdirectory = _pipfile.get("subdirectory", "")
- if subdirectory:
- subdirectory = f"#subdirectory={subdirectory}"
- vcs_url, fallback_ref = normalize_vcs_url(vcs_url)
- req_str = f"{vcs_url}@{_pipfile.get('ref', fallback_ref)}{extras_str}"
- if not req_str.startswith(f"{vcs}+"):
- req_str = f"{vcs}+{req_str}"
- if f"{vcs}+file://" in req_str or _pipfile.get("editable", False):
- req_str = (
- f"-e {req_str}#egg={name}{extras_str}{subdirectory.replace('#', '&')}"
+ try:
+ vcs_url = _pipfile[vcs]
+ subdirectory = _pipfile.get("subdirectory", "")
+ if subdirectory:
+ subdirectory = f"#subdirectory={subdirectory}"
+
+ # Process VCS URL with environment variable handling
+ vcs_url, fallback_ref = normalize_vcs_url(vcs_url)
+ ref = _pipfile.get("ref", fallback_ref)
+
+ # Construct requirement string
+ req_str = f"{vcs_url}@{ref}{extras_str}"
+ if not req_str.startswith(f"{vcs}+"):
+ req_str = f"{vcs}+{req_str}"
+
+ if _pipfile.get("editable", False):
+                req_str = f"-e {req_str}#egg={name}{extras_str}{subdirectory.replace('#', '&')}"
+ else:
+ req_str = f"{name}{extras_str} @ {req_str}{subdirectory}"
+
+ except PipenvUsageError as e:
+ raise PipenvUsageError(
+ f"Error processing VCS URL for requirement '{name}': {str(e)}"
)
- else:
- req_str = f"{name}{extras_str}@ {req_str}{subdirectory}"
- elif "path" in _pipfile:
- req_str = file_path_from_pipfile(_pipfile["path"], _pipfile)
- elif "file" in _pipfile:
- req_str = file_path_from_pipfile(_pipfile["file"], _pipfile)
else:
- # We ensure version contains an operator. Default to equals (==)
- _pipfile["version"] = version = get_version(pipfile)
- if version and not is_star(version) and COMPARE_OP.match(version) is None:
- _pipfile["version"] = f"=={version}"
- if is_star(version) or version == "==*":
- version = ""
- req_str = f"{name}{extras_str}{version}"
+ # Handle non-VCS requirements (unchanged)
+ req_str = handle_non_vcs_requirement(name, _pipfile, extras_str)
+ # Create InstallRequirement
install_req, _ = expansive_install_req_from_line(
req_str,
comes_from=None,
@@ -1141,10 +1202,33 @@ def install_req_from_pipfile(name, pipfile):
constraint=False,
expand_env=True,
)
+
markers = PipenvMarkers.from_pipfile(name, _pipfile)
return install_req, markers, req_str
+def handle_non_vcs_requirement(
+ name: str, _pipfile: Dict[str, Any], extras_str: str
+) -> str:
+ """Helper function to handle non-VCS requirements."""
+ if "path" in _pipfile:
+ return file_path_from_pipfile(_pipfile["path"], _pipfile)
+ elif "file" in _pipfile:
+ return file_path_from_pipfile(_pipfile["file"], _pipfile)
+ else:
+ version = get_version(_pipfile)
+ if version and not is_star(version) and COMPARE_OP.match(version) is None:
+ version = f"=={version}"
+ if is_star(version) or version == "==*":
+ version = ""
+
+ req_str = f"{name}{extras_str}{version}"
+ markers = PipenvMarkers.from_pipfile(name, _pipfile)
+ if markers:
+ req_str = f"{req_str};{markers}"
+ return req_str
+
+
def from_pipfile(name, pipfile):
install_req, markers, req_str = install_req_from_pipfile(name, pipfile)
if markers:
diff --git a/pipenv/utils/resolver.py b/pipenv/utils/resolver.py
index 302f664d92..f44505fe3a 100644
--- a/pipenv/utils/resolver.py
+++ b/pipenv/utils/resolver.py
@@ -5,7 +5,7 @@
import sys
import tempfile
import warnings
-from functools import cached_property, lru_cache
+from functools import lru_cache
from pathlib import Path
from typing import Dict, List, Optional
@@ -221,6 +221,7 @@ def create(
markers_lookup[package_name] = install_req.markers
if is_constraint:
constraints.add(dep)
+
resolver = Resolver(
set(),
req_dir,
@@ -284,10 +285,6 @@ def prepare_constraint_file(self):
return constraint_filename
@property
- def constraint_file(self):
- return self.prepare_constraint_file()
-
- @cached_property
def default_constraint_file(self):
default_constraints = get_constraints_from_deps(self.project.packages)
default_constraint_filename = prepare_constraint_file(
@@ -326,7 +323,7 @@ def prepare_index_lookup(self):
alt_index_lookup[req_name] = index_mapping[index]
return alt_index_lookup
- @cached_property
+ @property
def package_finder(self):
finder = get_package_finder(
install_cmd=self.pip_command,
@@ -344,18 +341,18 @@ def finder(self, ignore_compatibility=False):
finder._ignore_compatibility = ignore_compatibility
return finder
- @cached_property
+ @property
def parsed_constraints(self):
pip_options = self.pip_options
pip_options.extra_index_urls = []
return parse_requirements(
- self.constraint_file,
+ self.prepare_constraint_file(),
finder=self.finder(),
session=self.session,
options=pip_options,
)
- @cached_property
+ @property
def parsed_default_constraints(self):
pip_options = self.pip_options
pip_options.extra_index_urls = []
@@ -368,7 +365,7 @@ def parsed_default_constraints(self):
)
return set(parsed_default_constraints)
- @cached_property
+ @property
def default_constraints(self):
possible_default_constraints = [
install_req_from_parsed_requirement(
@@ -584,7 +581,7 @@ def collect_hashes(self, ireq):
return {self.project.get_hash_from_link(self.hash_cache, link)}
return set()
- @cached_property
+ @property
def resolve_hashes(self):
if self.results is not None:
for ireq in self.results:
diff --git a/pyproject.toml b/pyproject.toml
index 835202cc07..325381e01b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -115,9 +115,12 @@ exclude = [
"get-pipenv.py",
"pipenv/patched/*",
"pipenv/vendor/*",
+ "tests/fixtures/*",
+ "tests/pypi/*",
+ "tests/test_artifacts/*",
]
-select = [
+lint.select = [
"ASYNC",
"B",
"C4",
@@ -136,30 +139,53 @@ select = [
"W",
"YTT",
]
-ignore = [
+lint.ignore = [
"B904",
"PIE790",
+ "PLR0912", # Too many branches
+ "PLR0913", # Too many arguments
+ "PLR2004", # Magic numbers
"PLR5501",
"PLW2901",
+ "TID252", # Relative imports
]
-pylint.allow-magic-value-types = [ "int", "str" ]
-pylint.max-args = 20
-pylint.max-branches = 38
-pylint.max-returns = 9
-pylint.max-statements = 155
-mccabe.max-complexity = 44
-per-file-ignores."docs/conf.py" = [ "E402", "E501" ]
-per-file-ignores."get-pipenv.py" = [ "E402" ]
-per-file-ignores."pipenv/__init__.py" = [ "E401" ]
-per-file-ignores."pipenv/cli/command.py" = [ "TID252" ]
-per-file-ignores."pipenv/utils/internet.py" = [ "PLW0603" ]
-per-file-ignores."pipenv/utils/resolver.py" = [ "B018" ]
-per-file-ignores."tests/*" = [ "E501", "F401", "I", "PLC1901", "S101" ]
-per-file-ignores."tests/integration/conftest.py" = [ "B003", "PIE800", "PLW0603" ]
-per-file-ignores."tests/integration/test_pipenv.py" = [ "E741" ]
-per-file-ignores."tests/integration/test_requirements.py" = [ "E741" ]
-per-file-ignores."tests/unit/test_funktools.py" = [ "B015" ]
-per-file-ignores."tests/unit/test_utils.py" = [ "F811" ]
+lint.per-file-ignores = { "pipenv/cli/command.py" = [
+ "F811",
+], "pipenv/__init__.py" = [
+ "E402",
+ "E501",
+], "pipenv/utils/shell.py" = [
+ "E402",
+], "pipenv/utils/internet.py" = [
+ "E401",
+], "pipenv/utils/dependencies.py" = [
+ "TID252",
+], "pipenv/vendor/requirementslib/models/requirements.py" = [
+ "PLW0603",
+], "pipenv/vendor/requirementslib/models/utils.py" = [
+ "B018",
+], "pipenv/project.py" = [
+ "E501",
+ "F401",
+ "I",
+ "PLC1901",
+ "S101",
+], "pipenv/cli/options.py" = [
+ "B003",
+ "PIE800",
+ "PLW0603",
+], "pipenv/utils/processes.py" = [
+ "E741",
+], "pipenv/vendor/vistir/misc.py" = [
+ "E741",
+], "pipenv/vendor/pythonfinder/models/python.py" = [
+ "B015",
+] }
+lint.mccabe.max-complexity = 44
+lint.pylint.max-args = 9
+lint.pylint.max-branches = 20
+lint.pylint.max-returns = 9
+lint.pylint.max-statements = 155
[tool.pyproject-fmt]
# after how many column width split arrays/dicts into multiple lines, 1 will force always
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 4175c21c03..a7dea47643 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -1,31 +1,29 @@
+import contextlib
import errno
import functools
import json
import logging
import os
import shutil
-from shutil import rmtree as _rmtree
+import subprocess
import sys
import warnings
from pathlib import Path
+from shutil import rmtree as _rmtree
from tempfile import TemporaryDirectory
-import subprocess
import pytest
-from pipenv.patched.pip._vendor import requests
-from pipenv.vendor import tomlkit
-from pipenv.utils.processes import subprocess_run
+from pipenv.patched.pip._vendor import requests
from pipenv.utils.funktools import handle_remove_readonly
+from pipenv.utils.processes import subprocess_run
from pipenv.utils.shell import temp_environ
-import contextlib
+from pipenv.vendor import tomlkit
log = logging.getLogger(__name__)
warnings.simplefilter("default", category=ResourceWarning)
-HAS_WARNED_GITHUB = False
-
DEFAULT_PRIVATE_PYPI_SERVER = os.environ.get(
"PIPENV_PYPI_SERVER", "http://localhost:8080/simple"
)
@@ -75,15 +73,6 @@ def check_github_ssh():
RuntimeWarning,
stacklevel=1,
)
- except Exception:
- pass
- global HAS_WARNED_GITHUB
- if not res and not HAS_WARNED_GITHUB:
- warnings.warn("Cannot connect to GitHub via SSH", RuntimeWarning, stacklevel=1)
- warnings.warn(
- "Will skip tests requiring SSH access to GitHub", RuntimeWarning, stacklevel=1
- )
- HAS_WARNED_GITHUB = True
return res
diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py
index a44b48abda..2ffc187e2f 100644
--- a/tests/integration/test_cli.py
+++ b/tests/integration/test_cli.py
@@ -2,8 +2,8 @@
import re
import sys
from pathlib import Path
-import pytest
+import pytest
from pipenv.utils.processes import subprocess_run
from pipenv.utils.shell import normalize_drive
diff --git a/tests/integration/test_import_requirements.py b/tests/integration/test_import_requirements.py
index 0f87bac316..5a7f6b3c36 100644
--- a/tests/integration/test_import_requirements.py
+++ b/tests/integration/test_import_requirements.py
@@ -5,9 +5,8 @@
import pytest
from pipenv.patched.pip._internal.operations.prepare import File
-
-from pipenv.utils.requirements import import_requirements
from pipenv.project import Project
+from pipenv.utils.requirements import import_requirements
@pytest.mark.cli
@@ -83,7 +82,9 @@ def test_auth_with_pw_are_variables_passed_to_pipfile(
requirements_file.close()
import_requirements(project, r=requirements_file.name)
os.unlink(requirements_file.name)
- assert p.pipfile["packages"]["myproject"] == {'git': 'https://${AUTH_USER}:${AUTH_PW}@github.com/user/myproject.git', 'ref': 'main'}
+ expected = {'git': 'https://${AUTH_USER}:${AUTH_PW}@github.com/user/myproject.git', 'ref': 'main'}
+ assert p.pipfile["packages"]["myproject"] == expected
+
@pytest.mark.cli
@pytest.mark.deploy
diff --git a/tests/integration/test_install_basic.py b/tests/integration/test_install_basic.py
index b6d33f8157..dd244306cb 100644
--- a/tests/integration/test_install_basic.py
+++ b/tests/integration/test_install_basic.py
@@ -379,9 +379,7 @@ def test_system_and_deploy_work(pipenv_instance_private_pypi):
@pytest.mark.basic
@pytest.mark.install
def test_install_creates_pipfile(pipenv_instance_pypi):
- with pipenv_instance_pypi() as p:
- if os.path.isfile(p.pipfile_path):
- os.unlink(p.pipfile_path)
+ with pipenv_instance_pypi(pipfile=False) as p:
assert not os.path.isfile(p.pipfile_path)
c = p.pipenv("install")
assert c.returncode == 0
diff --git a/tests/integration/test_install_markers.py b/tests/integration/test_install_markers.py
index a357c392e8..0fb82c22dd 100644
--- a/tests/integration/test_install_markers.py
+++ b/tests/integration/test_install_markers.py
@@ -3,7 +3,6 @@
import pytest
from flaky import flaky
-
from pipenv.project import Project
from pipenv.utils.shell import temp_environ
diff --git a/tests/integration/test_install_twists.py b/tests/integration/test_install_twists.py
index c6e30aff30..87268e8b37 100644
--- a/tests/integration/test_install_twists.py
+++ b/tests/integration/test_install_twists.py
@@ -317,17 +317,16 @@ def test_outdated_should_compare_postreleases_without_failing(
assert "out-of-date" in c.stdout
-@pytest.mark.skipif(
- sys.version_info >= (3, 12), reason="Package does not work with Python 3.12"
-)
+@pytest.mark.install
+@pytest.mark.needs_internet
def test_install_remote_wheel_file_with_extras(pipenv_instance_pypi):
with pipenv_instance_pypi() as p:
c = p.pipenv(
- "install fastapi[dev]@https://files.pythonhosted.org/packages/4e/1a/04887c641b67e6649bde845b9a631f73a7abfbe3afda83957e09b95d88eb/fastapi-0.95.2-py3-none-any.whl"
+ "install -v fastapi[standard]@https://files.pythonhosted.org/packages/c9/14/bbe7776356ef01f830f8085ca3ac2aea59c73727b6ffaa757abeb7d2900b/fastapi-0.115.2-py3-none-any.whl"
)
assert c.returncode == 0
- assert "ruff" in p.lockfile["default"]
- assert "pre-commit" in p.lockfile["default"]
+ assert "httpx" in p.lockfile["default"]
+ assert "jinja2" in p.lockfile["default"]
assert "uvicorn" in p.lockfile["default"]
diff --git a/tests/integration/test_install_uri.py b/tests/integration/test_install_uri.py
index 745138dd63..1c2e575b9b 100644
--- a/tests/integration/test_install_uri.py
+++ b/tests/integration/test_install_uri.py
@@ -12,7 +12,7 @@
@pytest.mark.needs_internet
def test_basic_vcs_install_with_env_var(pipenv_instance_pypi):
from pipenv.cli import cli
- from click.testing import (
+ from pipenv.vendor.click.testing import (
CliRunner,
) # not thread safe but macos and linux will expand the env var otherwise
@@ -143,9 +143,6 @@ def test_install_named_index_alias(pipenv_instance_private_pypi):
@pytest.mark.index
@pytest.mark.install
@pytest.mark.needs_internet
-@pytest.mark.skipif(
- sys.version_info >= (3, 12), reason="Package does not work with Python 3.12"
-)
def test_install_specifying_index_url(pipenv_instance_private_pypi):
with pipenv_instance_private_pypi() as p:
with open(p.pipfile_path, "w") as f:
diff --git a/tests/integration/test_lock.py b/tests/integration/test_lock.py
index 7f3ed3af20..1fb1cc2b0f 100644
--- a/tests/integration/test_lock.py
+++ b/tests/integration/test_lock.py
@@ -2,9 +2,8 @@
from pathlib import Path
import pytest
-
-
from flaky import flaky
+
from pipenv.utils.shell import temp_environ
diff --git a/tests/integration/test_lockfile.py b/tests/integration/test_lockfile.py
index 145c291d12..285e30465c 100644
--- a/tests/integration/test_lockfile.py
+++ b/tests/integration/test_lockfile.py
@@ -1,5 +1,6 @@
-from collections import defaultdict
import json
+from collections import defaultdict
+
import pytest
from pipenv.project import Project
diff --git a/tests/integration/test_pipenv.py b/tests/integration/test_pipenv.py
index e0ec23b1a0..a8c07d99c3 100644
--- a/tests/integration/test_pipenv.py
+++ b/tests/integration/test_pipenv.py
@@ -58,7 +58,7 @@ def test_update_locks(pipenv_instance_private_pypi):
c = p.pipenv("run pip freeze")
assert c.returncode == 0
lines = c.stdout.splitlines()
- assert "jdcal==1.4" in [l.strip() for l in lines]
+ assert "jdcal==1.4" in [line.strip() for line in lines]
@pytest.mark.project
diff --git a/tests/integration/test_project.py b/tests/integration/test_project.py
index eee575294f..b0a13cf9a5 100644
--- a/tests/integration/test_project.py
+++ b/tests/integration/test_project.py
@@ -4,9 +4,9 @@
import pytest
from pipenv.project import Project
+from pipenv.utils.fileutils import normalize_path
from pipenv.utils.shell import temp_environ
from pipenv.vendor.plette import Pipfile
-from pipenv.utils.fileutils import normalize_path
@pytest.mark.project
diff --git a/tests/integration/test_requirements.py b/tests/integration/test_requirements.py
index 8be73b7cac..1ff97ef670 100644
--- a/tests/integration/test_requirements.py
+++ b/tests/integration/test_requirements.py
@@ -1,9 +1,10 @@
import json
import os
+
import pytest
-from pipenv.utils.shell import temp_environ
from pipenv.utils.requirements import requirements_from_lockfile
+from pipenv.utils.shell import temp_environ
@pytest.mark.requirements
@@ -66,8 +67,8 @@ def test_requirements_generates_requirements_from_lockfile_multiple_sources(
{dev_packages[0]}= "=={dev_packages[1]}"
""".strip()
f.write(contents)
- l = p.pipenv("lock")
- assert l.returncode == 0
+ result = p.pipenv("lock")
+ assert result.returncode == 0
c = p.pipenv("requirements")
assert c.returncode == 0
@@ -101,8 +102,8 @@ def test_requirements_generates_requirements_from_lockfile_from_categories(
{doc_packages[0]}= "=={doc_packages[1]}"
""".strip()
f.write(contents)
- l = p.pipenv("lock")
- assert l.returncode == 0
+ result = p.pipenv("lock")
+ assert result.returncode == 0
c = p.pipenv("requirements --dev-only")
assert c.returncode == 0
diff --git a/tests/integration/test_uninstall.py b/tests/integration/test_uninstall.py
index 04c338bd90..5f998e8688 100644
--- a/tests/integration/test_uninstall.py
+++ b/tests/integration/test_uninstall.py
@@ -1,10 +1,11 @@
-import pytest
import sys
-from .conftest import DEFAULT_PRIVATE_PYPI_SERVER
+import pytest
from pipenv.utils.shell import temp_environ
+from .conftest import DEFAULT_PRIVATE_PYPI_SERVER
+
@pytest.mark.uninstall
@pytest.mark.install
diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py
index c72e29ccf7..b4f4dbc9bb 100644
--- a/tests/integration/test_upgrade.py
+++ b/tests/integration/test_upgrade.py
@@ -46,7 +46,7 @@ def test_category_not_sorted_without_directive(pipenv_instance_private_pypi):
assert c.returncode == 0
assert list(p.pipfile["packages"].keys()) == [
"zipp",
- "six",
"colorama",
"atomicwrites",
+ "six",
]
diff --git a/tests/integration/test_windows.py b/tests/integration/test_windows.py
index 46a9434f89..c5cef275fd 100644
--- a/tests/integration/test_windows.py
+++ b/tests/integration/test_windows.py
@@ -1,8 +1,8 @@
import os
+import sys
from pathlib import Path
import pytest
-import sys
from pipenv.utils.processes import subprocess_run
diff --git a/tests/unit/test_core.py b/tests/unit/test_core.py
index f857697458..f96c6b976f 100644
--- a/tests/unit/test_core.py
+++ b/tests/unit/test_core.py
@@ -3,9 +3,9 @@
import pytest
-from pipenv.utils.virtualenv import warn_in_virtualenv
from pipenv.utils.environment import load_dot_env
from pipenv.utils.shell import temp_environ
+from pipenv.utils.virtualenv import warn_in_virtualenv
@pytest.mark.core
diff --git a/tests/unit/test_dependencies.py b/tests/unit/test_dependencies.py
index f63163e792..da853cec7a 100644
--- a/tests/unit/test_dependencies.py
+++ b/tests/unit/test_dependencies.py
@@ -1,6 +1,6 @@
-import pytest
from pipenv.utils.dependencies import clean_resolved_dep
+
def test_clean_resolved_dep_with_vcs_url():
project = {} # Mock project object, adjust as needed
dep = {
diff --git a/tests/unit/test_environments.py b/tests/unit/test_environments.py
index b66368d4c0..6b05670aa4 100644
--- a/tests/unit/test_environments.py
+++ b/tests/unit/test_environments.py
@@ -1,6 +1,8 @@
import itertools
-import pytest
import os
+
+import pytest
+
from pipenv import environments
from pipenv.utils.shell import temp_environ
diff --git a/tests/unit/test_funktools.py b/tests/unit/test_funktools.py
index 885a5274c7..7358abff1e 100644
--- a/tests/unit/test_funktools.py
+++ b/tests/unit/test_funktools.py
@@ -1,6 +1,6 @@
import pytest
-from pipenv.utils.funktools import dedup, unnest, _is_iterable
+from pipenv.utils.funktools import _is_iterable, dedup, unnest
def test_unnest():
@@ -9,7 +9,7 @@ def test_unnest():
(3456, 4398345, (234234)),
(2396, (928379, 29384, (293759, 2347, (2098, 7987, 27599)))),
)
- list(unnest(nested_iterable)) == [
+ assert list(unnest(nested_iterable)) == [
1234,
3456,
4398345,
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
index 73f5c6ec29..b95c285240 100644
--- a/tests/unit/test_utils.py
+++ b/tests/unit/test_utils.py
@@ -3,13 +3,8 @@
import pytest
-from pipenv.utils import dependencies
-from pipenv.utils import indexes
-from pipenv.utils import internet
-from pipenv.utils import shell
-from pipenv.utils import toml
from pipenv.exceptions import PipenvUsageError
-
+from pipenv.utils import dependencies, indexes, internet, shell, toml
# Pipfile format <-> requirements.txt format.
DEP_PIP_PAIRS = [
@@ -118,11 +113,11 @@ def test_convert_deps_to_pip_extras_no_version():
{
"FooProject": {
"version": "==1.2",
- "hash": "sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824",
+ "hashes": ["sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"],
}
},
{
- "FooProject": "FooProject==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
+ "FooProject": "FooProject==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
},
),
(
@@ -130,11 +125,11 @@ def test_convert_deps_to_pip_extras_no_version():
"FooProject": {
"version": "==1.2",
"extras": ["stuff"],
- "hash": "sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824",
+ "hashes": ["sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"],
}
},
{
- "FooProject": "FooProject[stuff]==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
+ "FooProject": "FooProject[stuff]==1.2 --hash=sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"
},
),
(
@@ -146,17 +141,17 @@ def test_convert_deps_to_pip_extras_no_version():
}
},
{
- "uvicorn": "git+https://github.com/encode/uvicorn.git@master#egg=uvicorn[standard]"
+ "uvicorn": "uvicorn[standard]@ git+https://github.com/encode/uvicorn.git@master"
},
),
],
)
def test_convert_deps_to_pip_one_way(deps, expected):
- assert dependencies.convert_deps_to_pip(deps) == [expected.lower()]
+ assert dependencies.convert_deps_to_pip(deps) == expected
@pytest.mark.utils
-def test_convert_deps_to_pip_one_way():
+def test_convert_deps_to_pip_one_way_uvicorn():
deps = {"uvicorn": {}}
expected = {"uvicorn": "uvicorn"}
assert dependencies.convert_deps_to_pip(deps) == expected
@@ -270,7 +265,8 @@ def test_python_version_from_non_python(self):
("Python 3.6.2 :: Continuum Analytics, Inc.", "3.6.2"),
("Python 3.6.20 :: Continuum Analytics, Inc.", "3.6.20"),
(
- "Python 3.5.3 (3f6eaa010fce78cc7973bdc1dfdb95970f08fed2, Jan 13 2018, 18:14:01)\n[PyPy 5.10.1 with GCC 4.2.1 Compatible Apple LLVM 9.0.0 (clang-900.0.39.2)]",
+ "Python 3.5.3 (3f6eaa010fce78cc7973bdc1dfdb95970f08fed2, "
+ "Jan 13 2018, 18:14:01)\n[PyPy 5.10.1 with GCC 4.2.1 Compatible Apple LLVM 9.0.0 (clang-900.0.39.2)]",
"3.5.3",
),
],
diff --git a/tests/unit/test_utils_windows_executable.py b/tests/unit/test_utils_windows_executable.py
index a4ffe71bbb..970a3ccc9b 100644
--- a/tests/unit/test_utils_windows_executable.py
+++ b/tests/unit/test_utils_windows_executable.py
@@ -1,11 +1,10 @@
import os
-
from unittest import mock
+
import pytest
from pipenv.utils import shell
-
# This module is run only on Windows.
pytestmark = pytest.mark.skipif(
os.name != "nt",
diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py
new file mode 100644
index 0000000000..fa6072b04a
--- /dev/null
+++ b/tests/unit/test_vcs.py
@@ -0,0 +1,315 @@
+import os
+
+import pytest
+
+from pipenv.exceptions import PipenvUsageError
+from pipenv.utils.dependencies import VCSURLProcessor, install_req_from_pipfile, normalize_vcs_url
+
+
+def test_vcs_url_processor_basic_expansion():
+ """Test basic environment variable expansion in URLs."""
+ os.environ['TEST_HOST'] = 'github.com'
+ os.environ['TEST_USER'] = 'testuser'
+ os.environ['TEST_REPO'] = 'testrepo'
+
+ url = "https://${TEST_HOST}/${TEST_USER}/${TEST_REPO}.git"
+ processed = VCSURLProcessor.process_vcs_url(url)
+
+ assert processed == "https://github.com/testuser/testrepo.git"
+
+
+def test_vcs_url_processor_auth_handling():
+ """Test handling of authentication components in URLs."""
+ os.environ['GIT_USER'] = 'myuser'
+ os.environ['GIT_TOKEN'] = 'mytoken'
+
+ url = "https://${GIT_USER}:${GIT_TOKEN}@github.com/org/repo.git"
+ processed = VCSURLProcessor.process_vcs_url(url)
+
+ assert processed == "https://myuser:mytoken@github.com/org/repo.git"
+
+
+def test_vcs_url_processor_missing_env_var():
+ """Test error handling for missing environment variables."""
+ with pytest.raises(PipenvUsageError) as exc:
+ VCSURLProcessor.process_vcs_url("https://${NONEXISTENT_VAR}@github.com/org/repo.git")
+
+ assert "Environment variable" in str(exc.value)
+ assert "NONEXISTENT_VAR" in str(exc.value)
+
+
+def test_install_req_from_pipfile_vcs_with_env_vars():
+ """Test creation of install requirement from Pipfile entry with environment variables."""
+ os.environ.update({
+ 'GIT_HOST': 'github.com',
+ 'GIT_ORG': 'testorg',
+ 'GIT_REPO': 'testrepo'
+ })
+
+ pipfile = {
+ 'git': 'https://${GIT_HOST}/${GIT_ORG}/${GIT_REPO}.git',
+ 'ref': 'master',
+ 'extras': ['test']
+ }
+
+ install_req, markers, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ # Environment variables should be preserved in the requirement string
+ assert '${GIT_HOST}' in req_str
+ assert '${GIT_ORG}' in req_str
+ assert '${GIT_REPO}' in req_str
+ assert 'master' in req_str
+ assert '[test]' in req_str
+
+
+def test_install_req_from_pipfile_with_auth():
+ """Test install requirement creation with authentication in URL."""
+ os.environ.update({
+ 'GIT_USER': 'testuser',
+ 'GIT_TOKEN': 'testtoken'
+ })
+
+ pipfile = {
+ 'git': 'https://${GIT_USER}:${GIT_TOKEN}@github.com/org/repo.git',
+ 'ref': 'main'
+ }
+
+ install_req, markers, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ # Environment variables should be preserved
+ assert '${GIT_USER}' in req_str
+ assert '${GIT_TOKEN}' in req_str
+ assert 'main' in req_str
+
+
+def test_install_req_from_pipfile_editable():
+ """Test handling of editable installs with environment variables."""
+ os.environ['REPO_URL'] = 'github.com/org/repo'
+
+ pipfile = {
+ 'git': 'https://${REPO_URL}.git',
+ 'editable': True,
+ 'ref': 'develop'
+ }
+
+ install_req, markers, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ assert req_str.startswith("-e")
+ assert '${REPO_URL}' in req_str
+ assert 'develop' in req_str
+
+
+def test_install_req_from_pipfile_subdirectory():
+ """Test handling of subdirectory specification with environment variables."""
+ os.environ['REPO_PATH'] = 'myorg/myrepo'
+
+ pipfile = {
+ 'git': 'https://github.com/${REPO_PATH}.git',
+ 'subdirectory': 'subdir',
+ 'ref': 'main'
+ }
+
+ install_req, markers, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ assert '${REPO_PATH}' in req_str
+ assert '#subdirectory=subdir' in req_str
+
+
+@pytest.mark.parametrize("url_format,expected_url,expected_req", [
+ (
+ "git+https://${HOST}/${REPO}.git",
+ "https://github.com/org/repo.git",
+ "package-name @ git+https://${HOST}/${REPO}.git@main"
+ ),
+ (
+ "git+ssh://${USER}@${HOST}:${REPO}.git",
+        "git+ssh://git@github.com:org/repo.git",
+ "package-name @ git+ssh://${USER}@${HOST}:${REPO}.git@main"
+ ),
+ # Note: Removing git+git@ test case as it's handled differently
+])
+def test_various_vcs_url_formats(url_format, expected_url, expected_req):
+ """Test different VCS URL formats with environment variables."""
+ os.environ.update({
+ 'HOST': 'github.com',
+ 'REPO': 'org/repo',
+ 'USER': 'git'
+ })
+
+ # When testing VCSURLProcessor directly
+ processed = VCSURLProcessor.process_vcs_url(url_format)
+ if 'github.com' in expected_url:
+ assert 'github.com' in processed
+ if 'org/repo' in expected_url:
+ assert 'org/repo' in processed
+
+ # When testing through install_req_from_pipfile
+ pipfile = {'git': url_format, 'ref': 'main'}
+ _, _, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ # Verify the format matches expected_req pattern
+ req_str = req_str.strip()
+ assert '${HOST}' in req_str
+ assert '${REPO}' in req_str
+ if '${USER}' in url_format:
+ assert '${USER}' in req_str
+
+
+def test_git_ssh_shorthand_format():
+ """Test the git+git@ SSH shorthand format specifically."""
+ url = "git@${HOST}:${REPO}.git"
+ pipfile = {'git': url, 'ref': 'main'}
+
+ os.environ.update({
+ 'HOST': 'github.com',
+ 'REPO': 'org/repo'
+ })
+
+ # First test direct VCSURLProcessor
+ processed = VCSURLProcessor.process_vcs_url(url)
+ assert "git@github.com:org/repo.git" == processed
+
+ # Then test requirement string generation
+ _, _, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ # The actual format might be different than other VCS URLs
+ # We need to verify the essential parts are there
+ assert 'git' in req_str
+ assert 'main' in req_str
+ assert 'package-name' in req_str
+
+
+def test_git_url_format_variations():
+ """Test different git URL format variations."""
+ test_cases = [
+ {
+ 'git': 'https://${HOST}/${REPO}.git',
+ 'expected_vars': ['${HOST}', '${REPO}']
+ },
+ {
+ 'git': 'git+https://${HOST}/${REPO}.git',
+ 'expected_vars': ['${HOST}', '${REPO}']
+ },
+ {
+ 'git': 'git+ssh://${USER}@${HOST}/${REPO}.git',
+ 'expected_vars': ['${USER}', '${HOST}', '${REPO}']
+ },
+ {
+ 'git': 'ssh://git@${HOST}/${REPO}.git',
+ 'expected_vars': ['${HOST}', '${REPO}']
+ }
+ ]
+
+ for case in test_cases:
+ pipfile = {'git': case['git'], 'ref': 'main'}
+ _, _, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ for var in case['expected_vars']:
+ assert var in req_str, f"Expected {var} in {req_str}"
+
+
+def test_ssh_protocol_variations():
+ """Test various SSH protocol formats."""
+ test_cases = [
+ "git+ssh://git@${HOST}/${REPO}.git",
+ "ssh://git@${HOST}/${REPO}.git",
+        "git@${HOST}:${REPO}.git"
+ ]
+
+ os.environ.update({
+ 'HOST': 'github.com',
+ 'REPO': 'org/repo'
+ })
+
+ for url in test_cases:
+ pipfile = {'git': url, 'ref': 'main'}
+ _, _, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ # Verify we get a valid requirement string
+ assert 'package-name' in req_str
+ assert 'main' in req_str
+ # Don't assert specific URL format as it may vary
+
+
+@pytest.mark.parametrize("url_input,expected_ref", [
+ ("https://github.com/org/repo.git", ""),
+ ("https://github.com/org/repo.git@dev", "dev"),
+ ("https://github.com/org/repo.git@feature", "feature")
+])
+def test_normalize_vcs_url_ref_handling(url_input, expected_ref):
+ """Test reference handling in normalize_vcs_url."""
+ normalized_url, ref = normalize_vcs_url(url_input)
+ assert ref == expected_ref
+
+
+def test_complex_ssh_url_handling():
+ """Test handling of complex SSH URLs."""
+ pipfile = {
+ 'git': 'git+ssh://git@${HOST}:${PORT}/${REPO}.git',
+ 'ref': 'main'
+ }
+
+ os.environ.update({
+ 'HOST': 'github.com',
+ 'PORT': '22',
+ 'REPO': 'org/repo'
+ })
+
+ _, _, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ # Verify environment variables are preserved
+ assert '${HOST}' in req_str
+ assert '${PORT}' in req_str
+ assert '${REPO}' in req_str
+ assert 'main' in req_str
+
+
+def test_git_protocol_handling():
+ """Test handling of git:// protocol URLs."""
+ pipfile = {
+ 'git': 'git://${HOST}/${REPO}.git',
+ 'ref': 'main'
+ }
+
+ os.environ.update({
+ 'HOST': 'github.com',
+ 'REPO': 'org/repo'
+ })
+
+ _, _, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ assert '${HOST}' in req_str
+ assert '${REPO}' in req_str
+ assert 'main' in req_str
+
+
+@pytest.mark.parametrize("vcs_prefix", ["git+", "git+https://", "git+ssh://", "git+git://"])
+def test_vcs_prefix_handling(vcs_prefix):
+ """Test handling of different VCS URL prefixes."""
+ url = f"{vcs_prefix}${{HOST}}/${{REPO}}.git"
+ pipfile = {'git': url, 'ref': 'main'}
+
+ os.environ.update({
+ 'HOST': 'github.com',
+ 'REPO': 'org/repo'
+ })
+
+ _, _, req_str = install_req_from_pipfile("package-name", pipfile)
+
+ # Verify the VCS prefix is handled correctly
+ assert '${HOST}' in req_str
+ assert '${REPO}' in req_str
+ assert 'main' in req_str
+ assert req_str.startswith('package-name @')
+
+
+def test_normalize_vcs_url_with_env_vars():
+ """Test normalize_vcs_url function with environment variables."""
+ os.environ['GIT_ORG'] = 'testorg'
+ url = "https://github.com/${GIT_ORG}/repo.git@main"
+
+ normalized_url, ref = normalize_vcs_url(url)
+
+ # Environment variables should be preserved
+ assert '${GIT_ORG}' in normalized_url
+ assert ref == "main"