diff --git a/micropip/package_index.py b/micropip/package_index.py
index 8b78726..2772b7b 100644
--- a/micropip/package_index.py
+++ b/micropip/package_index.py
@@ -13,6 +13,7 @@
 from ._compat import fetch_string_and_headers
 from ._utils import is_package_compatible, parse_version
 from .externals.mousebender.simple import from_project_details_html
+from .wheelinfo import WheelInfo
 
 DEFAULT_INDEX_URLS = ["https://pypi.org/simple"]
 INDEX_URLS = DEFAULT_INDEX_URLS
@@ -20,19 +21,6 @@
 _formatter = string.Formatter()
 
 
-# TODO: Merge this class with WheelInfo
-@dataclass
-class ProjectInfoFile:
-    filename: str  # Name of the file
-    url: str  # URL to download the file
-    version: Version  # Version of the package
-    sha256: str | None  # SHA256 hash of the file
-
-    # Size of the file in bytes, if available (PEP 700)
-    # This key is not available in the Simple API HTML response, so this field may be None
-    size: int | None = None
-
-
 @dataclass
 class ProjectInfo:
     """
@@ -46,7 +34,7 @@ class ProjectInfo:
     # List of releases available for the package, sorted in ascending order by version.
     # For each version, list of wheels compatible with the current platform are stored.
     # If no such wheel is available, the list is empty.
-    releases: dict[Version, Generator[ProjectInfoFile, None, None]]
+    releases: dict[Version, Generator[WheelInfo, None, None]]
 
     @staticmethod
     def from_json_api(data: str | bytes | dict[str, Any]) -> "ProjectInfo":
@@ -148,7 +136,7 @@ def _compatible_only(
 
     def _compatible_wheels(
         files: list[dict[str, Any]], version: Version
-    ) -> Generator[ProjectInfoFile, None, None]:
+    ) -> Generator[WheelInfo, None, None]:
         for file in files:
             filename = file["filename"]
 
@@ -162,12 +150,17 @@ def _compatible_wheels(
             hashes = file["digests"] if "digests" in file else file["hashes"]
             sha256 = hashes.get("sha256")
 
-            yield ProjectInfoFile(
+            # Size of the file in bytes, if available (PEP 700)
+            # This key is not available in the Simple API HTML response, so this field may be None
+            size = file.get("size")
+
+            yield WheelInfo.from_package_index(
+                name=name,
                 filename=filename,
                 url=file["url"],
                 version=version,
                 sha256=sha256,
-                size=file.get("size"),
+                size=size,
             )
 
     releases_compatible = {
diff --git a/micropip/transaction.py b/micropip/transaction.py
index c2fffbd..ee95a3a 100644
--- a/micropip/transaction.py
+++ b/micropip/transaction.py
@@ -1,174 +1,25 @@
 import asyncio
-import hashlib
 import importlib.metadata
-import json
 import logging
 import warnings
 from dataclasses import dataclass, field
 from importlib.metadata import PackageNotFoundError
-from pathlib import Path
-from typing import IO, Any
-from urllib.parse import ParseResult, urlparse
-from zipfile import ZipFile
+from urllib.parse import urlparse
 
 from packaging.requirements import Requirement
-from packaging.tags import Tag
 from packaging.utils import canonicalize_name
-from packaging.version import Version
 
 from . import package_index
-from ._compat import (
-    REPODATA_PACKAGES,
-    fetch_bytes,
-    get_dynlibs,
-    loadDynlib,
-    loadedPackages,
-    wheel_dist_info_dir,
-)
-from ._utils import best_compatible_tag_index, check_compatible, parse_wheel_filename
+from ._compat import REPODATA_PACKAGES
+from ._utils import best_compatible_tag_index, check_compatible
 from .constants import FAQ_URLS
-from .externals.pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
 from .package import PackageMetadata
-from .package_index import ProjectInfo, ProjectInfoFile
+from .package_index import ProjectInfo
+from .wheelinfo import WheelInfo
 
 logger = logging.getLogger("micropip")
 
 
-@dataclass
-class WheelInfo:
-    name: str
-    version: Version
-    filename: str
-    build: tuple[int, str] | tuple[()]
-    tags: frozenset[Tag]
-    url: str
-    parsed_url: ParseResult
-    project_name: str | None = None
-    sha256: str | None = None
-    data: IO[bytes] | None = None
-    _dist: Any = None
-    dist_info: Path | None = None
-    _requires: list[Requirement] | None = None
-
-    @staticmethod
-    def from_url(url: str) -> "WheelInfo":
-        """Parse wheels URL and extract available metadata
-
-        See https://www.python.org/dev/peps/pep-0427/#file-name-convention
-        """
-        parsed_url = urlparse(url)
-        file_name = Path(parsed_url.path).name
-        name, version, build, tags = parse_wheel_filename(file_name)
-        return WheelInfo(
-            name=name,
-            version=version,
-            filename=file_name,
-            build=build,
-            tags=tags,
-            url=url,
-            parsed_url=parsed_url,
-        )
-
-    @staticmethod
-    def from_project_info_file(project_info_file: ProjectInfoFile) -> "WheelInfo":
-        """Extract available metadata from response received from package index"""
-        wheel_info = WheelInfo.from_url(project_info_file.url)
-        wheel_info.sha256 = project_info_file.sha256
-
-        return wheel_info
-
-    async def _fetch_bytes(self, fetch_kwargs):
-        try:
-            return await fetch_bytes(self.url, fetch_kwargs)
-        except OSError as e:
-            if self.parsed_url.hostname in [
-                "files.pythonhosted.org",
-                "cdn.jsdelivr.net",
-            ]:
-                raise e
-            else:
-                raise ValueError(
-                    f"Can't fetch wheel from '{self.url}'. "
-                    "One common reason for this is when the server blocks "
-                    "Cross-Origin Resource Sharing (CORS). "
-                    "Check if the server is sending the correct 'Access-Control-Allow-Origin' header."
-                ) from e
-
-    async def download(self, fetch_kwargs):
-        data = await self._fetch_bytes(fetch_kwargs)
-        self.data = data
-        with ZipFile(data) as zip_file:
-            self._dist = pkg_resources_distribution_for_wheel(
-                zip_file, self.name, "???"
-            )
-
-        self.project_name = self._dist.project_name
-        if self.project_name == "UNKNOWN":
-            self.project_name = self.name
-
-    def validate(self):
-        if self.sha256 is None:
-            # No checksums available, e.g. because installing
-            # from a different location than PyPI.
-            return
-
-        assert self.data
-        sha256_actual = _generate_package_hash(self.data)
-        if sha256_actual != self.sha256:
-            raise ValueError("Contents don't match hash")
-
-    def extract(self, target: Path) -> None:
-        assert self.data
-        with ZipFile(self.data) as zf:
-            zf.extractall(target)
-        dist_info_name: str = wheel_dist_info_dir(ZipFile(self.data), self.name)
-        self.dist_info = target / dist_info_name
-
-    def requires(self, extras: set[str]) -> list[str]:
-        if not self._dist:
-            raise RuntimeError(
-                "Micropip internal error: attempted to access wheel 'requires' before downloading it?"
-            )
-        requires = self._dist.requires(extras)
-        self._requires = requires
-        return requires
-
-    def write_dist_info(self, file: str, content: str) -> None:
-        assert self.dist_info
-        (self.dist_info / file).write_text(content)
-
-    def set_installer(self) -> None:
-        assert self.data
-        wheel_source = "pypi" if self.sha256 is not None else self.url
-
-        self.write_dist_info("PYODIDE_SOURCE", wheel_source)
-        self.write_dist_info("PYODIDE_URL", self.url)
-        self.write_dist_info("PYODIDE_SHA256", _generate_package_hash(self.data))
-        self.write_dist_info("INSTALLER", "micropip")
-        if self._requires:
-            self.write_dist_info(
-                "PYODIDE_REQUIRES", json.dumps(sorted(x.name for x in self._requires))
-            )
-        name = self.project_name
-        assert name
-        setattr(loadedPackages, name, wheel_source)
-
-    async def load_libraries(self, target: Path) -> None:
-        assert self.data
-        dynlibs = get_dynlibs(self.data, ".whl", target)
-        await asyncio.gather(*map(lambda dynlib: loadDynlib(dynlib, False), dynlibs))
-
-    async def install(self, target: Path) -> None:
-        if not self.data:
-            raise RuntimeError(
-                "Micropip internal error: attempted to install wheel before downloading it?"
-            )
-        self.validate()
-        self.extract(target)
-        await self.load_libraries(target)
-        self.set_installer()
-
-
 @dataclass
 class Transaction:
     ctx: dict[str, str]
@@ -417,9 +268,8 @@ def find_wheel(metadata: ProjectInfo, req: Requirement) -> WheelInfo:
         best_wheel = None
         best_tag_index = float("infinity")
 
-        files = releases[ver]
-        for fileinfo in files:
-            wheel = WheelInfo.from_project_info_file(fileinfo)
+        wheels = releases[ver]
+        for wheel in wheels:
             tag_index = best_compatible_tag_index(wheel.tags)
             if tag_index is not None and tag_index < best_tag_index:
                 best_wheel = wheel
@@ -434,11 +284,3 @@
         "You can use `await micropip.install(..., keep_going=True)` "
         "to get a list of all packages with missing wheels."
     )
-
-
-def _generate_package_hash(data: IO[bytes]) -> str:
-    sha256_hash = hashlib.sha256()
-    data.seek(0)
-    while chunk := data.read(4096):
-        sha256_hash.update(chunk)
-    return sha256_hash.hexdigest()
diff --git a/micropip/wheelinfo.py b/micropip/wheelinfo.py
new file mode 100644
index 0000000..8d39e55
--- /dev/null
+++ b/micropip/wheelinfo.py
@@ -0,0 +1,221 @@
+import asyncio
+import hashlib
+import json
+from dataclasses import dataclass
+from pathlib import Path
+from typing import IO, Any
+from urllib.parse import ParseResult, urlparse
+from zipfile import ZipFile
+
+from packaging.requirements import Requirement
+from packaging.tags import Tag
+from packaging.version import Version
+
+from ._compat import (
+    fetch_bytes,
+    get_dynlibs,
+    loadDynlib,
+    loadedPackages,
+    wheel_dist_info_dir,
+)
+from ._utils import parse_wheel_filename
+from .externals.pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
+from .externals.pip._vendor.pkg_resources import Distribution
+
+
+@dataclass
+class WheelInfo:
+    """
+    WheelInfo represents a wheel file and its metadata (e.g. URL and hash)
+    """
+
+    name: str
+    version: Version
+    filename: str
+    build: tuple[int, str] | tuple[()]
+    tags: frozenset[Tag]
+    url: str
+    parsed_url: ParseResult
+    sha256: str | None = None
+    size: int | None = None  # Size in bytes, if available (PEP 700)
+
+    # Fields below are only available after downloading the wheel, i.e. after calling `download()`.
+
+    _data: IO[bytes] | None = None  # Wheel file contents.
+    _dist: Distribution | None = None  # pkg_resources.Distribution object.
+    _requires: list[Requirement] | None = None  # List of requirements.
+
+    # Note: `_project_name` is taken from the wheel metadata, while `name` is taken from the wheel filename or metadata of the package index.
+    # They are mostly the same, but can be different in some weird cases (e.g. a user manually renaming the wheel file), so just to be safe we store both.
+    _project_name: str | None = None  # Project name.
+
+    # Path to the .dist-info directory. This is only available after extracting the wheel, i.e. after calling `extract()`.
+    _dist_info: Path | None = None
+
+    @classmethod
+    def from_url(cls, url: str) -> "WheelInfo":
+        """Parse wheels URL and extract available metadata
+
+        See https://www.python.org/dev/peps/pep-0427/#file-name-convention
+        """
+        parsed_url = urlparse(url)
+        file_name = Path(parsed_url.path).name
+        name, version, build, tags = parse_wheel_filename(file_name)
+        return WheelInfo(
+            name=name,
+            version=version,
+            filename=file_name,
+            build=build,
+            tags=tags,
+            url=url,
+            parsed_url=parsed_url,
+        )
+
+    @classmethod
+    def from_package_index(
+        cls,
+        name: str,
+        filename: str,
+        url: str,
+        version: Version,
+        sha256: str | None,
+        size: int | None,
+    ) -> "WheelInfo":
+        """Extract available metadata from response received from package index"""
+        parsed_url = urlparse(url)
+        _, _, build, tags = parse_wheel_filename(filename)
+
+        return WheelInfo(
+            name=name,
+            version=version,
+            filename=filename,
+            build=build,
+            tags=tags,
+            url=url,
+            parsed_url=parsed_url,
+            sha256=sha256,
+            size=size,
+        )
+
+    async def install(self, target: Path) -> None:
+        """
+        Install the wheel to the target directory.
+
+        The installation process is as follows:
+            0. A wheel needs to be downloaded before it can be installed. This is done by calling `download()`.
+            1. The wheel is validated by comparing its hash with the one provided by the package index.
+            2. The wheel is extracted to the target directory.
+            3. The wheel's shared libraries are loaded.
+            4. The wheel's metadata is set.
+        """
+        if not self._data:
+            raise RuntimeError(
+                "Micropip internal error: attempted to install wheel before downloading it?"
+            )
+        self._validate()
+        self._extract(target)
+        await self._load_libraries(target)
+        self._set_installer()
+
+    async def download(self, fetch_kwargs: dict[str, Any]):
+        if self._data is not None:
+            return
+
+        self._data = await self._fetch_bytes(fetch_kwargs)
+        with ZipFile(self._data) as zip_file:
+            self._dist = pkg_resources_distribution_for_wheel(
+                zip_file, self.name, "???"
+            )
+
+        self._project_name = self._dist.project_name
+        if self._project_name == "UNKNOWN":
+            self._project_name = self.name
+
+    def requires(self, extras: set[str]) -> list[str]:
+        """
+        Get a list of requirements for the wheel.
+        """
+        if not self._dist:
+            raise RuntimeError(
+                "Micropip internal error: attempted to access wheel 'requires' before downloading it?"
+            )
+        requires = self._dist.requires(extras)
+        self._requires = requires
+        return requires
+
+    async def _fetch_bytes(self, fetch_kwargs: dict[str, Any]):
+        try:
+            return await fetch_bytes(self.url, fetch_kwargs)
+        except OSError as e:
+            if self.parsed_url.hostname in [
+                "files.pythonhosted.org",
+                "cdn.jsdelivr.net",
+            ]:
+                raise e
+            else:
+                raise ValueError(
+                    f"Can't fetch wheel from '{self.url}'. "
+                    "One common reason for this is when the server blocks "
+                    "Cross-Origin Resource Sharing (CORS). "
+                    "Check if the server is sending the correct 'Access-Control-Allow-Origin' header."
+                ) from e
+
+    def _validate(self):
+        if self.sha256 is None:
+            # No checksums available, e.g. because installing
+            # from a different location than PyPI.
+            return
+
+        assert self._data
+        sha256_actual = _generate_package_hash(self._data)
+        if sha256_actual != self.sha256:
+            raise ValueError("Contents don't match hash")
+
+    def _extract(self, target: Path) -> None:
+        assert self._data
+        with ZipFile(self._data) as zf:
+            zf.extractall(target)
+            self._dist_info = target / wheel_dist_info_dir(zf, self.name)
+
+    def _set_installer(self) -> None:
+        """
+        Set the installer metadata in the wheel's .dist-info directory.
+        """
+        assert self._data
+        wheel_source = "pypi" if self.sha256 is not None else self.url
+
+        self._write_dist_info("PYODIDE_SOURCE", wheel_source)
+        self._write_dist_info("PYODIDE_URL", self.url)
+        self._write_dist_info("PYODIDE_SHA256", _generate_package_hash(self._data))
+        self._write_dist_info("INSTALLER", "micropip")
+        if self._requires:
+            self._write_dist_info(
+                "PYODIDE_REQUIRES", json.dumps(sorted(x.name for x in self._requires))
+            )
+
+        name = self._project_name or self.name
+        setattr(loadedPackages, name, wheel_source)
+
+    def _write_dist_info(self, file: str, content: str) -> None:
+        assert self._dist_info
+        (self._dist_info / file).write_text(content)
+
+    async def _load_libraries(self, target: Path) -> None:
+        """
+        Compiles shared libraries (WASM modules) in the wheel and loads them.
+        TODO: integrate with pyodide's dynamic library loading mechanism.
+        """
+        assert self._data
+        dynlibs = get_dynlibs(self._data, ".whl", target)
+        await asyncio.gather(*map(lambda dynlib: loadDynlib(dynlib, False), dynlibs))
+
+
+def _generate_package_hash(data: IO[bytes]) -> str:
+    """
+    Generate a SHA256 hash of the package data.
+    """
+    sha256_hash = hashlib.sha256()
+    data.seek(0)
+    while chunk := data.read(4096):
+        sha256_hash.update(chunk)
+    return sha256_hash.hexdigest()
diff --git a/tests/conftest.py b/tests/conftest.py
index 2f2647a..c456b12 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -20,13 +20,14 @@ def pytest_addoption(parser):
     )
 
 
-SNOWBALL_WHEEL = "snowballstemmer-2.0.0-py2.py3-none-any.whl"
-
 EMSCRIPTEN_VER = "3.1.14"
 PLATFORM = f"emscripten_{EMSCRIPTEN_VER.replace('.', '_')}_wasm32"
 CPVER = f"cp{sys.version_info.major}{sys.version_info.minor}"
 
 TEST_PYPI_RESPONSE_DIR = Path(__file__).parent / "test_data" / "pypi_response"
+TEST_WHEEL_DIR = Path(__file__).parent / "test_data" / "wheel"
+SNOWBALL_WHEEL = "snowballstemmer-2.0.0-py2.py3-none-any.whl"
+PYTEST_WHEEL = "pytest-7.2.2-py3-none-any.whl"
 
 
 def _read_pypi_response(file: Path) -> bytes:
@@ -192,7 +193,12 @@ def add_pkg_version(
         if top_level is None:
             top_level = []
         if name not in self.releases_map:
-            self.releases_map[name] = {"releases": {}}
+            self.releases_map[name] = {
+                "info": {
+                    "name": name,
+                },
+                "releases": {},
+            }
         releases = self.releases_map[name]["releases"]
         filename = self._make_wheel_filename(name, version, platform)
         releases[version] = [
@@ -257,11 +263,11 @@ def write_file(filename, contents):
 
 
 @pytest.fixture
 def mock_fetch(monkeypatch, mock_importlib):
     pytest.importorskip("packaging")
-    from micropip import package_index, transaction
+    from micropip import package_index, wheelinfo
 
     result = mock_fetch_cls()
     monkeypatch.setattr(package_index, "query_package", result.query_package)
-    monkeypatch.setattr(transaction, "fetch_bytes", result._fetch_bytes)
+    monkeypatch.setattr(wheelinfo, "fetch_bytes", result._fetch_bytes)
     return result
diff --git a/tests/test_data/wheel/pytest-7.2.2-py3-none-any.whl b/tests/test_data/wheel/pytest-7.2.2-py3-none-any.whl
new file mode 100644
index 0000000..896bd02
Binary files /dev/null and b/tests/test_data/wheel/pytest-7.2.2-py3-none-any.whl differ
diff --git a/tests/dist/snowballstemmer-2.0.0-py2.py3-none-any.whl b/tests/test_data/wheel/snowballstemmer-2.0.0-py2.py3-none-any.whl
similarity index 100%
rename from tests/dist/snowballstemmer-2.0.0-py2.py3-none-any.whl
rename to tests/test_data/wheel/snowballstemmer-2.0.0-py2.py3-none-any.whl
diff --git a/tests/test_install.py b/tests/test_install.py
index de6e068..38508bc 100644
--- a/tests/test_install.py
+++ b/tests/test_install.py
@@ -1,7 +1,5 @@
-from pathlib import Path
-
 import pytest
-from conftest import SNOWBALL_WHEEL, mock_fetch_cls
+from conftest import SNOWBALL_WHEEL, TEST_WHEEL_DIR, mock_fetch_cls
 from packaging.utils import parse_wheel_filename
 from pytest_pyodide import run_in_pyodide, spawn_web_server
 
@@ -34,7 +32,7 @@ def test_install_simple(selenium_standalone_micropip):
 def test_install_custom_url(selenium_standalone_micropip, base_url):
     selenium = selenium_standalone_micropip
 
-    with spawn_web_server(Path(__file__).parent / "dist") as server:
+    with spawn_web_server(TEST_WHEEL_DIR) as server:
         server_hostname, server_port, _ = server
         base_url = f"http://{server_hostname}:{server_port}/"
         url = base_url + SNOWBALL_WHEEL
@@ -253,12 +251,12 @@ async def test_install_pre(
 async def test_fetch_wheel_fail(monkeypatch, wheel_base):
     pytest.importorskip("packaging")
     import micropip
-    from micropip import transaction
+    from micropip import wheelinfo
 
     def _mock_fetch_bytes(arg, *args, **kwargs):
         raise OSError(f"Request for {arg} failed with status 404: Not Found")
 
-    monkeypatch.setattr(transaction, "fetch_bytes", _mock_fetch_bytes)
+    monkeypatch.setattr(wheelinfo, "fetch_bytes", _mock_fetch_bytes)
 
     msg = "Access-Control-Allow-Origin"
     with pytest.raises(ValueError, match=msg):
@@ -316,7 +314,7 @@ async def test_load_binary_wheel2(selenium):
 
 
 def test_emfs(selenium_standalone_micropip):
-    with spawn_web_server(Path(__file__).parent / "dist") as server:
+    with spawn_web_server(TEST_WHEEL_DIR) as server:
         server_hostname, server_port, _ = server
         url = f"http://{server_hostname}:{server_port}/"
 
@@ -344,7 +342,7 @@ async def run_test(selenium, url, wheel_name):
 
 def test_logging(selenium_standalone_micropip):
     # TODO: make a fixture for this, it's used in a few places
-    with spawn_web_server(Path(__file__).parent / "dist") as server:
+    with spawn_web_server(TEST_WHEEL_DIR) as server:
         server_hostname, server_port, _ = server
         url = f"http://{server_hostname}:{server_port}/"
         wheel_url = url + SNOWBALL_WHEEL
@@ -385,9 +383,9 @@ async def _mock_fetch_bytes(url, *args):
         _wheel_url = url
         return BytesIO(b"fake wheel")
 
-    from micropip import transaction
+    from micropip import wheelinfo
 
-    monkeypatch.setattr(transaction, "fetch_bytes", _mock_fetch_bytes)
+    monkeypatch.setattr(wheelinfo, "fetch_bytes", _mock_fetch_bytes)
 
     try:
         await micropip.install(
diff --git a/tests/test_list.py b/tests/test_list.py
index 32e0e3e..16d45ce 100644
--- a/tests/test_list.py
+++ b/tests/test_list.py
@@ -1,7 +1,5 @@
-from pathlib import Path
-
 import pytest
-from conftest import SNOWBALL_WHEEL, mock_fetch_cls
+from conftest import SNOWBALL_WHEEL, TEST_WHEEL_DIR, mock_fetch_cls
 from pytest_pyodide import spawn_web_server
 
 import micropip
@@ -45,7 +43,7 @@ async def test_list_wheel_name_mismatch(mock_fetch: mock_fetch_cls) -> None:
 
 
 def test_list_load_package_from_url(selenium_standalone_micropip):
-    with spawn_web_server(Path(__file__).parent / "dist") as server:
+    with spawn_web_server(TEST_WHEEL_DIR) as server:
         server_hostname, server_port, _ = server
         base_url = f"http://{server_hostname}:{server_port}/"
         url = base_url + SNOWBALL_WHEEL
diff --git a/tests/test_transaction.py b/tests/test_transaction.py
index b2ebb1d..3e878e1 100644
--- a/tests/test_transaction.py
+++ b/tests/test_transaction.py
@@ -1,7 +1,5 @@
-from pathlib import Path
-
 import pytest
-from conftest import SNOWBALL_WHEEL
+from conftest import SNOWBALL_WHEEL, TEST_WHEEL_DIR
 from packaging.tags import Tag
 from pytest_pyodide import spawn_web_server
 
@@ -71,7 +69,7 @@ async def test_add_requirement():
     pytest.importorskip("packaging")
     from micropip.transaction import Transaction
 
-    with spawn_web_server(Path(__file__).parent / "dist") as server:
+    with spawn_web_server(TEST_WHEEL_DIR) as server:
         server_hostname, server_port, _ = server
         base_url = f"http://{server_hostname}:{server_port}/"
         url = base_url + SNOWBALL_WHEEL
diff --git a/tests/test_uninstall.py b/tests/test_uninstall.py
index 0590382..6958d63 100644
--- a/tests/test_uninstall.py
+++ b/tests/test_uninstall.py
@@ -1,9 +1,8 @@
 # isort: skip_file
-from pathlib import Path
 
 import pytest
 from pytest_pyodide import run_in_pyodide, spawn_web_server
-from conftest import SNOWBALL_WHEEL
+from conftest import SNOWBALL_WHEEL, TEST_WHEEL_DIR
 from packaging.utils import parse_wheel_filename
 
 TEST_PACKAGE_NAME = "test_wheel_uninstall"
@@ -249,7 +248,7 @@ async def run(selenium):
 
 def test_logging(selenium_standalone_micropip):
     # TODO: make a fixture for this, it's used in a few places
-    with spawn_web_server(Path(__file__).parent / "dist") as server:
+    with spawn_web_server(TEST_WHEEL_DIR) as server:
         server_hostname, server_port, _ = server
         url = f"http://{server_hostname}:{server_port}/"
         wheel_url = url + SNOWBALL_WHEEL
diff --git a/tests/test_wheelinfo.py b/tests/test_wheelinfo.py
new file mode 100644
index 0000000..0688fce
--- /dev/null
+++ b/tests/test_wheelinfo.py
@@ -0,0 +1,128 @@
+from io import BytesIO
+
+import pytest
+from conftest import PYTEST_WHEEL, TEST_WHEEL_DIR
+
+from micropip.wheelinfo import WheelInfo
+
+
+@pytest.fixture
+def dummy_wheel():
+    yield WheelInfo.from_url(f"https://test.com/{PYTEST_WHEEL}")
+
+
+@pytest.fixture
+def dummy_wheel_content():
+    yield BytesIO((TEST_WHEEL_DIR / PYTEST_WHEEL).read_bytes())
+
+
+@pytest.fixture
+def dummy_wheel_url(httpserver):
+    httpserver.expect_request(f"/{PYTEST_WHEEL}").respond_with_data(
+        (TEST_WHEEL_DIR / PYTEST_WHEEL).read_bytes(),
+        content_type="application/zip",
+        headers={"Access-Control-Allow-Origin": "*"},
+    )
+    return httpserver.url_for(f"/{PYTEST_WHEEL}")
+
+
+def test_from_url():
+    url = "https://test.com/dummy_module-0.0.1-py3-none-any.whl"
+    wheel = WheelInfo.from_url(url)
+
+    assert wheel.name == "dummy-module"
+    assert str(wheel.version) == "0.0.1"
+    assert wheel.url == url
+    assert wheel.filename == "dummy_module-0.0.1-py3-none-any.whl"
+    assert wheel.size is None
+    assert wheel.sha256 is None
+
+
+def test_from_package_index():
+    name = "dummy-module"
+    filename = "dummy_module-0.0.1-py3-none-any.whl"
+    url = "https://test.com/dummy_module-0.0.1-py3-none-any.whl"
+    version = "0.0.1"
+    sha256 = "dummy-sha256"
+    size = 1234
+
+    wheel = WheelInfo.from_package_index(name, filename, url, version, sha256, size)
+
+    assert wheel.name == name
+    assert str(wheel.version) == version
+    assert wheel.url == url
+    assert wheel.filename == filename
+    assert wheel.size == size
+    assert wheel.sha256 == sha256
+
+
+def test_validate(dummy_wheel):
+    import hashlib
+
+    dummy_wheel.sha256 = None
+    dummy_wheel._data = BytesIO(b"dummy-data")
+
+    # Should succeed when sha256 is None
+    dummy_wheel._validate()
+
+    # Should fail when checksum is different
+    dummy_wheel.sha256 = "dummy-sha256"
+    with pytest.raises(ValueError, match="Contents don't match hash"):
+        dummy_wheel._validate()
+
+    # Should succeed when checksum is the same
+    dummy_wheel.sha256 = hashlib.sha256(b"dummy-data").hexdigest()
+    dummy_wheel._validate()
+
+
+def test_extract(dummy_wheel, dummy_wheel_content, tmp_path):
+    dummy_wheel._data = dummy_wheel_content
+    dummy_wheel._extract(tmp_path)
+
+    assert dummy_wheel._dist_info is not None
+    assert dummy_wheel._dist_info.is_dir()
+
+
+def test_set_installer(dummy_wheel, dummy_wheel_content, tmp_path):
+    dummy_wheel._data = dummy_wheel_content
+    dummy_wheel._extract(tmp_path)
+
+    dummy_wheel._set_installer()
+
+    assert (dummy_wheel._dist_info / "INSTALLER").read_text() == "micropip"
+    assert (dummy_wheel._dist_info / "PYODIDE_SOURCE").read_text() == dummy_wheel.url
+    assert (dummy_wheel._dist_info / "PYODIDE_URL").read_text() == dummy_wheel.url
+    assert (dummy_wheel._dist_info / "PYODIDE_SHA256").exists()
+
+
+def test_install():
+    pass
+
+
+@pytest.mark.asyncio
+async def test_download(dummy_wheel_url):
+    wheel = WheelInfo.from_url(dummy_wheel_url)
+
+    assert wheel._project_name is None
+    assert wheel._dist is None
+
+    await wheel.download({})
+
+    assert wheel._project_name == "pytest"
+    assert wheel._dist is not None
+
+
+@pytest.mark.asyncio
+async def test_requires(dummy_wheel_url, tmp_path):
+    wheel = WheelInfo.from_url(dummy_wheel_url)
+    await wheel.download({})
+
+    wheel._extract(tmp_path)
+
+    requirements_default = [str(r.name) for r in wheel.requires(set())]
+    assert "pluggy" in requirements_default
+    assert "hypothesis" not in requirements_default
+
+    requirements_extra_testing = [str(r.name) for r in wheel.requires({"testing"})]
+    assert "pluggy" in requirements_extra_testing
+    assert "hypothesis" in requirements_extra_testing
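Illustrative usage (not part of the diff above): with the refactored API, a wheel's whole lifecycle is driven through `WheelInfo`. The driver coroutine below is a hypothetical sketch; only `WheelInfo.from_url`, `download()`, and `install()` come from the diff, and it assumes a Pyodide runtime where micropip's `_compat` backends are available.

    # Hypothetical sketch, not code from the PR.
    from pathlib import Path

    from micropip.wheelinfo import WheelInfo

    async def install_wheel_from_url(url: str, target: Path) -> None:
        wheel = WheelInfo.from_url(url)  # name/version/tags parsed from the filename (PEP 427)
        await wheel.download({})         # fetch bytes and read wheel metadata
        await wheel.install(target)      # validate -> extract -> load libraries -> write installer metadata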