Add support for the no_proxy env var mechanism in the HTTP client #5556

Merged (1 commit, Apr 14, 2021)
1 change: 1 addition & 0 deletions CHANGES/4431.bugfix
@@ -0,0 +1 @@
Fixed HTTP client requests to honor ``no_proxy`` environment variables.
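
For context, a minimal sketch of the behavior this changelog entry describes, seen from the client side. The proxy address and host names below are placeholders rather than values from this PR; with trust_env=True the session reads proxy settings from the environment, and after this fix a host matched by no_proxy is contacted directly instead of through the proxy.

import asyncio
import os

import aiohttp

# Placeholder proxy and host values, for illustration only.
os.environ["http_proxy"] = "http://proxy.example.com:3128"
os.environ["no_proxy"] = "internal.example.com"


async def main() -> None:
    # trust_env=True makes the session read proxy settings from the
    # environment; with this fix, hosts matched by no_proxy are
    # requested directly instead of via http_proxy.
    async with aiohttp.ClientSession(trust_env=True) as session:
        async with session.get("http://internal.example.com/health") as resp:
            print(resp.status)


asyncio.run(main())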
1 change: 1 addition & 0 deletions CONTRIBUTORS.txt
@@ -254,6 +254,7 @@ Stanislav Prokop
Stefan Tjarks
Stepan Pletnev
Stephan Jaensch
Stephen Cirelli
Stephen Granade
Steven Seguin
Sunghyun Hwang
10 changes: 4 additions & 6 deletions aiohttp/client.py
@@ -8,6 +8,7 @@
import sys
import traceback
import warnings
from contextlib import suppress
from types import SimpleNamespace, TracebackType
from typing import (
Any,
@@ -76,8 +77,8 @@
BasicAuth,
TimeoutHandle,
ceil_timeout,
get_env_proxy_for_url,
get_running_loop,
proxies_from_env,
sentinel,
strip_auth_from_url,
)
@@ -483,11 +484,8 @@ async def _request(
if proxy is not None:
proxy = URL(proxy)
elif self._trust_env:
for scheme, proxy_info in proxies_from_env().items():
if scheme == url.scheme:
proxy = proxy_info.proxy
proxy_auth = proxy_info.proxy_auth
break
with suppress(LookupError):
proxy, proxy_auth = get_env_proxy_for_url(url)

req = self._request_class(
method,
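
A note on the control flow in the hunk above: get_env_proxy_for_url raises LookupError both when the host is on the no_proxy list and when no proxy matches the URL scheme, and suppress(LookupError) swallows that, so proxy and proxy_auth simply stay None and the request is sent without a proxy. A self-contained sketch of that pattern follows; the helper below is a stand-in, not aiohttp code.

from contextlib import suppress


def fake_get_env_proxy_for_url(url: str):
    # Stand-in for helpers.get_env_proxy_for_url: it raises LookupError
    # when proxying is disallowed for the host or when no matching proxy
    # is configured in the environment.
    raise LookupError(f"No proxies found for `{url}` in the env")


proxy = proxy_auth = None
with suppress(LookupError):
    proxy, proxy_auth = fake_get_env_proxy_for_url("http://aiohttp.io/path")

# The LookupError was swallowed, so both names remain None and the
# request would go out without a proxy.
print(proxy, proxy_auth)  # None None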
16 changes: 15 additions & 1 deletion aiohttp/helpers.py
@@ -41,7 +41,7 @@
cast,
)
from urllib.parse import quote
from urllib.request import getproxies
from urllib.request import getproxies, proxy_bypass

import async_timeout
import attr
@@ -297,6 +297,20 @@ def isasyncgenfunction(obj: Any) -> bool:
return False


def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
"""Get a permitted proxy for the given URL from the env."""
if url.host is not None and proxy_bypass(url.host):
raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

proxies_in_env = proxies_from_env()
try:
proxy_info = proxies_in_env[url.scheme]
except KeyError:
raise LookupError(f"No proxies found for `{url!s}` in the env")
else:
return proxy_info.proxy, proxy_info.proxy_auth


@attr.s(auto_attribs=True, frozen=True, slots=True)
Review comment from @scirelli (contributor, author), Mar 23, 2021:
The merge added a @dataclasses.dataclass(frozen=True) decorator here, but since I didn't see it used anywhere else in this file in the 3.8 code, I removed it.

class MimeType:
type: str
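
To make the new helper's contract concrete, here is a hedged usage sketch that calls it directly. The proxy URL and host names are placeholders; note that aiohttp only supports proxies served over plain HTTP, so https_proxy still points at an http:// address, mirroring the tests below.

import os

from yarl import URL

from aiohttp import helpers

# Placeholder values for illustration; the proxy URL uses the http://
# scheme because HTTPS-scheme proxies are ignored by proxies_from_env.
os.environ["https_proxy"] = "http://proxy.example.com:3128"
os.environ["no_proxy"] = "internal.example.com"

# Scheme matches https_proxy and the host is not bypassed: the proxy URL
# and its (absent) credentials are returned.
proxy, proxy_auth = helpers.get_env_proxy_for_url(URL("https://aiohttp.io/path"))
print(proxy)       # http://proxy.example.com:3128
print(proxy_auth)  # None

# The host is on the no_proxy list, so the helper raises LookupError.
try:
    helpers.get_env_proxy_for_url(URL("https://internal.example.com/status"))
except LookupError as exc:
    print(exc)  # Proxying is disallowed for `'internal.example.com'`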
91 changes: 91 additions & 0 deletions tests/test_helpers.py
@@ -6,6 +6,7 @@
import tempfile
from math import isclose, modf
from unittest import mock
from urllib.request import getproxies_environment

import pytest
from multidict import MultiDict
@@ -497,6 +498,96 @@ async def test_get_running_loop_ok(loop) -> None:
assert helpers.get_running_loop() is loop


# --------------------- get_env_proxy_for_url ------------------------------


@pytest.fixture
def proxy_env_vars(monkeypatch, request):
for schema in getproxies_environment().keys():
monkeypatch.delenv(f"{schema}_proxy", False)

for proxy_type, proxy_list in request.param.items():
monkeypatch.setenv(proxy_type, proxy_list)

return request.param


@pytest.mark.parametrize(
("proxy_env_vars", "url_input", "expected_err_msg"),
(
(
{"no_proxy": "aiohttp.io"},
"http://aiohttp.io/path",
r"Proxying is disallowed for `'aiohttp.io'`",
),
(
{"no_proxy": "aiohttp.io,proxy.com"},
"http://aiohttp.io/path",
r"Proxying is disallowed for `'aiohttp.io'`",
),
(
{"http_proxy": "http://example.com"},
"https://aiohttp.io/path",
r"No proxies found for `https://aiohttp.io/path` in the env",
),
(
{"https_proxy": "https://example.com"},
"http://aiohttp.io/path",
r"No proxies found for `http://aiohttp.io/path` in the env",
),
(
{},
"https://aiohttp.io/path",
r"No proxies found for `https://aiohttp.io/path` in the env",
),
(
{"https_proxy": "https://example.com"},
"",
r"No proxies found for `` in the env",
),
),
indirect=["proxy_env_vars"],
ids=(
"url_matches_the_no_proxy_list",
"url_matches_the_no_proxy_list_multiple",
"url_scheme_does_not_match_http_proxy_list",
"url_scheme_does_not_match_https_proxy_list",
"no_proxies_are_set",
"url_is_empty",
),
)
@pytest.mark.usefixtures("proxy_env_vars")
def test_get_env_proxy_for_url_negative(url_input, expected_err_msg) -> None:
url = URL(url_input)
with pytest.raises(LookupError, match=expected_err_msg):
helpers.get_env_proxy_for_url(url)


@pytest.mark.parametrize(
("proxy_env_vars", "url_input"),
(
({"http_proxy": "http://example.com"}, "http://aiohttp.io/path"),
({"https_proxy": "http://example.com"}, "https://aiohttp.io/path"),
(
{"http_proxy": "http://example.com,http://proxy.org"},
"http://aiohttp.io/path",
),
),
indirect=["proxy_env_vars"],
ids=(
"url_scheme_match_http_proxy_list",
"url_scheme_match_https_proxy_list",
"url_scheme_match_http_proxy_list_multiple",
),
)
def test_get_env_proxy_for_url(proxy_env_vars, url_input) -> None:
url = URL(url_input)
proxy, proxy_auth = helpers.get_env_proxy_for_url(url)
proxy_list = proxy_env_vars[url.scheme + "_proxy"]
assert proxy == URL(proxy_list)
assert proxy_auth is None


# ------------- set_result / set_exception ----------------------

