
Support generic headers and HTTP timeouts
Instead of supporting only urllib3 headers, the client can now pass generic headers such as "Authorization" in addition to urllib3-style headers (e.g. "basic_auth"). Furthermore, HTTP timeouts are now honored by the FileDownloader.
mcsimps2 committed Jan 10, 2020
1 parent 1abe00d commit d0e29f4
Showing 6 changed files with 50 additions and 22 deletions.
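Before the per-file diff, a rough sketch of what the change enables at the Client level. This is illustrative only and not part of the commit; the ClientConfig import, app name, version, and token are placeholders, while the headers keyword and the mix of generic and urllib3-style keys are what this commit adds.

from pyupdater.client import Client
from client_config import ClientConfig  # placeholder for the app's generated config module

# "Authorization" is a generic HTTP header; "basic_auth" is a
# urllib3.util.make_headers keyword. After this commit both may live in one dict.
headers = {
    "Authorization": "Bearer <token>",
    "basic_auth": "user:pass",
}

client = Client(ClientConfig(), refresh=True, headers=headers)
app_update = client.update_check("AppName", "1.0.0")  # placeholder app name and version
if app_update is not None:
    app_update.download()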
2 changes: 1 addition & 1 deletion docs/api.md
@@ -95,7 +95,7 @@ progress_hooks (list): List of callbacks

data_dir (str): Path to custom update folder

- headers (dict): A urllib3.utils.make_headers compatible dictionary
+ headers (dict): A dictionary of generic and/or urllib3.utils.make_headers compatible headers

test (bool): Used to initialize a test client

18 changes: 9 additions & 9 deletions pyupdater/client/__init__.py
@@ -85,7 +85,7 @@ class Client(object):
data_dir (str): Path to custom update folder
- headers (dict): A urllib3.utils.make_headers compatible dictionary
+ headers (dict): A dictionary of generic and/or urllib3.utils.make_headers compatible headers
test (bool): Used to initialize a test client
@@ -130,8 +130,6 @@ def __init__(self, obj, **kwargs):
raise SyntaxError("progress_hooks must be provided as a list.")
self.progress_hooks += progress_hooks

- obj.URLLIB3_HEADERS = headers
-
# A super dict used to save config info & be dot accessed
config = _Config()
config.from_object(obj)
@@ -198,8 +196,8 @@ def __init__(self, obj, **kwargs):
# The name of the key file to download
self.key_file = settings.KEY_FILE_FILENAME

- # urllib3 headers
- self.urllib3_headers = obj.URLLIB3_HEADERS
+ # headers
+ self.headers = headers

# Creating data & update directories
self._setup()
@@ -244,7 +242,7 @@ def _gen_file_downloader_options(self):
"http_timeout": self.http_timeout,
"max_download_retries": self.max_download_retries,
"progress_hooks": self.progress_hooks,
"urllib3_headers": self.urllib3_headers,
"headers": self.headers,
"verify": self.verify,
}

@@ -317,7 +315,7 @@ def _update_check(self, name, version, channel, strict):
"verify": self.verify,
"max_download_retries": self.max_download_retries,
"progress_hooks": list(set(self.progress_hooks)),
"urllib3_headers": self.urllib3_headers,
"headers": self.headers,
"downloader": self.downloader,
}

@@ -436,7 +434,8 @@ def _get_manifest_from_http(self):
vf,
self.update_urls,
verify=self.verify,
- urllb3_headers=self.urllib3_headers,
+ headers=self.headers,
+ http_timeout=self.http_timeout
)
data = fd.download_verify_return()
try:
@@ -468,7 +467,8 @@ def _get_key_data(self):
self.key_file,
self.update_urls,
verify=self.verify,
- urllb3_headers=self.urllib3_headers,
+ headers=self.headers,
+ http_timeout=self.http_timeout
)
data = fd.download_verify_return()
try:
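Downstream of the Client, every FileDownloader call site above now forwards the same two extra keyword arguments. A condensed sketch of that call shape, with a placeholder file name and URL (only the keyword names come from this diff):

from pyupdater.client.downloader import FileDownloader

# Mirrors the options assembled in _gen_file_downloader_options: headers and
# http_timeout now travel alongside verify and the retry settings.
fd = FileDownloader(
    "versions.gz",                     # placeholder file name
    ["https://updates.example.com/"],  # placeholder update URL
    verify=True,
    headers={"Authorization": "Bearer <token>"},
    http_timeout=30,
)
data = fd.download_verify_return()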
21 changes: 17 additions & 4 deletions pyupdater/client/downloader.py
@@ -24,6 +24,7 @@
# ------------------------------------------------------------------------------
from __future__ import unicode_literals
import hashlib
+ import inspect
import logging
import os
import time
@@ -131,7 +132,9 @@ def __init__(self, *args, **kwargs):
self.content_length = None

# Extra headers
self.headers = kwargs.get("urllb3_headers")
self.headers = kwargs.get("headers")

self.http_timeout = kwargs.get("http_timeout")

if self.verify is True:
self.http_pool = self._get_http_pool()
@@ -141,15 +144,25 @@ def __init__(self, *args, **kwargs):
def _get_http_pool(self, secure=True):
if secure:
_http = urllib3.PoolManager(
cert_reqs=str("CERT_REQUIRED"), ca_certs=certifi.where()
cert_reqs=str("CERT_REQUIRED"), ca_certs=certifi.where(), timeout=self.http_timeout
)
else:
- _http = urllib3.PoolManager()
+ _http = urllib3.PoolManager(timeout=self.http_timeout)

if self.headers:
- _headers = urllib3.util.make_headers(**self.headers)
+ if six.PY3:
+ # Python3
+ urllib_keys = inspect.getfullargspec(urllib3.util.make_headers).args
+ else:
+ # Python2 fallback
+ urllib_keys = inspect.getargspec(urllib3.util.make_headers).args
+ urllib_headers = {header: value for header, value in six.iteritems(self.headers) if header in urllib_keys}
+ other_headers = {header: value for header, value in six.iteritems(self.headers) if header not in urllib_keys}
+ _headers = urllib3.util.make_headers(**urllib_headers)
+ _headers.update(other_headers)
_http.headers.update(_headers)
log.debug(_http.headers)
log.debug("HTTP Timeout is " + str(self.http_timeout))
return _http

def download_verify_write(self):
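The heart of the downloader change is splitting one mixed dict into make_headers keywords and plain HTTP headers. A standalone, Python 3-only sketch of the same technique, independent of PyUpdater (the function name and example values are invented):

import inspect

import urllib3

def build_pool(headers, timeout=None):
    # Keyword names accepted by urllib3.util.make_headers,
    # e.g. "basic_auth", "user_agent", "accept_encoding".
    make_headers_keys = inspect.getfullargspec(urllib3.util.make_headers).args

    urllib_part = {k: v for k, v in headers.items() if k in make_headers_keys}
    generic_part = {k: v for k, v in headers.items() if k not in make_headers_keys}

    # make_headers turns basic_auth="user:pass" into an Authorization header;
    # generic headers are merged in untouched.
    merged = urllib3.util.make_headers(**urllib_part)
    merged.update(generic_part)
    return urllib3.PoolManager(headers=merged, timeout=timeout)

# Both spellings end up as real request headers on the pool manager.
http = build_pool({"basic_auth": "user:pass", "X-Api-Key": "abc123"}, timeout=10.0)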
10 changes: 7 additions & 3 deletions pyupdater/client/patcher.py
@@ -67,7 +67,9 @@ class Patcher(object):
max_download_retries (int): Number of times to retry a download
- urllib3_headers (dict): Headers to be used with http request
+ headers (dict): Headers to be used with http request. Accepts urllib3 and generic headers.
+
+ http_timeout (int): HTTP timeout or None
"""

def __init__(self, **kwargs):
@@ -81,8 +83,9 @@ def __init__(self, **kwargs):
self.update_urls = kwargs.get("update_urls", [])
self.verify = kwargs.get("verify", True)
self.max_download_retries = kwargs.get("max_download_retries")
- self.urllib3_headers = kwargs.get("urllib3_headers")
+ self.headers = kwargs.get("headers")
self.downloader = kwargs.get("downloader")
+ self.http_timeout = kwargs.get("http_timeout")

# Progress hooks to be called
self.progress_hooks = kwargs.get("progress_hooks", [])
@@ -302,7 +305,8 @@ def _download_verify_patches(self):
hexdigest=p["patch_hash"],
verify=self.verify,
max_download_retries=self.max_download_retries,
- urllb3_headers=self.urllib3_headers,
+ headers=self.headers,
+ http_timeout=self.http_timeout
)

# Attempt to download resource
7 changes: 4 additions & 3 deletions pyupdater/client/updates.py
Expand Up @@ -418,8 +418,8 @@ def __init__(self, data=None):
# Whether or not to verify the https connection
self.verify = data.get("verify", True)

- # Extra headers to pass to urllib3
- self.urllib3_headers = data.get("urllib3_headers")
+ # Extra headers
+ self.headers = data.get("headers")

# The amount of times to retry a url before giving up
self.max_download_retries = data.get("max_download_retries")
@@ -684,7 +684,8 @@ def _full_update(self):
verify=self.verify,
progress_hooks=self.progress_hooks,
max_download_retries=self.max_download_retries,
- urllb3_headers=self.urllib3_headers,
+ headers=self.headers,
+ http_timeout=self.http_timeout
)
result = fd.download_verify_write()
if result:
14 changes: 12 additions & 2 deletions tests/test_downloader.py
Expand Up @@ -63,10 +63,20 @@ def test_return_fail(self, download_max_size):


@pytest.mark.usefixtures("cleandir")
- class TestBasicAuth(object):
+ class TestBasicAuthUrlLib(object):
def test_basic_auth(self):
headers = {"basic_auth": "user:pass"}
fd = FileDownloader("test", ["test"], urllb3_headers=headers)
fd = FileDownloader("test", ["test"], headers=headers)
http = fd._get_http_pool(secure=True)
sc = http.request("GET", "https://httpbin.org/basic-auth/user/pass").status
assert sc == 200


+ @pytest.mark.usefixtures("cleandir")
+ class TestAuthorizationHeader(object):
+ def test_auth_header(self):
+ headers = {"Authorization": "Basic dXNlcjpwYXNz"}
+ fd = FileDownloader("test", ["test"], headers=headers)
+ http = fd._get_http_pool(secure=True)
+ sc = http.request("GET", "https://httpbin.org/basic-auth/user/pass").status
+ assert sc == 200
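The new tests exercise header handling but not the timeout. A possible companion test in the same httpbin style; the delay endpoint, retries=False, and the expected exception type are assumptions rather than part of this commit:

import pytest
import urllib3

from pyupdater.client.downloader import FileDownloader  # already imported at the top of this module

@pytest.mark.usefixtures("cleandir")
class TestHttpTimeout(object):
    def test_timeout_is_applied(self):
        # A 1-second budget against an endpoint that stalls for 5 seconds
        # should surface a read timeout from urllib3.
        fd = FileDownloader("test", ["test"], http_timeout=1)
        http = fd._get_http_pool(secure=True)
        with pytest.raises(urllib3.exceptions.ReadTimeoutError):
            http.request("GET", "https://httpbin.org/delay/5", retries=False)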
