Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor tests and improve test coverage #9

Merged
merged 1 commit into from
Jul 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions cf_speedtest/speedtest.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@

def percentile(data: list, percentile: int) -> float:
    """Return the value at *percentile* of *data*, nearest-rank method.

    A percentile of 0 maps to the smallest element; any other percentile
    returns the element at ceil(len(data) * percentile / 100) in sorted order.
    """
    if percentile == 0:
        return min(data)
    ordered = sorted(data)
    rank = int(math.ceil(len(ordered) * percentile / 100))
    return ordered[rank - 1]

# returns ms of how long cloudflare took to process the request, this is in the Server-Timing header
Expand Down
1 change: 1 addition & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
pre-commit
pytest
requests[socks]
types-requests
25 changes: 0 additions & 25 deletions tests/all_test.py

This file was deleted.

18 changes: 18 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from __future__ import annotations

from unittest.mock import patch

import pytest


@pytest.fixture
def mock_time():
    """Replace speedtest's time module with a clock frozen at 1234567890.0."""
    with patch('cf_speedtest.speedtest.time') as frozen_clock:
        frozen_clock.time.return_value = 1234567890.0
        yield frozen_clock


@pytest.fixture
def mock_requests_session():
    """Yield a mock standing in for the module-level requests session."""
    with patch('cf_speedtest.speedtest.REQ_SESSION') as fake_session:
        yield fake_session
58 changes: 58 additions & 0 deletions tests/integration_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
from __future__ import annotations

import csv
import os

import pytest

from cf_speedtest import speedtest


@pytest.mark.integration
def test_country():
    """get_our_country() should return a two-character country code string."""
    detected = speedtest.get_our_country()
    assert isinstance(detected, str)
    # Country codes are expected to always be two characters.
    assert len(detected) == 2


@pytest.mark.integration
def test_preamble():
    """preamble() should produce a report mentioning IP and server location."""
    report = speedtest.preamble()
    assert isinstance(report, str)
    for expected_fragment in ('Your IP:', 'Server loc:'):
        assert expected_fragment in report


@pytest.mark.integration
def test_main():
    """A default run of main() should exit successfully."""
    exit_code = speedtest.main()
    assert exit_code == 0


@pytest.mark.integration
@pytest.mark.skip(reason='will fail without proxy')
def test_proxy():
    """main() should succeed when routed through an HTTP proxy."""
    exit_code = speedtest.main(['--proxy', '100.24.216.83:80'])
    assert exit_code == 0


@pytest.mark.integration
def test_nossl():
    """main() should succeed with TLS certificate verification disabled."""
    exit_code = speedtest.main(['--verifyssl', 'False'])
    assert exit_code == 0


@pytest.mark.integration
def test_csv_output():
    """main() with --output should write a non-empty, parseable CSV file.

    Fixes from the original version: it created two separate csv.reader
    objects (discarding the first), left StopIteration from next() uncaught
    on an empty file, and skipped cleanup when an assertion failed.
    """
    temp_file = 'test_output.csv'
    try:
        assert speedtest.main(['--output', temp_file]) == 0

        assert os.path.exists(temp_file)
        assert os.path.getsize(temp_file) > 0

        with open(temp_file) as csvfile:
            try:
                # One reader, fully consumed: parsing errors surface here.
                rows = list(csv.reader(csvfile))
            except csv.Error:
                pytest.fail('The output file is not a valid CSV')
            assert rows, 'The output CSV file contains no rows'
    finally:
        # Clean up even on failure so reruns start from a clean slate.
        if os.path.exists(temp_file):
            os.remove(temp_file)
37 changes: 37 additions & 0 deletions tests/network_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
from __future__ import annotations

from unittest.mock import MagicMock
from unittest.mock import patch

import pytest

from cf_speedtest import speedtest


@pytest.fixture
def mock_requests_session():
    """Yield a mock replacing the module-level requests session.

    NOTE(review): conftest.py appears to define an identical fixture; this
    local copy shadows it and could likely be removed — confirm first.
    """
    with patch('cf_speedtest.speedtest.REQ_SESSION') as fake_session:
        yield fake_session


def test_get_our_country(mock_requests_session):
    """get_our_country() should parse the 'loc=' field from the trace body."""
    fake_response = MagicMock()
    fake_response.text = 'loc=GB\nother=value'
    mock_requests_session.get.return_value = fake_response

    assert speedtest.get_our_country() == 'GB'


def test_preamble_unit(mock_requests_session):
    """preamble() should surface the IP, colo, and country in its report."""
    fake_response = MagicMock()
    fake_response.headers = {
        'cf-meta-ip': '1.2.3.4',
        'cf-meta-colo': 'LAX',
    }
    mock_requests_session.get.return_value = fake_response

    with patch('cf_speedtest.speedtest.get_our_country', return_value='US'):
        report = speedtest.preamble()
        for fragment in ('1.2.3.4', 'LAX', 'US'):
            assert fragment in report
49 changes: 49 additions & 0 deletions tests/options_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from __future__ import annotations

import pytest

from cf_speedtest import options
from cf_speedtest import speedtest


@pytest.mark.parametrize(
    'input_str, expected', [
        ('yes', True),
        ('no', False),
        ('true', True),
        ('false', False),
        ('1', True),
        ('0', False),
        ('YES', True),
        ('NO', False),
        ('True', True),
        ('False', False),
        ('y', True),
        ('n', False),
    ],
)
def test_str_to_bool_valid(input_str, expected):
    """str_to_bool() should accept the documented truthy/falsy spellings."""
    parsed = options.str_to_bool(input_str)
    assert parsed == expected


@pytest.mark.parametrize('input_str', ['invalid', 'maybe', '2', '-1'])
def test_str_to_bool_invalid(input_str):
    """str_to_bool() should reject spellings outside the recognized set."""
    with pytest.raises(speedtest.argparse.ArgumentTypeError):
        _ = options.str_to_bool(input_str)


@pytest.mark.parametrize(
    'input_str, expected', [
        ('0', 0),
        ('50', 50),
        ('100', 100),
    ],
)
def test_valid_percentile_valid(input_str, expected):
    """valid_percentile() should convert in-range numeric strings to ints."""
    parsed = options.valid_percentile(input_str)
    assert parsed == expected


@pytest.mark.parametrize('input_str', ['-1', '101', 'invalid', '50.5'])
def test_valid_percentile_invalid(input_str):
    """Out-of-range or non-integer strings should raise ArgumentTypeError."""
    with pytest.raises(speedtest.argparse.ArgumentTypeError):
        _ = options.valid_percentile(input_str)
130 changes: 130 additions & 0 deletions tests/speedtest_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
from __future__ import annotations

import os
from unittest.mock import patch

import pytest

from cf_speedtest import speedtest


@pytest.mark.parametrize(
    'test_type, bytes_to_xfer, iteration_count, expected_len', [
        ('down', 1000, 3, 3),
        ('up', 1000, 5, 5),
        ('invalid', 1000, 2, 0),
    ],
)
def test_run_tests(test_type, bytes_to_xfer, iteration_count, expected_len):
    """run_tests() should run the chosen direction the requested number of times.

    download_test/upload_test are stubbed with fixed (bytes, seconds) pairs so
    the computed bits-per-second figures are deterministic.
    """
    download_stub = patch(
        'cf_speedtest.speedtest.download_test', return_value=(1000, 0.1),
    )
    upload_stub = patch(
        'cf_speedtest.speedtest.upload_test', return_value=(1000, 0.2),
    )
    with download_stub, upload_stub:
        measurements = speedtest.run_tests(
            test_type, bytes_to_xfer, iteration_count,
        )
        assert len(measurements) == expected_len

        if test_type == 'down':
            assert measurements[0] == 80000  # (1000 bytes / 0.1 s) * 8 bits
        elif test_type == 'up':
            assert measurements[0] == 40000  # (1000 bytes / 0.2 s) * 8 bits


@patch('cf_speedtest.speedtest.run_tests')
@patch('cf_speedtest.speedtest.latency_test')
@patch('cf_speedtest.speedtest.preamble')
def test_run_standard_test(mock_preamble, mock_latency_test, mock_run_tests):
    """run_standard_test() should report the percentile speeds from run_tests."""
    mock_latency_test.return_value = 0.05
    # First run_tests call measures download, the second measures upload.
    mock_run_tests.side_effect = [
        [100000000, 200000000],
        [50000000, 100000000],
    ]

    report = speedtest.run_standard_test(
        [1000000], measurement_percentile=90, verbose=True,
    )

    assert report['download_speed'] == 200000000
    assert report['upload_speed'] == 100000000
    assert len(report['latency_measurements']) == 20


@pytest.mark.parametrize(
    'args, expected_exit_code', [
        (['--percentile', '90', '--verifyssl', 'False', '--testpatience', '10'], 0),
        (['--proxy', '100.24.216.83:80'], 0),
        (['--output', 'test_output.csv'], 0),
    ],
)
def test_main_unit(args, expected_exit_code, mock_requests_session):
    """main() should return success for each supported CLI argument set."""
    canned_results = {
        'download_speed': 100000000,
        'upload_speed': 50000000,
        'download_stdev': 1000000,
        'upload_stdev': 500000,
        'latency_measurements': [10, 20, 30],
        'download_measurements': [90000000, 100000000, 110000000],
        'upload_measurements': [45000000, 50000000, 55000000],
    }
    with patch(
        'cf_speedtest.speedtest.run_standard_test',
        return_value=canned_results,
    ):
        assert speedtest.main(args) == expected_exit_code


@pytest.mark.parametrize(
    'proxy, expected_dict', [
        (
            '100.24.216.83:80', {
                'http': 'http://100.24.216.83:80', 'https': 'http://100.24.216.83:80',
            },
        ),
        (
            'socks5://127.0.0.1:9150',
            {'http': 'socks5://127.0.0.1:9150', 'https': 'socks5://127.0.0.1:9150'},
        ),
        (
            'http://user:[email protected]:3128',
            {
                'http': 'http://user:[email protected]:3128',
                'https': 'http://user:[email protected]:3128',
            },
        ),
    ],
)
def test_proxy_unit(proxy, expected_dict):
    """--proxy should populate PROXY_DICT with http/https entries."""
    canned_results = {
        'download_speed': 100000000,
        'upload_speed': 50000000,
        'download_stdev': 1000000,
        'upload_stdev': 500000,
        'latency_measurements': [10, 20, 30],
        'download_measurements': [90000000, 100000000, 110000000],
        'upload_measurements': [45000000, 50000000, 55000000],
    }
    with patch(
        'cf_speedtest.speedtest.run_standard_test',
        return_value=canned_results,
    ):
        speedtest.main(['--proxy', proxy])
        assert speedtest.PROXY_DICT == expected_dict


def test_output_file(mock_time):
    """main() with --output should open the target file for writing.

    builtins.open is patched, so no file is normally created on disk; the
    test verifies only the open() call itself. The mock_time fixture freezes
    the clock so any timestamps involved are deterministic.
    """
    output_file = 'test_output.csv'

    with patch('cf_speedtest.speedtest.run_standard_test') as mock_run_test, \
            patch('builtins.open', create=True) as mock_open:
        # Canned results so main() skips real network measurement.
        mock_run_test.return_value = {
            'download_speed': 100000000,
            'upload_speed': 50000000,
            'download_stdev': 1000000,
            'upload_stdev': 500000,
            'latency_measurements': [10, 20, 30],
            'download_measurements': [90000000, 100000000, 110000000],
            'upload_measurements': [45000000, 50000000, 55000000],
        }

        speedtest.main(['--output', output_file])

    # NOTE(review): assert_called_with checks only the *last* open() call —
    # if main() opens other files afterwards this could misfire; confirm.
    mock_open.assert_called_with(output_file, 'w')

    # Defensive cleanup: with open() mocked the file should never exist,
    # so this branch is normally dead code.
    if os.path.exists(output_file):
        os.remove(output_file)
30 changes: 30 additions & 0 deletions tests/utils_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
from __future__ import annotations

import pytest

from cf_speedtest import speedtest


@pytest.mark.parametrize(
    'data, percentile, expected', [
        ([1, 2, 3, 4, 5], 50, 3),
        ([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 90, 9),
        ([1, 1, 1, 1, 1], 100, 1),
        ([1, 2, 3, 4, 5], 0, 1),
    ],
)
def test_percentile(data, percentile, expected):
    """percentile() should match nearest-rank values, including percentile 0."""
    observed = speedtest.percentile(data, percentile)
    assert observed == expected


@pytest.mark.parametrize(
    'server_timing, expected', [
        ('dur=1234.5', 1.2345),
        ('key=value;dur=5678.9', 5.6789),
        ('invalid', 0.0),
        ('dur=1000', 1.0),
        ('start=0;dur=500;desc="Backend"', 0.5),
    ],
)
def test_get_server_timing(server_timing, expected):
    """get_server_timing() should extract the dur= value, converted to seconds."""
    observed = speedtest.get_server_timing(server_timing)
    assert observed == expected
Loading