Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
306 changes: 265 additions & 41 deletions cf_speedtest/locations.py

Large diffs are not rendered by default.

16 changes: 14 additions & 2 deletions cf_speedtest/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,10 +69,22 @@ def add_run_options(parser: argparse.ArgumentParser) -> argparse.ArgumentParser:
)

parser.add_argument(
'--testpatience',
'--testpatience', '-t',
type=int,
default=20,
help='The longest time to wait for an individual test to run',
help='The longest time to wait for an individual test to run. NOTICE: When used with --disableskipping, --testpatience will be ignored',
)

parser.add_argument(
'--disableskipping', '-s',
action='store_true',
help='Dont skip any speed test. This will ignore any --testpatience setting ',
)

parser.add_argument(
'--json', '-j',
action='store_true',
help='Output results of tests in JSON format. NOTICE: When using this option, output will be delayed until the execution is finished',
)

return parser
97 changes: 69 additions & 28 deletions cf_speedtest/speedtest.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import argparse
import math
import statistics
import json
import time
from timeit import default_timer as timer

Expand All @@ -29,23 +30,32 @@
PROXY_DICT = None
VERIFY_SSL = True
OUTPUT_FILE = None
JSON_STDOUT = {}

# Could use python's statistics library, but quantiles are only available
# in version 3.8 and above


def percentile(data: list, percentile: int) -> float:
    """Return the value at *percentile* of *data* using the nearest-rank method.

    A percentile of 0 yields the minimum; otherwise the element at rank
    ceil(len(data) * percentile / 100) of the sorted data is returned.
    """
    if percentile == 0:
        return min(data)
    ordered = sorted(data)
    rank = math.ceil(len(ordered) * percentile / 100)
    return ordered[rank - 1]

# returns ms of how long cloudflare took to process the request, this is in the Server-Timing header


def get_server_timing(server_timing: str) -> float:
split = server_timing.split(';')
for part in split:
for part in server_timing.split(';'):
if 'dur=' in part:
return float(part.split('=')[1]) / 1000
try:
return float(part.split('=')[1]) / 1000
except (IndexError, ValueError):
try:
return float(part.split(',')[0].split('=')[1]) / 1000
except (IndexError, ValueError):
pass
return 0.0

# given an amount of bytes, upload it and return the elapsed seconds taken

Expand Down Expand Up @@ -135,7 +145,7 @@ def get_our_country() -> str:
return cgi_dict.get('loc') or 'Unknown'


def preamble() -> str:
def preamble(json_output) -> str:
r = REQ_SESSION.get(
DOWNLOAD_ENDPOINT.format(
0,
Expand All @@ -153,9 +163,12 @@ def preamble() -> str:
) or 'Unknown' for loc in locations.SERVER_LOCATIONS if loc['iata'] == colo.upper()
), 'Unknown',
)
preamble_str = f'Your IP:\t{our_ip} ({get_our_country()})\nServer loc:\t{server_city} ({colo}) - ({server_country})'

return preamble_str
if json_output:
JSON_STDOUT['location'] = {'my_ip_addr': our_ip, "country": get_our_country(), 'server_city': server_city,
'colocation': colo, 'server_country': server_country}
else:
preamble_str = f'Your IP:\t{our_ip} ({get_our_country()})\nServer loc:\t{server_city} ({colo}) - ({server_country})'
return preamble_str

# runs x amount of y-byte tests, given a test_type ("down" or "up")
# returns a list of measurements in bits per second
Expand Down Expand Up @@ -183,6 +196,8 @@ def run_tests(test_type: str, bytes_to_xfer: int, iteration_count: int = 8) -> l

def run_standard_test(
measurement_sizes: list,
disable_tests_skip: bool,
json_output: bool,
measurement_percentile: int = 90,
verbose: bool = False,
test_patience: int = 15,
Expand All @@ -192,7 +207,10 @@ def run_standard_test(
UPLOAD_MEASUREMENTS = []

if verbose:
print(preamble(), '\n')
if json_output:
preamble(json_output)
else:
print(preamble(json_output), '\n')

latency_test() # ignore first request as it contains http connection setup
for i in range(0, 20):
Expand All @@ -202,9 +220,13 @@ def run_standard_test(
latency = percentile(LATENCY_MEASUREMENTS, 50)
jitter = statistics.stdev(LATENCY_MEASUREMENTS)
if verbose:
print(f"{'Latency:':<16} {latency:.2f} ms")
print(f"{'Jitter:':<16} {jitter:.2f} ms")
print('Running speed tests...\n')
if json_output:
JSON_STDOUT['latency'] = f"{latency:.2f}"
JSON_STDOUT['jitter'] = f"{jitter:.2f}"
else:
print(f"{'Latency:':<16} {latency:.2f} ms")
print(f"{'Jitter:':<16} {jitter:.2f} ms")
print('Running speed tests...\n')

first_dl_test, first_ul_test = True, True
continue_dl_test, continue_ul_test = True, True
Expand All @@ -218,9 +240,11 @@ def run_standard_test(
upload_test_count = (-2 * i + 10) # this is how the website does it
total_download_bytes = measurement * download_test_count
total_upload_bytes = measurement * upload_test_count
if not 'tests' in JSON_STDOUT:
JSON_STDOUT['tests'] = {}

if not first_dl_test:
if current_down_speed_mbps * test_patience < total_download_bytes / 125000:
if (current_down_speed_mbps * test_patience < total_download_bytes / 125000) and (not disable_tests_skip):
continue_dl_test = False
else:
first_dl_test = False
Expand All @@ -235,14 +259,19 @@ def run_standard_test(
DOWNLOAD_MEASUREMENTS, measurement_percentile,
) / 1_000_000
if verbose:
# print(f"Current down: {current_down_speed_mbps:.2f} Mbit/sec")
print(
f"{'Current speeds:':<24} {'Down: '}{current_down_speed_mbps:.2f} Mbit/sec\t"
f"{'Up: '}{current_up_speed_mbps:.2f} Mbit/sec",
)
if json_output:
if not str(measurement) in JSON_STDOUT['tests']:
JSON_STDOUT['tests'][str(measurement)] = {}
JSON_STDOUT['tests'][str(measurement)]['download'] = f"{current_down_speed_mbps:.2f}"
else:
# print(f"Current down: {current_down_speed_mbps:.2f} Mbit/sec")
print(
f"{'Current speeds:':<24} {'Down: '}{current_down_speed_mbps:.2f} Mbit/sec\t"
f"{'Up: '}{current_up_speed_mbps:.2f} Mbit/sec",
)

if not first_ul_test:
if current_up_speed_mbps * test_patience < total_upload_bytes / 125_000:
if (current_up_speed_mbps * test_patience < total_upload_bytes / 125_000) and (not disable_tests_skip):
continue_ul_test = False
else:
first_ul_test = False
Expand All @@ -257,11 +286,16 @@ def run_standard_test(
UPLOAD_MEASUREMENTS, measurement_percentile,
) / 1_000_000
if verbose:
# print(f"Current up: {current_up_speed_mbps:.2f} Mbit/sec")
print(
f"{'Current speeds:':<24} {'Down: '}{current_down_speed_mbps:.2f} Mbit/sec\t"
f"{'Up: '}{current_up_speed_mbps:.2f} Mbit/sec",
)
if json_output:
if not str(measurement) in JSON_STDOUT['tests']:
JSON_STDOUT['tests'][str(measurement)] = {}
JSON_STDOUT['tests'][str(measurement)]['upload'] = f"{current_up_speed_mbps:.2f}"
else:
# print(f"Current up: {current_up_speed_mbps:.2f} Mbit/sec")
print(
f"{'Current speeds:':<24} {'Down: '}{current_down_speed_mbps:.2f} Mbit/sec\t"
f"{'Up: '}{current_up_speed_mbps:.2f} Mbit/sec",
)

# all raw measurements are in bits per second
pctile_download = percentile(DOWNLOAD_MEASUREMENTS, measurement_percentile)
Expand Down Expand Up @@ -293,6 +327,8 @@ def main(argv=None) -> int:
VERIFY_SSL = args.verifyssl
OUTPUT_FILE = args.output
patience = args.testpatience
disable_tests_skip = args.disableskipping
json_output = args.json
proxy = args.proxy

# clear the output file
Expand Down Expand Up @@ -325,17 +361,22 @@ def main(argv=None) -> int:
250_000_000,
]

speeds = run_standard_test(measurement_sizes, percentile, True, patience)
speeds = run_standard_test(measurement_sizes, disable_tests_skip, json_output, percentile, True, patience)

d = speeds['download_speed']
u = speeds['upload_speed']
d_s = speeds['download_stdev'] # noqa
u_s = speeds['upload_stdev'] # noqa

print(
f"{args.percentile}{'th percentile results:':<24} Down: {d/1_000_000:.2f} Mbit/sec\t"
f'Up: {u/1_000_000:.2f} Mbit/sec',
)
if json_output:
JSON_STDOUT[f"{args.percentile}_percentile"] = {'download': f"{d/1_000_000:.2f}", 'upload': f"{u/1_000_000:.2f}"}
json_string = json.dumps(JSON_STDOUT, indent=4)
print(json_string)
else:
print(
f"{args.percentile}{'th percentile results:':<24} Down: {d/1_000_000:.2f} Mbit/sec\t"
f'Up: {u/1_000_000:.2f} Mbit/sec',
)

return 0

Expand Down
1 change: 1 addition & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
pre-commit
pytest
requests[socks]
types-requests
7 changes: 4 additions & 3 deletions setup.cfg
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[metadata]
name = cf_speedtest
version = 0.1.7
version = 0.1.9
description = Command-line internet speed test
long_description = README.md
long_description_content_type = text/markdown
Expand All @@ -17,6 +17,7 @@ classifiers =
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12

[options]
python_requires = >=3.6.0
Expand All @@ -33,5 +34,5 @@ exclude =

[options.entry_points]
console_scripts =
cf-speedtest = cf_speedtest.cf_speedtest:main
cf_speedtest = cf_speedtest.cf_speedtest:main
cf-speedtest = cf_speedtest.speedtest:main
cf_speedtest = cf_speedtest.speedtest:main
25 changes: 0 additions & 25 deletions tests/all_test.py

This file was deleted.

18 changes: 18 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from __future__ import annotations

from unittest.mock import patch

import pytest


@pytest.fixture
def mock_time():
    """Patch the speedtest module's ``time`` with a mock whose ``time()``
    returns a fixed epoch timestamp, so tests are deterministic."""
    with patch('cf_speedtest.speedtest.time') as mock_time:
        mock_time.time.return_value = 1234567890.0
        yield mock_time


@pytest.fixture
def mock_requests_session():
    """Patch the module-level requests session so tests using this fixture
    make no real network calls."""
    with patch('cf_speedtest.speedtest.REQ_SESSION') as mock_session:
        yield mock_session
58 changes: 58 additions & 0 deletions tests/integration_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
from __future__ import annotations

import csv
import os

import pytest

from cf_speedtest import speedtest


@pytest.mark.integration
def test_country():
    """get_our_country() should yield a 2-letter ISO country code, or the
    documented 'Unknown' fallback when the lookup fails."""
    country = speedtest.get_our_country()
    assert isinstance(country, str)
    # The implementation falls back to 'Unknown' (7 chars) when the trace
    # lookup has no 'loc' field, so len == 2 alone would be flaky.
    assert country == 'Unknown' or len(country) == 2


@pytest.mark.integration
def test_preamble():
    """Plain-text preamble should report both the client IP and the server
    location."""
    text = speedtest.preamble(False)
    assert isinstance(text, str)
    for expected in ('Your IP:', 'Server loc:'):
        assert expected in text


@pytest.mark.integration
def test_main():
    # Full end-to-end run with default arguments; exit code 0 signals success.
    assert speedtest.main() == 0


@pytest.mark.integration
@pytest.mark.skip(reason='will fail without proxy')
def test_proxy():
    # Exercises the --proxy option; skipped by default because the
    # hard-coded proxy address is not guaranteed to be reachable.
    assert speedtest.main(['--proxy', '100.24.216.83:80']) == 0


@pytest.mark.integration
def test_nossl():
    # Run with TLS certificate verification disabled.
    assert speedtest.main(['--verifyssl', 'False']) == 0


@pytest.mark.integration
def test_csv_output():
    """--output should produce a non-empty, parseable CSV file."""
    temp_file = 'test_output.csv'
    try:
        assert speedtest.main(['--output', temp_file]) == 0

        assert os.path.exists(temp_file)
        assert os.path.getsize(temp_file) > 0

        # newline='' is the documented way to open files for the csv module.
        with open(temp_file, newline='') as csvfile:
            try:
                # Consume every row so the parser walks the whole file; the
                # original discarded its first reader and never iterated.
                rows = list(csv.reader(csvfile))
            except csv.Error:
                pytest.fail('The output file is not a valid CSV')
            assert rows, 'CSV output contains no rows'
    finally:
        # Remove the temp file even when an assertion above fails, so a
        # failed run does not leak state into the next one.
        if os.path.exists(temp_file):
            os.remove(temp_file)
Loading