Skip to content

Commit

Permalink
e2e: test for /tokens/metadata and router/routes (#207)
Browse files Browse the repository at this point in the history
* e2e test: foundation for getting desired testing tokens based on pool type, liquidity and volume

* make astroport and transmuter helpers more useful

* clean up

* clean up

* clean up

* clean up pool types

* clean up precomputed mappings

* clean up token choice

* liq filtering with pool types and more clean up

* space

* e2e: test for /tokens/metadata and router/routes

* fetch expected number of routes from config
  • Loading branch information
p0mvn authored May 10, 2024
1 parent 9cb0a68 commit cb60ee7
Show file tree
Hide file tree
Showing 10 changed files with 264 additions and 3 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ build
go.work.sum

# Tests reports
*/__pycache__

tests/venv/*
tests/*/report.txt

24 changes: 24 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -177,3 +177,27 @@ test-prices-mainnet:
-timeout 300s \
-run TestPricingWorkerTestSuite/TestGetPrices_Chain_FindUnsupportedTokens \
github.com/osmosis-labs/sqs/tokens/usecase/pricing/worker -v -count=1

### E2E Test

# Run E2E tests in verbose mode (-s streams test stdout as it happens)
e2e-run-dev:
	pytest -s

#### E2E Python Setup

# Setup virtual environment for e2e tests
e2e-setup-venv:
	python3 -m venv tests/venv

# Activate virtual environment for e2e tests
# NOTE(review): make runs each recipe line in its own subshell, so this
# cannot activate the venv in the caller's shell — run
# `source tests/venv/bin/activate` directly instead; confirm intent.
e2e-source-venv:
	source tests/venv/bin/activate

# Install e2e test dependencies from tests/requirements.txt
e2e-install-requirements:
	pip install -r tests/requirements.txt

# Persist any new dependencies to requirements.txt
e2e-update-requirements:
	pip freeze > tests/requirements.txt
26 changes: 26 additions & 0 deletions tests/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# E2E Test

## Setup

```bash
make e2e-setup-venv

make e2e-source-venv

make e2e-install-requirements
```

## Running

```bash
# Runs the test suite in verbose mode
make e2e-run-dev
```

## Persisting New Dependencies

In Python, we need to persist any new dependencies to the `requirements.txt` file. To do this, run the following command:

```bash
make e2e-update-requirements
```
11 changes: 11 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import pytest
from sqs_service import *

# Define the environment URLs.
# All tests will be run against each URL listed in `params`.
@pytest.fixture(params=[
    SQS_STAGE,
])
def environment_url(request):
    """Parametrized fixture returning the base URL of the SQS deployment
    under test; each test taking this fixture runs once per environment."""
    return request.param
5 changes: 5 additions & 0 deletions tests/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
import setup

# Misc constants
# Chain denom for OSMO.
UOSMO = "uosmo"
# Chain denom for USDC, resolved from the display->data map built by
# `setup` at import time (NOTE(review): importing `setup` presumably
# fetches live pool/token data — confirm before offline use).
USDC = setup.display_to_data_map["usdc"]["denom"]
11 changes: 11 additions & 0 deletions tests/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
certifi==2024.2.2
charset-normalizer==3.3.2
exceptiongroup==1.2.1
idna==3.7
iniconfig==2.0.0
packaging==24.0
pluggy==1.5.0
pytest==8.2.0
requests==2.31.0
tomli==2.0.1
urllib3==2.2.1
6 changes: 3 additions & 3 deletions tests/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,6 @@ class E2EPoolType(IntEnum):
NumiaPoolType.CONCENTRATED.value: E2EPoolType.CONCENTRATED
}

# Misc constants
UOSMO = "uosmo"


def get_e2e_pool_type_from_numia_pool(pool):
"""Gets an e2e pool type from a Numia pool."""
Expand Down Expand Up @@ -242,3 +239,6 @@ def chain_denoms_to_display(chain_denoms):

# Create a map of pool type to pool data
pool_type_to_denoms = map_pool_type_to_pool_data(all_pools_data)

# Create a map of pool ID to pool data
# NOTE(review): pools lacking a 'pool_id' key all collapse onto the `None`
# key (last one wins) — confirm every Numia pool carries 'pool_id'.
pool_by_id_map = {pool.get('pool_id'): pool for pool in all_pools_data}
64 changes: 64 additions & 0 deletions tests/sqs_service.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import requests

# Base URL of the staging SQS deployment.
SQS_STAGE = "https://sqs.stage.osmosis.zone"

# Endpoint returning candidate routes between two denoms.
ROUTER_ROUTES_URL = "/router/routes"

# Endpoint returning metadata for all supported tokens.
TOKENS_METADATA_URL = "/tokens/metadata"

# Endpoint returning the deployment configuration.
CONFIG_URL = "/config"

class SQSService:
    """Thin client for the SQS (Sidecar Query Server) REST API.

    Caches the /config and /tokens/metadata responses for the lifetime of
    the instance so repeated calls do not re-hit the server.
    """

    def __init__(self, url):
        # Base URL of the SQS deployment, e.g. SQS_STAGE.
        self.url = url
        # Lazily-populated caches; None means "not fetched yet".
        self.tokens_metadata = None
        self.config = None

    def _fetch_json(self, endpoint, what):
        """GET `self.url + endpoint` and return the parsed JSON body.

        `what` names the resource for the error message. Raises Exception
        with the response text if a non-200 status is returned.
        """
        response = requests.get(self.url + endpoint)

        if response.status_code != 200:
            raise Exception(f"Error fetching {what}: {response.text}")

        return response.json()

    def get_config(self):
        """
        Fetches the config from the specified endpoint and returns it.
        Caches it internally to avoid fetching it multiple times.
        Raises error if non-200 is returned from the endpoint.
        """
        # `is None` (not truthiness) so an empty-but-valid cached payload
        # is not re-fetched.
        if self.config is None:
            self.config = self._fetch_json(CONFIG_URL, "config")

        return self.config

    def get_candidate_routes(self, denom_in, denom_out, human_denoms="false"):
        """
        Queries /router/routes for candidate routes between the given denoms.
        Returns the raw `requests` response so callers can inspect both the
        status code and the body.
        """
        # Set the query parameters
        params = {
            "tokenIn": denom_in,
            "tokenOutDenom": denom_out,
            "humanDenoms": human_denoms,
        }

        # Send the GET request
        return requests.get(self.url + ROUTER_ROUTES_URL, params=params)

    def get_tokens_metadata(self):
        """
        Fetches tokens metadata from the specified endpoint and returns them.
        Caches them internally to avoid fetching them multiple times.
        Raises error if non-200 is returned from the endpoint.
        """
        if self.tokens_metadata is None:
            self.tokens_metadata = self._fetch_json(TOKENS_METADATA_URL, "tokens metadata")

        return self.tokens_metadata
103 changes: 103 additions & 0 deletions tests/test_candidate_routes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
import requests
import setup
import time

from sqs_service import *
import constants

# NOTE(review): these duplicate SQS_STAGE / ROUTER_ROUTES_URL from
# sqs_service (star-imported above) and appear unused in this module —
# consider removing.
SQS_STAGE = "https://sqs.stage.osmosis.zone"

ROUTES_URL = "/router/routes"

# Test suite for the /router/routes endpoint
class TestCandidateRoutes:
# Sanity check to ensure the test setup is correct
# before continunig with more complex test cases.
def test_usdc_uosmo(self, environment_url):
# Get max routes value from deployment config to expect the same number of candidate routes
# to be found
config = SQSService(environment_url).get_config()
expected_num_routes = config['Router']['MaxRoutes']
# Arbitrary choice based on performance at the time of test writing
expected_latency_upper_bound_ms = 300

self.run_candidate_routes_test(environment_url, constants.USDC, constants.UOSMO, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes)

# Switch token in and out denoms compared to test_usdc_uosmo
def test_uosmo_usdc(self, environment_url):
# Get max routes value from deployment config to expect the same number of candidate routes
# to be found
config = SQSService(environment_url).get_config()
expected_num_routes = config['Router']['MaxRoutes']
# Arbitrary choice based on performance at the time of test writing
expected_latency_upper_bound_ms = 300

self.run_candidate_routes_test(environment_url, constants.UOSMO, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes)

def run_candidate_routes_test(self, environment_url, token_in, token_out, expected_latency_upper_bound_ms, expected_min_routes, expected_max_routes):
"""
Runs a test for the /router/routes endpoint with the given input parameters.
Returns routes for additional validation if needed by client
Validates:
- The number of routes returned
- Following pools in each route, all tokens within these pools are present and valid
- The latency is under the given bound
"""

sqs_service = SQSService(environment_url)

start_time = time.time()
response = sqs_service.get_candidate_routes(token_in, token_out)
elapsed_time_ms = (time.time() - start_time) * 1000

assert response.status_code == 200, f"Error: {response.text}"
assert expected_latency_upper_bound_ms > elapsed_time_ms, f"Error: latency {elapsed_time_ms} exceeded {expected_latency_upper_bound_ms} ms, token in {token_in} and token out {token_out}"

response_json = response.json()
routes = response_json['Routes']

self.validate_candidate_routes(routes, token_in, token_out, expected_min_routes, expected_max_routes)

# Return routes in case additional validation is desired
return routes

def validate_candidate_routes(self, routes, token_in, token_out, expected_min_routes, expected_max_routes):
"""
Validates the given routes.
Validates:
- Following pools in each route, all tokens within these pools are present and valid
- The number of routes is within the expected range
"""
assert len(routes) <= expected_max_routes, f"Error: found more than {expected_max_routes} routes with token in {token_in} and token out {token_out}"
assert len(routes) >= expected_min_routes, f"Error: found fewer than {expected_min_routes} routes with token in {token_in} and token out {token_out}"

for route in routes:
cur_token_in = token_in

pools = route['Pools']

assert len(pools) > 0, f"Error: no pools found in route {route}"
for pool in pools:
pool_id = pool['ID']

expected_pool_data = setup.pool_by_id_map.get(pool_id)

assert expected_pool_data, f"Error: pool ID {pool_id} not found in test data"

# Extract denoms using a helper function
pool_tokens = expected_pool_data.get("pool_tokens")
denoms = setup.get_denoms_from_pool_tokens(pool_tokens)

found_denom = cur_token_in in denoms

assert found_denom, f"Error: token in {cur_token_in} not found in pool denoms {denoms}"

cur_token_out = pool['TokenOutDenom']

cur_token_in = cur_token_out

# Last token in must be equal to token out
assert cur_token_in == token_out, f"Error: last token out {cur_token_in} not equal to token out {token_out}"
15 changes: 15 additions & 0 deletions tests/test_tokens_metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
from sqs_service import *
import pytest

# Minimum number of supported tokens expected from /tokens/metadata.
# It should grow as more assets are listed.
EXPECTED_MIN_NUM_TOKENS = 250

# Tests the /tokens/metadata endpoint
class TestTokensMetadata:
    def test_token_metadata_count_above_min(self, environment_url):
        """Asserts the endpoint reports more supported tokens than the
        expected minimum (EXPECTED_MIN_NUM_TOKENS)."""
        metadata = SQSService(environment_url).get_tokens_metadata()
        token_count = len(metadata)

        assert token_count > EXPECTED_MIN_NUM_TOKENS, f"Token metadata count was {token_count} - expected at least {EXPECTED_MIN_NUM_TOKENS}"

0 comments on commit cb60ee7

Please sign in to comment.