From 5eb8385aa3db217588ff1b3c316f4d961ce1fa69 Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 00:13:58 +0000 Subject: [PATCH 01/19] e2e test: foundation for getting desired testing tokens based on pool type, liquidity and volume --- tests/data_service.py | 56 ++++++++++++ tests/main.py | 14 +++ tests/setup.py | 194 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 264 insertions(+) create mode 100644 tests/data_service.py create mode 100644 tests/main.py create mode 100644 tests/setup.py diff --git a/tests/data_service.py b/tests/data_service.py new file mode 100644 index 000000000..cfecee903 --- /dev/null +++ b/tests/data_service.py @@ -0,0 +1,56 @@ +import requests + +# Endpoint URLs +NUMIA_API_URL = 'https://data.numia-stage.osmosis.zone' +TOKENS_ENDPOINT = '/tokens/v2/all' +POOLS_ENDPOINT = '/stream/pool/v1/all' + +def fetch_tokens(): + """Fetches all tokens from the specified endpoint and returns them.""" + url = NUMIA_API_URL + TOKENS_ENDPOINT + try: + response = requests.get(url) + response.raise_for_status() # Raise an error for unsuccessful requests + tokens = response.json() + return tokens + except requests.exceptions.RequestException as e: + print(f"Error fetching data from Numia: {e}") + return [] + +def fetch_pools(): + """Fetches all pools by iterating through paginated results and caches the results in memory.""" + global cached_pool_data + url = NUMIA_API_URL + POOLS_ENDPOINT + all_pools = [] + next_offset = 0 + batch_size = 100 # Adjust if a different pagination size is needed + + while True: + params = {'offset': next_offset, 'limit': batch_size} + try: + response = requests.get(url, params=params) + response.raise_for_status() + data = response.json() + pools = data.get('pools', []) + pagination = data.get('pagination', {}) + + # Add this batch to the accumulated pool data + all_pools.extend(pools) + + # Determine if more pools are available + next_offset = pagination.get('next_offset') + if not next_offset: + break + + 
except requests.exceptions.RequestException as e: + print(f"Error fetching data from Numia: {e}") + break + + # retunr the pool data + return all_pools + +# Fetch all token data once +all_tokens_data = fetch_tokens() + +# Fetch all pools data once +all_pools_data = fetch_pools() diff --git a/tests/main.py b/tests/main.py new file mode 100644 index 000000000..6ed648c21 --- /dev/null +++ b/tests/main.py @@ -0,0 +1,14 @@ +from setup import * + +# DEMO - to be removed in a subsequent PR +print("Transmuter ", chain_denoms_to_display(choose_transmuter_tokens_by_liq_asc())) + +print("Astroport PCL ", chain_denoms_to_display(choose_pcl_tokens_by_liq_asc())) + +print("Top Liquidity Token ", chain_denoms_to_display(choose_tokens_liq_range())) + +print("Top Volume Token ", chain_denoms_to_display(choose_tokens_volume_range())) + +print("2 Low Liquidity Tokens ", chain_denoms_to_display(choose_tokens_liq_range(2, 5000, 10000))) + +print("3 Low Volume Tokens ", chain_denoms_to_display(choose_tokens_volume_range(3, 5000, 10000))) diff --git a/tests/setup.py b/tests/setup.py new file mode 100644 index 000000000..e6505cdc5 --- /dev/null +++ b/tests/setup.py @@ -0,0 +1,194 @@ +import copy +from data_service import all_tokens_data, all_pools_data + +# Numia pool type constants +NUMIA_POOL_TYPE_BALANCER = 'osmosis.gamm.v1beta1.Pool' +NUMIA_POOL_TYPE_STABLESWAP = 'osmosis.gamm.poolmodels.stableswap.v1beta1.Pool' +NUMIA_POOL_TYPE_CONCENTRATED = 'osmosis.concentratedliquidity.v1beta1.Pool' +NUMIA_POOL_TYPE_COSMWASM = 'osmosis.cosmwasmpool.v1beta1.CosmWasmPool' + +# Cosmwasm pool code IDs +TRANSMUTER_CODE_ID = 148 +ASTROPORT_CODE_ID = 773 + +# Local e2e pool types +# We define our own pool types for convinience and consistency across e2e tests +E2E_POOL_TYPE_BALANCER = 0 +E2E_POOL_TYPE_STABLESWAP = 1 +E2E_POOL_TYPE_CONCENTRATED = 2 +E2E_POOL_TYPE_COSMWASM_MISC = 3 +E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1 = 4 +E2E_POOL_TYPE_COSMWASM_ASTROPORT = 5 + +# Misc constants +UOSMO = "uosmo" + +def 
get_e2e_pool_type_from_numia_pool(pool): + """Gets an e2e pool type from Numia pool.""" + + numia_pool_type = pool.get("type") + + e2e_pool_type = None + if numia_pool_type == NUMIA_POOL_TYPE_BALANCER: + e2e_pool_type = E2E_POOL_TYPE_BALANCER + elif numia_pool_type == NUMIA_POOL_TYPE_STABLESWAP: + e2e_pool_type = E2E_POOL_TYPE_STABLESWAP + elif numia_pool_type == NUMIA_POOL_TYPE_CONCENTRATED: + e2e_pool_type = E2E_POOL_TYPE_CONCENTRATED + elif numia_pool_type == NUMIA_POOL_TYPE_COSMWASM: + pool_code_id = int(pool.get('code_id')) + if pool_code_id == TRANSMUTER_CODE_ID: + e2e_pool_type = E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1 + elif pool_code_id == ASTROPORT_CODE_ID: + e2e_pool_type = E2E_POOL_TYPE_COSMWASM_ASTROPORT + else: + e2e_pool_type = E2E_POOL_TYPE_COSMWASM_MISC + else: + raise ValueError(f"Unknown pool type: {numia_pool_type}") + + return e2e_pool_type + +def map_pool_type_to_denoms(pool_data): + """Returns a dictionary mapping each pool type to associated denoms from pool_tokens.""" + if not pool_data: + return {} + + # Create the mapping + pool_type_to_denoms = {} + for pool in pool_data: + # Convert the Numia pool type to an e2e pool type + e2e_pool_type = get_e2e_pool_type_from_numia_pool(pool) + + denoms = [] + + # Check if `pool_tokens` is a list or dictionary + pool_tokens = pool.get("pool_tokens") + if isinstance(pool_tokens, dict): # Handles the dictionary case (asset0/asset1) + if 'asset0' in pool_tokens: + denoms.append(pool_tokens['asset0'].get('denom')) + if 'asset1' in pool_tokens: + denoms.append(pool_tokens['asset1'].get('denom')) + elif isinstance(pool_tokens, list): # Handles the list case (array of assets) + denoms = [token.get('denom') for token in pool_tokens if 'denom' in token] + + # Add or update the set of denoms for this pool type + if e2e_pool_type not in pool_type_to_denoms: + pool_type_to_denoms[e2e_pool_type] = set() + pool_type_to_denoms[e2e_pool_type].update(denoms) + + + # Precompute a lookup of denom to liquidity values + 
denom_liquidity = { + denom: float(data.get('liquidity', 0)) + for denom, data in chain_denom_to_data_map.items() + } + + # Sort denoms within each pool type using precomputed liquidity values + for pool_type, denoms in pool_type_to_denoms.items(): + # Create a list of (denom, liquidity) tuples for sorting + denom_liquidity_pairs = [(denom, denom_liquidity.get(denom, 0)) for denom in denoms] + + # Sort by liquidity in descending order + sorted_denoms = [denom for denom, _ in sorted(denom_liquidity_pairs, key=lambda x: x[1], reverse=True)] + pool_type_to_denoms[pool_type] = sorted_denoms + + return pool_type_to_denoms + +def filter_neq(source_list, neq_value): + """Keeps items that are not equal to value.""" + return [element for element in source_list if element != neq_value] + +def create_display_to_data_map(tokens_data): + """Function to map display field to the data of that token.""" + + display_map = {} + for token in tokens_data: + display_field = token.get('display') + if display_field: + display_map[display_field] = token + return display_map + +def create_chain_denom_to_data_map(tokens_data): + """Function to map chain denom the data of that token.""" + + display_map = {} + for token in tokens_data: + display_field = token.get('denom') + if display_field: + display_map[display_field] = token + return display_map + +def get_token_data_copy(): + """Return deep copy of all tokens.""" + return copy.deepcopy(all_tokens_data) + + +def choose_tokens_liq_range(num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose tokens based on liquidity.""" + tokens = get_token_data_copy() + filtered_tokens = [ + t['denom'] for t in tokens if 'liquidity' in t and t['liquidity'] is not None and min_liq <= t['liquidity'] <= max_liq + ] + sorted_tokens = sorted( + filtered_tokens, key=lambda x: next(t['liquidity'] for t in tokens if t['denom'] == x), reverse=not asc + ) + return sorted_tokens[:num_tokens] + +def choose_tokens_volume_range(num_tokens=1, 
min_vol=0, max_vol=float('inf'), asc=False): + """Function to choose tokens based on volume.""" + + tokens = get_token_data_copy() + filtered_tokens = [ + t['denom'] for t in tokens if 'volume_24h' in t and t['volume_24h'] is not None and min_vol <= t['volume_24h'] <= max_vol + ] + sorted_tokens = sorted( + filtered_tokens, key=lambda x: next(t['volume_24h'] for t in tokens if t['denom'] == x), reverse=not asc + ) + return sorted_tokens[:num_tokens] + +def choose_pool_type_tokens_by_liq_asc(pool_type, num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose tokens associated with a specific pool type based on liquidity.""" + + # Choose non-UOSMO denoms + pcl_denoms = filter_neq(pool_type_to_denoms.get(pool_type), UOSMO) + + tokens = get_token_data_copy() + + pcl_denoms_set = set(pcl_denoms) + + # Compile a list of tokens with their liquidities that are in the PCL denoms set + tokens_liq = [] + for token in tokens: + token_denom = token['denom'] + if token_denom in pcl_denoms_set: + tokens_liq.append([token_denom, token['liquidity']]) + + # Sort the tokens by liquidity in ascending order + sorted_tokens = sorted(tokens_liq, key=lambda x: x[1], reverse=not asc) + + return [pair[0] for pair in sorted_tokens[:num_tokens]] + +def choose_transmuter_tokens_by_liq_asc(num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose tokens associated with a transmuter V1 pool type based on liquidity.""" + return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1, num_tokens, min_liq, max_liq, asc) + +def choose_pcl_tokens_by_liq_asc(num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose tokens associated with an Astroport PCL pool type based on liquidity.""" + return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_ASTROPORT, num_tokens, min_liq, max_liq, asc) + +def chain_denom_to_display(chain_denom): + """Function to map chain denom to display.""" + return 
chain_denom_to_data_map.get(chain_denom, {}).get('display', chain_denom) + +def chain_denoms_to_display(chain_denoms): + """Function to map chain denoms to display.""" + return [chain_denom_to_display(denom) for denom in chain_denoms] + +# Create a map of display to token data +display_to_data_map = create_display_to_data_map(all_tokens_data) + +# Create a map of chain denom to token data +chain_denom_to_data_map = create_chain_denom_to_data_map(all_tokens_data) + +# Create a map of pool type to denoms +pool_type_to_denoms = map_pool_type_to_denoms(all_pools_data) From cba3d92cc213213a449cb9a19735f6a92c2c790f Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 00:57:31 +0000 Subject: [PATCH 02/19] make astroport and transmuter helpers more useful --- tests/main.py | 6 ++-- tests/setup.py | 81 +++++++++++++++++++------------------------------- 2 files changed, 35 insertions(+), 52 deletions(-) diff --git a/tests/main.py b/tests/main.py index 6ed648c21..06cc1adc0 100644 --- a/tests/main.py +++ b/tests/main.py @@ -1,9 +1,11 @@ from setup import * # DEMO - to be removed in a subsequent PR -print("Transmuter ", chain_denoms_to_display(choose_transmuter_tokens_by_liq_asc())) +transmuter_token_pairs = choose_transmuter_pool_tokens_by_liq_asc(1) +print("Transmuter ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in transmuter_token_pairs]) -print("Astroport PCL ", chain_denoms_to_display(choose_pcl_tokens_by_liq_asc())) +astroport_token_pairs = choose_pcl_pool_tokens_by_liq_asc(2) +print("Astroport PCL ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in astroport_token_pairs]) print("Top Liquidity Token ", chain_denoms_to_display(choose_tokens_liq_range())) diff --git a/tests/setup.py b/tests/setup.py index e6505cdc5..4a77a405c 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -48,13 +48,14 @@ def get_e2e_pool_type_from_numia_pool(pool): return e2e_pool_type -def map_pool_type_to_denoms(pool_data): - """Returns a dictionary mapping each pool 
type to associated denoms from pool_tokens.""" +def map_pool_type_to_pool_data(pool_data): + """Returns a dictionary mapping each pool type to associated ID, liquidity and tokens. + Returns {pool_type: [[pool_id, liquidity, [denoms]]]}.""" if not pool_data: return {} - # Create the mapping - pool_type_to_denoms = {} + # Create the mapping from pool type to data + pool_type_to_data = {} for pool in pool_data: # Convert the Numia pool type to an e2e pool type e2e_pool_type = get_e2e_pool_type_from_numia_pool(pool) @@ -71,28 +72,18 @@ def map_pool_type_to_denoms(pool_data): elif isinstance(pool_tokens, list): # Handles the list case (array of assets) denoms = [token.get('denom') for token in pool_tokens if 'denom' in token] + # Get the pool ID and liquidity + pool_id = pool.get('pool_id') + liquidity = pool.get('liquidity') + # Add or update the set of denoms for this pool type - if e2e_pool_type not in pool_type_to_denoms: - pool_type_to_denoms[e2e_pool_type] = set() - pool_type_to_denoms[e2e_pool_type].update(denoms) - - - # Precompute a lookup of denom to liquidity values - denom_liquidity = { - denom: float(data.get('liquidity', 0)) - for denom, data in chain_denom_to_data_map.items() - } - - # Sort denoms within each pool type using precomputed liquidity values - for pool_type, denoms in pool_type_to_denoms.items(): - # Create a list of (denom, liquidity) tuples for sorting - denom_liquidity_pairs = [(denom, denom_liquidity.get(denom, 0)) for denom in denoms] - - # Sort by liquidity in descending order - sorted_denoms = [denom for denom, _ in sorted(denom_liquidity_pairs, key=lambda x: x[1], reverse=True)] - pool_type_to_denoms[pool_type] = sorted_denoms - - return pool_type_to_denoms + if e2e_pool_type not in pool_type_to_data: + pool_type_to_data[e2e_pool_type] = list() + + # Append the pool data to the list of pools for this pool type + pool_type_to_data[e2e_pool_type].append([pool_id, liquidity, denoms]) + + return pool_type_to_data def 
filter_neq(source_list, neq_value): """Keeps items that are not equal to value.""" @@ -146,35 +137,25 @@ def choose_tokens_volume_range(num_tokens=1, min_vol=0, max_vol=float('inf'), as ) return sorted_tokens[:num_tokens] -def choose_pool_type_tokens_by_liq_asc(pool_type, num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): - """Function to choose tokens associated with a specific pool type based on liquidity.""" +def choose_pool_type_tokens_by_liq_asc(pool_type, num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose pool ID and tokens associated with a specific pool type based on liquidity. + Returns [pool ID, [tokens]]""" - # Choose non-UOSMO denoms - pcl_denoms = filter_neq(pool_type_to_denoms.get(pool_type), UOSMO) - - tokens = get_token_data_copy() - - pcl_denoms_set = set(pcl_denoms) - - # Compile a list of tokens with their liquidities that are in the PCL denoms set - tokens_liq = [] - for token in tokens: - token_denom = token['denom'] - if token_denom in pcl_denoms_set: - tokens_liq.append([token_denom, token['liquidity']]) + pools_tokens_of_type = pool_type_to_denoms.get(pool_type) - # Sort the tokens by liquidity in ascending order - sorted_tokens = sorted(tokens_liq, key=lambda x: x[1], reverse=not asc) + sorted_pools = sorted(pools_tokens_of_type, key=lambda x: x[1], reverse=not asc) - return [pair[0] for pair in sorted_tokens[:num_tokens]] + return [[pool_data[1], pool_data[2]] for pool_data in sorted_pools[:num_pairs]] -def choose_transmuter_tokens_by_liq_asc(num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): - """Function to choose tokens associated with a transmuter V1 pool type based on liquidity.""" - return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1, num_tokens, min_liq, max_liq, asc) +def choose_transmuter_pool_tokens_by_liq_asc(num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose pool ID and tokens associated with a transmuter V1 pool type based 
on liquidity. + Returns [pool ID, [tokens]]""" + return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1, num_pairs, min_liq, max_liq, asc) -def choose_pcl_tokens_by_liq_asc(num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): - """Function to choose tokens associated with an Astroport PCL pool type based on liquidity.""" - return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_ASTROPORT, num_tokens, min_liq, max_liq, asc) +def choose_pcl_pool_tokens_by_liq_asc(num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose pool ID and tokens associated with a Astroport PCL pool type based on liquidity. + Returns [pool ID, [tokens]]""" + return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_ASTROPORT, num_pairs, min_liq, max_liq, asc) def chain_denom_to_display(chain_denom): """Function to map chain denom to display.""" @@ -191,4 +172,4 @@ def chain_denoms_to_display(chain_denoms): chain_denom_to_data_map = create_chain_denom_to_data_map(all_tokens_data) # Create a map of pool type to denoms -pool_type_to_denoms = map_pool_type_to_denoms(all_pools_data) +pool_type_to_denoms = map_pool_type_to_pool_data(all_pools_data) From 3972e63360749ff278007b933cf4ce895126345a Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:01:55 +0000 Subject: [PATCH 03/19] clean up --- tests/data_service.py | 1 - tests/setup.py | 8 ++------ 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/tests/data_service.py b/tests/data_service.py index cfecee903..1490429cf 100644 --- a/tests/data_service.py +++ b/tests/data_service.py @@ -19,7 +19,6 @@ def fetch_tokens(): def fetch_pools(): """Fetches all pools by iterating through paginated results and caches the results in memory.""" - global cached_pool_data url = NUMIA_API_URL + POOLS_ENDPOINT all_pools = [] next_offset = 0 diff --git a/tests/setup.py b/tests/setup.py index 4a77a405c..d4bd1346f 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -85,10 +85,6 
@@ def map_pool_type_to_pool_data(pool_data): return pool_type_to_data -def filter_neq(source_list, neq_value): - """Keeps items that are not equal to value.""" - return [element for element in source_list if element != neq_value] - def create_display_to_data_map(tokens_data): """Function to map display field to the data of that token.""" @@ -100,7 +96,7 @@ def create_display_to_data_map(tokens_data): return display_map def create_chain_denom_to_data_map(tokens_data): - """Function to map chain denom the data of that token.""" + """Function to map chain denom to the data of that token.""" display_map = {} for token in tokens_data: @@ -171,5 +167,5 @@ def chain_denoms_to_display(chain_denoms): # Create a map of chain denom to token data chain_denom_to_data_map = create_chain_denom_to_data_map(all_tokens_data) -# Create a map of pool type to denoms +# Create a map of pool type to pool data pool_type_to_denoms = map_pool_type_to_pool_data(all_pools_data) From 7e40e5e024bd0bc854f183e31a49c8de41f929ca Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:13:18 +0000 Subject: [PATCH 04/19] clean up --- tests/data_service.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/data_service.py b/tests/data_service.py index 1490429cf..685926c39 100644 --- a/tests/data_service.py +++ b/tests/data_service.py @@ -18,7 +18,7 @@ def fetch_tokens(): return [] def fetch_pools(): - """Fetches all pools by iterating through paginated results and caches the results in memory.""" + """Fetches all pools by iterating through paginated results.""" url = NUMIA_API_URL + POOLS_ENDPOINT all_pools = [] next_offset = 0 @@ -45,7 +45,6 @@ def fetch_pools(): print(f"Error fetching data from Numia: {e}") break - # retunr the pool data return all_pools # Fetch all token data once From c0abf6204d78a729ea0b4241d30df062d71003a5 Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:16:44 +0000 Subject: [PATCH 05/19] clean up --- tests/setup.py | 57 
+++++++++++++++++++++++++++++++++----------------- 1 file changed, 38 insertions(+), 19 deletions(-) diff --git a/tests/setup.py b/tests/setup.py index d4bd1346f..35a68b3f0 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -48,39 +48,58 @@ def get_e2e_pool_type_from_numia_pool(pool): return e2e_pool_type +def get_denoms_from_pool_tokens(pool_tokens): + """ + Extracts and returns the list of denoms from the `pool_tokens` field, + handling both dictionary (asset0/asset1) and list formats. + """ + denoms = [] + + # Concentrated pool type + if isinstance(pool_tokens, dict): # Dictionary case + denoms.extend([pool_tokens.get('asset0', {}).get('denom'), pool_tokens.get('asset1', {}).get('denom')]) + denoms = [denom for denom in denoms if denom] # Remove None values + + # All other types + elif isinstance(pool_tokens, list): # List case + denoms = [token.get('denom') for token in pool_tokens if 'denom' in token] + + return denoms + + def map_pool_type_to_pool_data(pool_data): - """Returns a dictionary mapping each pool type to associated ID, liquidity and tokens. - Returns {pool_type: [[pool_id, liquidity, [denoms]]]}.""" + """ + Returns a dictionary mapping each pool type to associated ID, liquidity, and tokens. + + Example output: + { + "e2e_pool_type_1": [[pool_id, liquidity, [denoms]]], + "e2e_pool_type_2": [[pool_id, liquidity, [denoms]], ...], + ... 
+ } + """ if not pool_data: return {} - # Create the mapping from pool type to data pool_type_to_data = {} + for pool in pool_data: - # Convert the Numia pool type to an e2e pool type + # Convert Numia pool type to e2e pool type e2e_pool_type = get_e2e_pool_type_from_numia_pool(pool) - denoms = [] - - # Check if `pool_tokens` is a list or dictionary + # Extract denoms using a helper function pool_tokens = pool.get("pool_tokens") - if isinstance(pool_tokens, dict): # Handles the dictionary case (asset0/asset1) - if 'asset0' in pool_tokens: - denoms.append(pool_tokens['asset0'].get('denom')) - if 'asset1' in pool_tokens: - denoms.append(pool_tokens['asset1'].get('denom')) - elif isinstance(pool_tokens, list): # Handles the list case (array of assets) - denoms = [token.get('denom') for token in pool_tokens if 'denom' in token] - - # Get the pool ID and liquidity + denoms = get_denoms_from_pool_tokens(pool_tokens) + + # Extract pool ID and liquidity pool_id = pool.get('pool_id') liquidity = pool.get('liquidity') - # Add or update the set of denoms for this pool type + # Initialize the pool type if not already done if e2e_pool_type not in pool_type_to_data: - pool_type_to_data[e2e_pool_type] = list() + pool_type_to_data[e2e_pool_type] = [] - # Append the pool data to the list of pools for this pool type + # Append the pool data to the list for this pool type pool_type_to_data[e2e_pool_type].append([pool_id, liquidity, denoms]) return pool_type_to_data From c4ac9636177c05a4af65b11375d74989f236249e Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:21:50 +0000 Subject: [PATCH 06/19] clean up pool types --- tests/setup.py | 71 +++++++++++++++++++++++++++----------------------- 1 file changed, 38 insertions(+), 33 deletions(-) diff --git a/tests/setup.py b/tests/setup.py index 35a68b3f0..99f708ee8 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -1,52 +1,57 @@ import copy from data_service import all_tokens_data, all_pools_data +from enum import Enum, IntEnum 
-# Numia pool type constants -NUMIA_POOL_TYPE_BALANCER = 'osmosis.gamm.v1beta1.Pool' -NUMIA_POOL_TYPE_STABLESWAP = 'osmosis.gamm.poolmodels.stableswap.v1beta1.Pool' -NUMIA_POOL_TYPE_CONCENTRATED = 'osmosis.concentratedliquidity.v1beta1.Pool' -NUMIA_POOL_TYPE_COSMWASM = 'osmosis.cosmwasmpool.v1beta1.CosmWasmPool' +# Numia pool type constants using an Enum +class NumiaPoolType(Enum): + BALANCER = 'osmosis.gamm.v1beta1.Pool' + STABLESWAP = 'osmosis.gamm.poolmodels.stableswap.v1beta1.Pool' + CONCENTRATED = 'osmosis.concentratedliquidity.v1beta1.Pool' + COSMWASM = 'osmosis.cosmwasmpool.v1beta1.CosmWasmPool' -# Cosmwasm pool code IDs +# Cosmwasm pool code IDs using standard constants TRANSMUTER_CODE_ID = 148 ASTROPORT_CODE_ID = 773 -# Local e2e pool types -# We define our own pool types for convinience and consistency across e2e tests -E2E_POOL_TYPE_BALANCER = 0 -E2E_POOL_TYPE_STABLESWAP = 1 -E2E_POOL_TYPE_CONCENTRATED = 2 -E2E_POOL_TYPE_COSMWASM_MISC = 3 -E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1 = 4 -E2E_POOL_TYPE_COSMWASM_ASTROPORT = 5 +# Local e2e pool types using an IntEnum for convenience +class E2EPoolType(IntEnum): + BALANCER = 0 + STABLESWAP = 1 + CONCENTRATED = 2 + COSMWASM_MISC = 3 + COSMWASM_TRANSMUTER_V1 = 4 + COSMWASM_ASTROPORT = 5 + +# Mapping from Numia pool types to e2e pool types +NUMIA_TO_E2E_MAP = { + NumiaPoolType.BALANCER.value: E2EPoolType.BALANCER, + NumiaPoolType.STABLESWAP.value: E2EPoolType.STABLESWAP, + NumiaPoolType.CONCENTRATED.value: E2EPoolType.CONCENTRATED +} # Misc constants UOSMO = "uosmo" def get_e2e_pool_type_from_numia_pool(pool): - """Gets an e2e pool type from Numia pool.""" - + """Gets an e2e pool type from a Numia pool.""" numia_pool_type = pool.get("type") - e2e_pool_type = None - if numia_pool_type == NUMIA_POOL_TYPE_BALANCER: - e2e_pool_type = E2E_POOL_TYPE_BALANCER - elif numia_pool_type == NUMIA_POOL_TYPE_STABLESWAP: - e2e_pool_type = E2E_POOL_TYPE_STABLESWAP - elif numia_pool_type == NUMIA_POOL_TYPE_CONCENTRATED: - e2e_pool_type 
= E2E_POOL_TYPE_CONCENTRATED - elif numia_pool_type == NUMIA_POOL_TYPE_COSMWASM: + # Direct mapping for common pool types + if numia_pool_type in NUMIA_TO_E2E_MAP: + return NUMIA_TO_E2E_MAP[numia_pool_type] + + # Special handling for CosmWasm pools based on code_id + if numia_pool_type == NumiaPoolType.COSMWASM.value: pool_code_id = int(pool.get('code_id')) if pool_code_id == TRANSMUTER_CODE_ID: - e2e_pool_type = E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1 + return E2EPoolType.COSMWASM_TRANSMUTER_V1 elif pool_code_id == ASTROPORT_CODE_ID: - e2e_pool_type = E2E_POOL_TYPE_COSMWASM_ASTROPORT + return E2EPoolType.COSMWASM_ASTROPORT else: - e2e_pool_type = E2E_POOL_TYPE_COSMWASM_MISC - else: - raise ValueError(f"Unknown pool type: {numia_pool_type}") - - return e2e_pool_type + return E2EPoolType.COSMWASM_MISC + + # Raise an error for unknown pool types + raise ValueError(f"Unknown pool type: {numia_pool_type}") def get_denoms_from_pool_tokens(pool_tokens): """ @@ -165,12 +170,12 @@ def choose_pool_type_tokens_by_liq_asc(pool_type, num_pairs=1, min_liq=0, max_li def choose_transmuter_pool_tokens_by_liq_asc(num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): """Function to choose pool ID and tokens associated with a transmuter V1 pool type based on liquidity. Returns [pool ID, [tokens]]""" - return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_TRANSMUTER_V1, num_pairs, min_liq, max_liq, asc) + return choose_pool_type_tokens_by_liq_asc(E2EPoolType.COSMWASM_TRANSMUTER_V1, num_pairs, min_liq, max_liq, asc) def choose_pcl_pool_tokens_by_liq_asc(num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): """Function to choose pool ID and tokens associated with a Astroport PCL pool type based on liquidity. 
Returns [pool ID, [tokens]]""" - return choose_pool_type_tokens_by_liq_asc(E2E_POOL_TYPE_COSMWASM_ASTROPORT, num_pairs, min_liq, max_liq, asc) + return choose_pool_type_tokens_by_liq_asc(E2EPoolType.COSMWASM_ASTROPORT, num_pairs, min_liq, max_liq, asc) def chain_denom_to_display(chain_denom): """Function to map chain denom to display.""" From 3bfb7300c9952ac17aaf262488676d9ab57b7e48 Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:23:29 +0000 Subject: [PATCH 07/19] clean up precomputed mappings --- tests/setup.py | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/tests/setup.py b/tests/setup.py index 99f708ee8..18029060d 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -109,25 +109,28 @@ def map_pool_type_to_pool_data(pool_data): return pool_type_to_data -def create_display_to_data_map(tokens_data): - """Function to map display field to the data of that token.""" +def create_field_to_data_map(tokens_data, key_field): + """Maps a specified key field to the data of that token. + + Args: + tokens_data (list): List of token data dictionaries. + key_field (str): The field to use as the mapping key. - display_map = {} + Returns: + dict: A dictionary mapping the specified key field to the token data. 
+ """ + mapping = {} for token in tokens_data: - display_field = token.get('display') - if display_field: - display_map[display_field] = token - return display_map + key_value = token.get(key_field) + if key_value: + mapping[key_value] = token + return mapping -def create_chain_denom_to_data_map(tokens_data): - """Function to map chain denom to the data of that token.""" +def create_display_to_data_map(tokens_data): + return create_field_to_data_map(tokens_data, 'display') - display_map = {} - for token in tokens_data: - display_field = token.get('denom') - if display_field: - display_map[display_field] = token - return display_map +def create_chain_denom_to_data_map(tokens_data): + return create_field_to_data_map(tokens_data, 'denom') def get_token_data_copy(): """Return deep copy of all tokens.""" From 7ba93c5bc0b4ddf241407104e6d041d8c8c5ad8d Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:25:34 +0000 Subject: [PATCH 08/19] clean up token choice --- tests/setup.py | 44 +++++++++++++++++++++++++++++++------------- 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/tests/setup.py b/tests/setup.py index 18029060d..ccadb5f72 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -137,28 +137,46 @@ def get_token_data_copy(): return copy.deepcopy(all_tokens_data) -def choose_tokens_liq_range(num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): - """Function to choose tokens based on liquidity.""" - tokens = get_token_data_copy() +def choose_tokens_generic(tokens, filter_key, min_value, max_value, sort_key, num_tokens=1, asc=False): + """ + A generic function to choose tokens based on given criteria. + + Args: + tokens (list): The list of token data dictionaries. + filter_key (str): The field name used to filter tokens. + min_value (float): The minimum value for filtering. + max_value (float): The maximum value for filtering. + sort_key (str): The field name used for sorting tokens. + num_tokens (int): The number of tokens to return. 
+ asc (bool): Whether to sort in ascending order. + + Returns: + list: A list of denoms matching the given criteria. + """ + # Filter tokens based on the specified filter_key range filtered_tokens = [ - t['denom'] for t in tokens if 'liquidity' in t and t['liquidity'] is not None and min_liq <= t['liquidity'] <= max_liq + t['denom'] for t in tokens if filter_key in t and t[filter_key] is not None and min_value <= t[filter_key] <= max_value ] + + # Sort tokens based on the specified sort_key sorted_tokens = sorted( - filtered_tokens, key=lambda x: next(t['liquidity'] for t in tokens if t['denom'] == x), reverse=not asc + filtered_tokens, key=lambda x: next(t[sort_key] for t in tokens if t['denom'] == x), reverse=not asc ) + return sorted_tokens[:num_tokens] + +def choose_tokens_liq_range(num_tokens=1, min_liq=0, max_liq=float('inf'), asc=False): + """Function to choose tokens based on liquidity.""" + tokens = get_token_data_copy() + return choose_tokens_generic(tokens, 'liquidity', min_liq, max_liq, 'liquidity', num_tokens, asc) + + def choose_tokens_volume_range(num_tokens=1, min_vol=0, max_vol=float('inf'), asc=False): """Function to choose tokens based on volume.""" - tokens = get_token_data_copy() - filtered_tokens = [ - t['denom'] for t in tokens if 'volume_24h' in t and t['volume_24h'] is not None and min_vol <= t['volume_24h'] <= max_vol - ] - sorted_tokens = sorted( - filtered_tokens, key=lambda x: next(t['volume_24h'] for t in tokens if t['denom'] == x), reverse=not asc - ) - return sorted_tokens[:num_tokens] + return choose_tokens_generic(tokens, 'volume_24h', min_vol, max_vol, 'volume_24h', num_tokens, asc) + def choose_pool_type_tokens_by_liq_asc(pool_type, num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): """Function to choose pool ID and tokens associated with a specific pool type based on liquidity. 
From 066523d75e56bc5de1531daf828ad9f77354fee9 Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:31:18 +0000 Subject: [PATCH 09/19] liq filtering with pool types and more clean up --- tests/main.py | 2 +- tests/setup.py | 34 ++++++++++++++++++++++++++++------ 2 files changed, 29 insertions(+), 7 deletions(-) diff --git a/tests/main.py b/tests/main.py index 06cc1adc0..b004f8c69 100644 --- a/tests/main.py +++ b/tests/main.py @@ -4,7 +4,7 @@ transmuter_token_pairs = choose_transmuter_pool_tokens_by_liq_asc(1) print("Transmuter ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in transmuter_token_pairs]) -astroport_token_pairs = choose_pcl_pool_tokens_by_liq_asc(2) +astroport_token_pairs = choose_pcl_pool_tokens_by_liq_asc(2, 1000, 10000) print("Astroport PCL ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in astroport_token_pairs]) print("Top Liquidity Token ", chain_denoms_to_display(choose_tokens_liq_range())) diff --git a/tests/setup.py b/tests/setup.py index ccadb5f72..0c7afa7bc 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -179,29 +179,51 @@ def choose_tokens_volume_range(num_tokens=1, min_vol=0, max_vol=float('inf'), as def choose_pool_type_tokens_by_liq_asc(pool_type, num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): - """Function to choose pool ID and tokens associated with a specific pool type based on liquidity. - Returns [pool ID, [tokens]]""" - - pools_tokens_of_type = pool_type_to_denoms.get(pool_type) + """ + Function to choose pool ID and tokens associated with a specific pool type based on liquidity. + + Args: + pool_type (E2EPoolType): The pool type to filter by. + num_pairs (int): The number of pool pairs to return. + min_liq (float): The minimum liquidity value. + max_liq (float): The maximum liquidity value. + asc (bool): Whether to sort in ascending or descending order. + + Returns: + list: [[pool ID, [tokens]], ...] 
+ """ + # Retrieve pools associated with the specified pool type + pools_tokens_of_type = pool_type_to_denoms.get(pool_type, []) + + # Filter pools based on the provided min_liq and max_liq values + filtered_pools = [ + pool for pool in pools_tokens_of_type if min_liq <= pool[1] <= max_liq + ] + + # Sort the filtered pools based on liquidity + sorted_pools = sorted(filtered_pools, key=lambda x: x[1], reverse=not asc) - sorted_pools = sorted(pools_tokens_of_type, key=lambda x: x[1], reverse=not asc) + # Extract only the required number of pairs + return [[pool_data[0], pool_data[2]] for pool_data in sorted_pools[:num_pairs]] - return [[pool_data[1], pool_data[2]] for pool_data in sorted_pools[:num_pairs]] def choose_transmuter_pool_tokens_by_liq_asc(num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): """Function to choose pool ID and tokens associated with a transmuter V1 pool type based on liquidity. Returns [pool ID, [tokens]]""" return choose_pool_type_tokens_by_liq_asc(E2EPoolType.COSMWASM_TRANSMUTER_V1, num_pairs, min_liq, max_liq, asc) + def choose_pcl_pool_tokens_by_liq_asc(num_pairs=1, min_liq=0, max_liq=float('inf'), asc=False): """Function to choose pool ID and tokens associated with a Astroport PCL pool type based on liquidity. 
Returns [pool ID, [tokens]]""" return choose_pool_type_tokens_by_liq_asc(E2EPoolType.COSMWASM_ASTROPORT, num_pairs, min_liq, max_liq, asc) + def chain_denom_to_display(chain_denom): """Function to map chain denom to display.""" return chain_denom_to_data_map.get(chain_denom, {}).get('display', chain_denom) + def chain_denoms_to_display(chain_denoms): """Function to map chain denoms to display.""" return [chain_denom_to_display(denom) for denom in chain_denoms] From b159f475406821c764c86cc9a565db0fb3dac870 Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 01:33:26 +0000 Subject: [PATCH 10/19] space --- tests/setup.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/setup.py b/tests/setup.py index 0c7afa7bc..02aaaae5f 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -32,6 +32,7 @@ class E2EPoolType(IntEnum): # Misc constants UOSMO = "uosmo" + def get_e2e_pool_type_from_numia_pool(pool): """Gets an e2e pool type from a Numia pool.""" numia_pool_type = pool.get("type") @@ -53,6 +54,7 @@ def get_e2e_pool_type_from_numia_pool(pool): # Raise an error for unknown pool types raise ValueError(f"Unknown pool type: {numia_pool_type}") + def get_denoms_from_pool_tokens(pool_tokens): """ Extracts and returns the list of denoms from the `pool_tokens` field, @@ -109,6 +111,7 @@ def map_pool_type_to_pool_data(pool_data): return pool_type_to_data + def create_field_to_data_map(tokens_data, key_field): """Maps a specified key field to the data of that token. 
@@ -126,12 +129,15 @@ def create_field_to_data_map(tokens_data, key_field): mapping[key_value] = token return mapping + def create_display_to_data_map(tokens_data): return create_field_to_data_map(tokens_data, 'display') + def create_chain_denom_to_data_map(tokens_data): return create_field_to_data_map(tokens_data, 'denom') + def get_token_data_copy(): """Return deep copy of all tokens.""" return copy.deepcopy(all_tokens_data) From 88bf9aa1a9c603ac7658c0fb0bb6e115e9699eaf Mon Sep 17 00:00:00 2001 From: Roman Date: Wed, 8 May 2024 18:41:54 +0000 Subject: [PATCH 11/19] e2e: test for /tokens/metadata and router/routes --- .gitignore | 2 + Makefile | 24 +++++++++ tests/README.md | 26 +++++++++ tests/conftest.py | 11 ++++ tests/constants.py | 5 ++ tests/requirements.txt | 11 ++++ tests/setup.py | 8 +-- tests/sqs_service.py | 42 +++++++++++++++ tests/test_candidate_routes.py | 99 ++++++++++++++++++++++++++++++++++ tests/test_tokens_metadata.py | 15 ++++++ 10 files changed, 239 insertions(+), 4 deletions(-) create mode 100644 tests/README.md create mode 100644 tests/conftest.py create mode 100644 tests/constants.py create mode 100644 tests/requirements.txt create mode 100644 tests/sqs_service.py create mode 100644 tests/test_candidate_routes.py create mode 100644 tests/test_tokens_metadata.py diff --git a/.gitignore b/.gitignore index 24b75702d..11db36194 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,8 @@ build go.work.sum # Tests reports +*/__pycache__ +tests/venv/* tests/*/report.txt diff --git a/Makefile b/Makefile index bb46db19e..043d24b93 100644 --- a/Makefile +++ b/Makefile @@ -177,3 +177,27 @@ test-prices-mainnet: -timeout 300s \ -run TestPricingWorkerTestSuite/TestGetPrices_Chain_FindUnsupportedTokens \ github.com/osmosis-labs/sqs/tokens/usecase/pricing/worker -v -count=1 + +### E2E Test + +# Run E2E tests in verbose mode (-s) +e2e-run-dev: + pytest -s + +#### E2E Python Setup + +# Setup virtual environment for e2e tests +e2e-setup-venv: + python3 
-m venv tests/venv + +# Activate virtual environment for e2e tests +e2e-source-venv: + source tests/venv/bin/activate + +# +e2e-install-requirements: + pip install -r tests/requirements.txt + +# Persist any new dependencies to requirements.txt +e2e-update-requirements: + pip freeze > tests/requirements.txt diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 000000000..13e40ea84 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,26 @@ +# E2E Test + +## Setup + +```bash +make e2e-setup-venv + +make e2e-source-venv + +make e2e-install-requirements +``` + +## Running + +```bash +# Runs the testsuite in verbose mode +make e2e-run-dev +``` + +## Persisting New Dependencies + +In Python, we need to persist any new dependencies to the `requirements.txt` file. To do this, run the following command: + +```bash +make e2e-update-requirements +``` \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..522ebdde0 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,11 @@ +import pytest +from sqs_service import * + +# Define the environment URLs +# All tests will be run against these URLs +@pytest.fixture(params=[ + SQS_STAGE, +]) + +def environment_url(request): + return request.param \ No newline at end of file diff --git a/tests/constants.py b/tests/constants.py new file mode 100644 index 000000000..584f82d2f --- /dev/null +++ b/tests/constants.py @@ -0,0 +1,5 @@ +import setup + +# Misc constants +UOSMO = "uosmo" +USDC = setup.display_to_data_map["usdc"]["denom"] diff --git a/tests/requirements.txt b/tests/requirements.txt new file mode 100644 index 000000000..df3c49320 --- /dev/null +++ b/tests/requirements.txt @@ -0,0 +1,11 @@ +certifi==2024.2.2 +charset-normalizer==3.3.2 +exceptiongroup==1.2.1 +idna==3.7 +iniconfig==2.0.0 +packaging==24.0 +pluggy==1.5.0 +pytest==8.2.0 +requests==2.31.0 +tomli==2.0.1 +urllib3==2.2.1 diff --git a/tests/setup.py b/tests/setup.py index 02aaaae5f..9ef0289d9 100644 --- 
a/tests/setup.py +++ b/tests/setup.py @@ -29,9 +29,6 @@ class E2EPoolType(IntEnum): NumiaPoolType.CONCENTRATED.value: E2EPoolType.CONCENTRATED } -# Misc constants -UOSMO = "uosmo" - def get_e2e_pool_type_from_numia_pool(pool): """Gets an e2e pool type from a Numia pool.""" @@ -149,7 +146,7 @@ def choose_tokens_generic(tokens, filter_key, min_value, max_value, sort_key, nu Args: tokens (list): The list of token data dictionaries. - filter_key (str): The field name used to filter tokens. + filter_key (str): The field name sed to filter tokens. min_value (float): The minimum value for filtering. max_value (float): The maximum value for filtering. sort_key (str): The field name used for sorting tokens. @@ -242,3 +239,6 @@ def chain_denoms_to_display(chain_denoms): # Create a map of pool type to pool data pool_type_to_denoms = map_pool_type_to_pool_data(all_pools_data) + +# Create a map of pool ID to pool data +pool_by_id_map = {pool.get('pool_id'): pool for pool in all_pools_data} diff --git a/tests/sqs_service.py b/tests/sqs_service.py new file mode 100644 index 000000000..7f6127dc4 --- /dev/null +++ b/tests/sqs_service.py @@ -0,0 +1,42 @@ +import requests + +SQS_STAGE = "https://sqs.stage.osmosis.zone" + +ROUTER_ROUTES_URL = "/router/routes" + +TOKENS_METADATA_URL = "/tokens/metadata" + +class SQSService: + def __init__(self, url): + self.url = url + self.tokens_metadata = None + + def get_candidate_routes(self, denom_in, denom_out, human_denoms="false"): + # Set the query parameters + params = { + "tokenIn": denom_in, + "tokenOutDenom": denom_out, + "humanDenoms": human_denoms + } + + # Send the GET request + return requests.get(self.url + ROUTER_ROUTES_URL, params=params) + + def get_tokens_metadata(self): + """ + Fetches tokens metadata from the specified endpoint and returns them. + Caches them internally to avoid fetching them multiple times. + + Raises error if non-200 is returned from the endpoint. 
+ """ + if self.tokens_metadata: + return self.tokens_metadata + + response = requests.get(self.url + TOKENS_METADATA_URL) + + if response.status_code != 200: + raise Exception(f"Error fetching tokens metadata: {response.text}") + + self.tokens_metadata = response.json() + + return self.tokens_metadata \ No newline at end of file diff --git a/tests/test_candidate_routes.py b/tests/test_candidate_routes.py new file mode 100644 index 000000000..da22c5fd2 --- /dev/null +++ b/tests/test_candidate_routes.py @@ -0,0 +1,99 @@ +import requests +import setup +import time + +from sqs_service import * +import constants + +SQS_STAGE = "https://sqs.stage.osmosis.zone" + +ROUTES_URL = "/router/routes" + +# Test suite for the /router/routes endpoint +class TestCandidateRoutes: + # Sanity check to ensure the test setup is correct + # before continunig with more complex test cases. + def test_usdc_uosmo(self, environment_url): + # Defined by the config.json. + # TODO: read max-routes config from /config endpoint to get this value + expected_num_routes = 20 + expected_latency_upper_bound_ms = 300 + + self.run_candidate_routes_test(environment_url, constants.USDC, constants.UOSMO, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) + + # Switch token in and out denoms compared to test_usdc_uosmo + def test_uosmo_usdc(self, environment_url): + # Defined by the config.json. 
+ # TODO: read max-routes config from /config endpoint to get this value + expected_num_routes = 20 + expected_latency_upper_bound_ms = 300 + + self.run_candidate_routes_test(environment_url, constants.UOSMO, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) + + def run_candidate_routes_test(self, environment_url, token_in, token_out, expected_latency_upper_bound_ms, expected_min_routes, expected_max_routes): + """ + Runs a test for the /router/routes endpoint with the given input parameters. + + Returns routes for additional validation if needed by client + + Validates: + - The number of routes returned + - Following pools in each route, all tokens within these pools are present and valid + - The latency is under the given bound + """ + + sqs_service = SQSService(environment_url) + + start_time = time.time() + response = sqs_service.get_candidate_routes(token_in, token_out) + elapsed_time_ms = (time.time() - start_time) * 1000 + + assert response.status_code == 200, f"Error: {response.text}" + assert expected_latency_upper_bound_ms > elapsed_time_ms, f"Error: latency {elapsed_time_ms} exceeded {expected_latency_upper_bound_ms} ms, token in {token_in} and token out {token_out}" + + response_json = response.json() + routes = response_json['Routes'] + + self.validate_candidate_routes(routes, token_in, token_out, expected_min_routes, expected_max_routes) + + # Return routes in case additional validation is desired + return routes + + def validate_candidate_routes(self, routes, token_in, token_out, expected_min_routes, expected_max_routes): + """ + Validates the given routes. 
+ + Validates: + - Following pools in each route, all tokens within these pools are present and valid + - The number of routes is within the expected range + """ + assert len(routes) <= expected_max_routes, f"Error: found more than {expected_max_routes} routes with token in {token_in} and token out {token_out}" + assert len(routes) >= expected_min_routes, f"Error: found fewer than {expected_min_routes} routes with token in {token_in} and token out {token_out}" + + for route in routes: + cur_token_in = token_in + + pools = route['Pools'] + + assert len(pools) > 0, f"Error: no pools found in route {route}" + for pool in pools: + pool_id = pool['ID'] + + expected_pool_data = setup.pool_by_id_map.get(pool_id) + + assert expected_pool_data, f"Error: pool ID {pool_id} not found in test data" + + # Extract denoms using a helper function + pool_tokens = expected_pool_data.get("pool_tokens") + denoms = setup.get_denoms_from_pool_tokens(pool_tokens) + + found_denom = cur_token_in in denoms + + assert found_denom, f"Error: token in {cur_token_in} not found in pool denoms {denoms}" + + cur_token_out = pool['TokenOutDenom'] + + cur_token_in = cur_token_out + + # Last token in must be equal to token out + assert cur_token_in == token_out, f"Error: last token out {cur_token_in} not equal to token out {token_out}" diff --git a/tests/test_tokens_metadata.py b/tests/test_tokens_metadata.py new file mode 100644 index 000000000..00c6aff25 --- /dev/null +++ b/tests/test_tokens_metadata.py @@ -0,0 +1,15 @@ +from sqs_service import * +import pytest + +# Minimum number of supported tokens expected +# It should grow as we list more assets +EXPECTED_MIN_NUM_TOKENS = 250 + +# Tests the /tokens/metadata endpoint +class TestTokensMetadata: + def test_token_metadata_count_above_min(self, environment_url): + sqs_service = SQSService(environment_url) + + tokens_metadata = sqs_service.get_tokens_metadata() + + assert len(tokens_metadata) > EXPECTED_MIN_NUM_TOKENS, f"Token metadata count was 
{len(tokens_metadata)} - expected at least {EXPECTED_MIN_NUM_TOKENS}" \ No newline at end of file From f1716af39e5462dff7389f3bfe79c7ebccd9109e Mon Sep 17 00:00:00 2001 From: Roman Date: Thu, 9 May 2024 13:51:50 +0000 Subject: [PATCH 12/19] fetch expected number of routes from config --- tests/sqs_service.py | 22 ++++++++++++++++++++++ tests/test_candidate_routes.py | 16 ++++++++++------ 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/tests/sqs_service.py b/tests/sqs_service.py index 7f6127dc4..6ba37efde 100644 --- a/tests/sqs_service.py +++ b/tests/sqs_service.py @@ -6,10 +6,32 @@ TOKENS_METADATA_URL = "/tokens/metadata" +CONFIG_URL = "/config" + class SQSService: def __init__(self, url): self.url = url self.tokens_metadata = None + self.config = None + + def get_config(self): + """ + Fetches the config from the specified endpoint and returns it. + Caches it internally to avoid fetching it multiple times. + + Raises error if non-200 is returned from the endpoint. + """ + if self.config: + return self.config + + response = requests.get(self.url + CONFIG_URL) + + if response.status_code != 200: + raise Exception(f"Error fetching config: {response.text}") + + self.config = response.json() + + return self.config def get_candidate_routes(self, denom_in, denom_out, human_denoms="false"): # Set the query parameters diff --git a/tests/test_candidate_routes.py b/tests/test_candidate_routes.py index da22c5fd2..734bf083c 100644 --- a/tests/test_candidate_routes.py +++ b/tests/test_candidate_routes.py @@ -14,18 +14,22 @@ class TestCandidateRoutes: # Sanity check to ensure the test setup is correct # before continunig with more complex test cases. def test_usdc_uosmo(self, environment_url): - # Defined by the config.json. 
- # TODO: read max-routes config from /config endpoint to get this value - expected_num_routes = 20 + # Get max routes value from deployment config to expect the same number of candidate routes + # to be found + config = SQSService(environment_url).get_config() + expected_num_routes = config['Router']['MaxRoutes'] + # Arbitrary choice based on performance at the time of test writing expected_latency_upper_bound_ms = 300 self.run_candidate_routes_test(environment_url, constants.USDC, constants.UOSMO, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) # Switch token in and out denoms compared to test_usdc_uosmo def test_uosmo_usdc(self, environment_url): - # Defined by the config.json. - # TODO: read max-routes config from /config endpoint to get this value - expected_num_routes = 20 + # Get max routes value from deployment config to expect the same number of candidate routes + # to be found + config = SQSService(environment_url).get_config() + expected_num_routes = config['Router']['MaxRoutes'] + # Arbitrary choice based on performance at the time of test writing expected_latency_upper_bound_ms = 300 self.run_candidate_routes_test(environment_url, constants.UOSMO, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) From 23be3e41df5d23684a63a0e8f9b049e11698307b Mon Sep 17 00:00:00 2001 From: Roman Date: Thu, 9 May 2024 23:23:51 +0000 Subject: [PATCH 13/19] more candidate route tests --- tests/conftest.py | 8 ++- tests/constants.py | 2 +- tests/requirements.txt | 2 + tests/setup.py | 2 + tests/test_candidate_routes.py | 121 ++++++++++++++++++++++++++++++++- tests/test_tokens_metadata.py | 3 +- 6 files changed, 132 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 522ebdde0..b85f48a31 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,8 @@ import pytest from sqs_service import 
* +SERVICE_SQS_STAGE = SQSService(SQS_STAGE) + # Define the environment URLs # All tests will be run against these URLs @pytest.fixture(params=[ @@ -8,4 +10,8 @@ ]) def environment_url(request): - return request.param \ No newline at end of file + return request.param + +SERVICE_MAP = { + SQS_STAGE: SERVICE_SQS_STAGE, +} diff --git a/tests/constants.py b/tests/constants.py index 584f82d2f..d3142a795 100644 --- a/tests/constants.py +++ b/tests/constants.py @@ -2,4 +2,4 @@ # Misc constants UOSMO = "uosmo" -USDC = setup.display_to_data_map["usdc"]["denom"] +USDC = 'ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4' diff --git a/tests/requirements.txt b/tests/requirements.txt index df3c49320..4e8d6df3c 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,11 +1,13 @@ certifi==2024.2.2 charset-normalizer==3.3.2 exceptiongroup==1.2.1 +execnet==2.1.1 idna==3.7 iniconfig==2.0.0 packaging==24.0 pluggy==1.5.0 pytest==8.2.0 +pytest-xdist==3.6.1 requests==2.31.0 tomli==2.0.1 urllib3==2.2.1 diff --git a/tests/setup.py b/tests/setup.py index 9ef0289d9..eec459a10 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -87,6 +87,8 @@ def map_pool_type_to_pool_data(pool_data): pool_type_to_data = {} + denom_to_pool_id = {} + for pool in pool_data: # Convert Numia pool type to e2e pool type e2e_pool_type = get_e2e_pool_type_from_numia_pool(pool) diff --git a/tests/test_candidate_routes.py b/tests/test_candidate_routes.py index 734bf083c..b5f2ee8bf 100644 --- a/tests/test_candidate_routes.py +++ b/tests/test_candidate_routes.py @@ -4,6 +4,7 @@ from sqs_service import * import constants +from conftest import SERVICE_MAP SQS_STAGE = "https://sqs.stage.osmosis.zone" @@ -14,9 +15,11 @@ class TestCandidateRoutes: # Sanity check to ensure the test setup is correct # before continunig with more complex test cases. 
def test_usdc_uosmo(self, environment_url): + sqs_service = SERVICE_MAP[environment_url] + # Get max routes value from deployment config to expect the same number of candidate routes # to be found - config = SQSService(environment_url).get_config() + config = sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] # Arbitrary choice based on performance at the time of test writing expected_latency_upper_bound_ms = 300 @@ -25,15 +28,72 @@ def test_usdc_uosmo(self, environment_url): # Switch token in and out denoms compared to test_usdc_uosmo def test_uosmo_usdc(self, environment_url): + sqs_service = SERVICE_MAP[environment_url] + # Get max routes value from deployment config to expect the same number of candidate routes # to be found - config = SQSService(environment_url).get_config() + config = sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] # Arbitrary choice based on performance at the time of test writing expected_latency_upper_bound_ms = 300 self.run_candidate_routes_test(environment_url, constants.UOSMO, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) + def test_all_tokens_usdc(self, environment_url): + sqs_service = SERVICE_MAP[environment_url] + + config = sqs_service.get_config() + expected_num_routes = config['Router']['MaxRoutes'] + # Arbitrary choice based on performance at the time of test writing + expected_latency_upper_bound_ms = 500 + + tokens_metadata = sqs_service.get_tokens_metadata() + + assert len(tokens_metadata) > 0, "Error: no tokens metadata found" + for denom, metadata in tokens_metadata.items(): + + # Skip unlisted tokens as they should be unsupported + if metadata['is_unlisted']: + continue + + # Set of denoms that we can skip + skip_denoms = { + # USDC.pica + 'ibc/078AD6F581E8115CDFBD8FFA29D8C71AFE250CE952AFF80040CBC64868D44AD3', + # NIM network + 
'ibc/279D69A6EF8E37456C8D2DC7A7C1C50F7A566EC4758F6DE17472A9FDE36C4426', + # DAI.pica + 'ibc/37DFAFDA529FF7D513B0DB23E9728DF9BF73122D38D46824C78BB7F91E6A736B', + # Furya + 'ibc/42D0FBF9DDC72D7359D309A93A6DF9F6FDEE3987EA1C5B3CDE95C06FCE183F12', + # frxETH.pica + 'ibc/688E70EF567E5D4BA1CF4C54BAD758C288BC1A6C8B0B12979F911A2AE95E27EC', + # MuseDAO + 'ibc/6B982170CE024689E8DD0E7555B129B488005130D4EDA426733D552D10B36D8F', + # FURY.legacy + 'ibc/7CE5F388D661D82A0774E47B5129DA51CC7129BD1A70B5FA6BCEBB5B0A2FAEAF', + # FRAX.pica, + 'ibc/9A8CBC029002DC5170E715F93FBF35011FFC9796371F59B1F3C3094AE1B453A9', + # ASTRO + 'ibc/B8C608CEE08C4F30A15A7955306F2EDAF4A02BB191CABC4185C1A57FD978DA1B', + # ASTRO.cw20 + 'ibc/C25A2303FE24B922DAFFDCE377AC5A42E5EF746806D32E2ED4B610DE85C203F7', + # bJUNO + "ibc/C2DF5C3949CA835B221C575625991F09BAB4E48FB9C11A4EE357194F736111E3", + # BSKT + 'ibc/CDD1E59BD5034C1B2597DD199782204EB397DB93200AA2E99C0AF3A66B2915FA', + # USDC = No routes with itself + constants.USDC, + # Kava SWAP - pool 1631 TODO: understand why no route + 'ibc/D6C28E07F7343360AC41E15DDD44D79701DDCA2E0C2C41279739C8D4AE5264BC', + } + + if denom in skip_denoms: + continue + + self.run_candidate_routes_test(environment_url, denom, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=1, expected_max_routes=expected_num_routes) + + def run_candidate_routes_test(self, environment_url, token_in, token_out, expected_latency_upper_bound_ms, expected_min_routes, expected_max_routes): """ Runs a test for the /router/routes endpoint with the given input parameters. 
@@ -46,7 +106,7 @@ def run_candidate_routes_test(self, environment_url, token_in, token_out, expect - The latency is under the given bound """ - sqs_service = SQSService(environment_url) + sqs_service = SERVICE_MAP[environment_url] start_time = time.time() response = sqs_service.get_candidate_routes(token_in, token_out) @@ -71,6 +131,8 @@ def validate_candidate_routes(self, routes, token_in, token_out, expected_min_ro - Following pools in each route, all tokens within these pools are present and valid - The number of routes is within the expected range """ + + assert routes is not None, f"Error: no routes found for token in {token_in} and token out {token_out}" assert len(routes) <= expected_max_routes, f"Error: found more than {expected_max_routes} routes with token in {token_in} and token out {token_out}" assert len(routes) >= expected_min_routes, f"Error: found fewer than {expected_min_routes} routes with token in {token_in} and token out {token_out}" @@ -101,3 +163,56 @@ def validate_candidate_routes(self, routes, token_in, token_out, expected_min_ro # Last token in must be equal to token out assert cur_token_in == token_out, f"Error: last token out {cur_token_in} not equal to token out {token_out}" + + +def generate_all_token_usdc_tests(environment_url): + sqs_service = SERVICE_MAP[environment_url] + + config = sqs_service.get_config() + expected_num_routes = config['Router']['MaxRoutes'] + # Arbitrary choice based on performance at the time of test writing + expected_latency_upper_bound_ms = 500 + + tokens_metadata = sqs_service.get_tokens_metadata() + + assert len(tokens_metadata) > 0, "Error: no tokens metadata found" + for denom, metadata in tokens_metadata.items(): + + # Skip unlisted tokens as they should be unsupported + if metadata['is_unlisted']: + continue + + # Set of denoms that we can skip + skip_denoms = { + # USDC.pica + 'ibc/078AD6F581E8115CDFBD8FFA29D8C71AFE250CE952AFF80040CBC64868D44AD3', + # NIM network + 
'ibc/279D69A6EF8E37456C8D2DC7A7C1C50F7A566EC4758F6DE17472A9FDE36C4426', + # DAI.pica + 'ibc/37DFAFDA529FF7D513B0DB23E9728DF9BF73122D38D46824C78BB7F91E6A736B', + # Furya + 'ibc/42D0FBF9DDC72D7359D309A93A6DF9F6FDEE3987EA1C5B3CDE95C06FCE183F12', + # frxETH.pica + 'ibc/688E70EF567E5D4BA1CF4C54BAD758C288BC1A6C8B0B12979F911A2AE95E27EC', + # MuseDAO + 'ibc/6B982170CE024689E8DD0E7555B129B488005130D4EDA426733D552D10B36D8F', + # FURY.legacy + 'ibc/7CE5F388D661D82A0774E47B5129DA51CC7129BD1A70B5FA6BCEBB5B0A2FAEAF', + # FRAX.pica, + 'ibc/9A8CBC029002DC5170E715F93FBF35011FFC9796371F59B1F3C3094AE1B453A9', + # ASTRO + 'ibc/B8C608CEE08C4F30A15A7955306F2EDAF4A02BB191CABC4185C1A57FD978DA1B', + # ASTRO.cw20 + 'ibc/C25A2303FE24B922DAFFDCE377AC5A42E5EF746806D32E2ED4B610DE85C203F7', + # bJUNO + "ibc/C2DF5C3949CA835B221C575625991F09BAB4E48FB9C11A4EE357194F736111E3", + # BSKT + 'ibc/CDD1E59BD5034C1B2597DD199782204EB397DB93200AA2E99C0AF3A66B2915FA', + # USDC = No routes with itself + constants.USDC, + # Kava SWAP - pool 1631 TODO: understand why no route + 'ibc/D6C28E07F7343360AC41E15DDD44D79701DDCA2E0C2C41279739C8D4AE5264BC', + } + + if denom in skip_denoms: + continue \ No newline at end of file diff --git a/tests/test_tokens_metadata.py b/tests/test_tokens_metadata.py index 00c6aff25..bccfda887 100644 --- a/tests/test_tokens_metadata.py +++ b/tests/test_tokens_metadata.py @@ -1,5 +1,6 @@ from sqs_service import * import pytest +from conftest import SERVICE_MAP # Minimum number of supported tokens expected # It should grow as we list more assets @@ -8,7 +9,7 @@ # Tests the /tokens/metadata endpoint class TestTokensMetadata: def test_token_metadata_count_above_min(self, environment_url): - sqs_service = SQSService(environment_url) + sqs_service = SERVICE_MAP[environment_url] tokens_metadata = sqs_service.get_tokens_metadata() From 22dd6ad12756bdaa3b4bf4f96c74bb5fccf8df9c Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 10 May 2024 20:18:34 +0000 Subject: [PATCH 14/19] Candidate routes test 
between every token and USDC --- Makefile | 4 +- tests/conftest.py | 2 + tests/setup.py | 88 +++++++++++++++++++++++-- tests/sqs_service.py | 1 + tests/test_candidate_routes.py | 116 ++++----------------------------- 5 files changed, 99 insertions(+), 112 deletions(-) diff --git a/Makefile b/Makefile index 043d24b93..7092dfb77 100644 --- a/Makefile +++ b/Makefile @@ -180,9 +180,9 @@ test-prices-mainnet: ### E2E Test -# Run E2E tests in verbose mode (-s) +# Run E2E tests in verbose mode (-s) -n 4 concurrent workers e2e-run-dev: - pytest -s + pytest -s -n 4 #### E2E Python Setup diff --git a/tests/conftest.py b/tests/conftest.py index b85f48a31..8a07bca42 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ from sqs_service import * SERVICE_SQS_STAGE = SQSService(SQS_STAGE) +SERVICE_SQS_PROD = SQSService(SQS_PROD) # Define the environment URLs # All tests will be run against these URLs @@ -14,4 +15,5 @@ def environment_url(request): SERVICE_MAP = { SQS_STAGE: SERVICE_SQS_STAGE, + SQS_PROD: SERVICE_SQS_PROD } diff --git a/tests/setup.py b/tests/setup.py index eec459a10..01d239a9b 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -1,5 +1,7 @@ import copy +import constants from data_service import all_tokens_data, all_pools_data +from conftest import SERVICE_SQS_PROD from enum import Enum, IntEnum # Numia pool type constants using an Enum @@ -29,6 +31,10 @@ class E2EPoolType(IntEnum): NumiaPoolType.CONCENTRATED.value: E2EPoolType.CONCENTRATED } +# This is the number of tokens we allow being skipped due to being unlisted +# or not having liquidity. This number is hand-picked arbitrarily. We have around ~350 tokens +# at the time of writing this test and we leave a small buffer. 
+ALLOWED_NUM_TOKENS_SKIPPED = 50 def get_e2e_pool_type_from_numia_pool(pool): """Gets an e2e pool type from a Numia pool.""" @@ -71,9 +77,13 @@ def get_denoms_from_pool_tokens(pool_tokens): return denoms -def map_pool_type_to_pool_data(pool_data): +def create_pool_data_maps(pool_data): """ - Returns a dictionary mapping each pool type to associated ID, liquidity, and tokens. + Creates and returns pool data dictionaries for testing. + + Returns two dictionaries: + + 1. A dictionary mapping each pool type to associated ID, liquidity, and tokens. Example output: { @@ -81,13 +91,21 @@ def map_pool_type_to_pool_data(pool_data): "e2e_pool_type_2": [[pool_id, liquidity, [denoms]], ...], ... } + + 2. A dictionary mapping each denom to the pool with highest liquidity + + Example output: + { + "uosmo": { "pool_liquidity": 1000000, "pool_id": 1 }, + ... + } """ if not pool_data: return {} pool_type_to_data = {} - denom_to_pool_id = {} + denom_top_liquidity_pool_map = {} for pool in pool_data: # Convert Numia pool type to e2e pool type @@ -108,7 +126,19 @@ def map_pool_type_to_pool_data(pool_data): # Append the pool data to the list for this pool type pool_type_to_data[e2e_pool_type].append([pool_id, liquidity, denoms]) - return pool_type_to_data + for denom in denoms: + denom_pool_data = denom_top_liquidity_pool_map.get(denom) + + if denom_pool_data: + # Update the pool ID if the liquidity is higher + if liquidity > denom_pool_data['pool_liquidity']: + denom_pool_data['pool_liquidity'] = liquidity + denom_pool_data['pool_id'] = pool_id + else: + # Create first mapping for this denom + denom_top_liquidity_pool_map[denom] = {'pool_liquidity': liquidity, 'pool_id': pool_id} + + return pool_type_to_data, denom_top_liquidity_pool_map def create_field_to_data_map(tokens_data, key_field): @@ -223,6 +253,47 @@ def choose_pcl_pool_tokens_by_liq_asc(num_pairs=1, min_liq=0, max_liq=float('inf Returns [pool ID, [tokens]]""" return 
choose_pool_type_tokens_by_liq_asc(E2EPoolType.COSMWASM_ASTROPORT, num_pairs, min_liq, max_liq, asc) +def choose_valid_listed_tokens(): + """ + Returns all listed tokens from the asset list that have at least one pool with liquidity. + + Queries production SQS for the tokens asset list metadata. + + Queries Numia for the pool liquidity data. + """ + + # We rely on SQS itself for getting the tokens metadata for configuring tests. + # While it is not the best practice, we make an exception since this is the most reliable way to get + # The asset list data. In the future, we can implement custom test parsing to replace relying on SQS + # in test setup. + tokens_metadata = SERVICE_SQS_PROD.get_tokens_metadata() + + if len(tokens_metadata) == 0: + raise ValueError("Error: no tokens metadata retrieved from SQS during tokens setup") + + valid_listed_tokens = [] + + for denom, metadata in tokens_metadata.items(): + # Skip unlisted tokens as they should be unsupported + # in SQS. + if metadata['is_unlisted']: + print(f"Denom {denom} is unlisted") + continue + + # Skip tokens with no pools with liquidity as we cannot find routes in-between them. 
+ top_liquidity_pool = denom_top_liquidity_pool_map.get(denom) + if top_liquidity_pool is None: + print(f"Denom {denom} has no pool with liquidity") + continue + + valid_listed_tokens.append(denom) + + skipped_token_count = len(tokens_metadata) - len(valid_listed_tokens) + if skipped_token_count > ALLOWED_NUM_TOKENS_SKIPPED: + raise ValueError(f"Too many tokens {skipped_token_count} from the metadata were untested, allowed {ALLOWED_NUM_TOKENS_SKIPPED} tokens to be skipped") + + return valid_listed_tokens + def chain_denom_to_display(chain_denom): """Function to map chain denom to display.""" @@ -239,8 +310,13 @@ def chain_denoms_to_display(chain_denoms): # Create a map of chain denom to token data chain_denom_to_data_map = create_chain_denom_to_data_map(all_tokens_data) -# Create a map of pool type to pool data -pool_type_to_denoms = map_pool_type_to_pool_data(all_pools_data) +# Create two maps: +# 1. A map of pool type to pool data +# 2. A map of denom to top liquidity pool +pool_type_to_denoms, denom_top_liquidity_pool_map = create_pool_data_maps(all_pools_data) # Create a map of pool ID to pool data pool_by_id_map = {pool.get('pool_id'): pool for pool in all_pools_data} + +# Listed tokens that have at least one pool with liquidity +valid_listed_tokens = choose_valid_listed_tokens() diff --git a/tests/sqs_service.py b/tests/sqs_service.py index 6ba37efde..c7b42c592 100644 --- a/tests/sqs_service.py +++ b/tests/sqs_service.py @@ -1,6 +1,7 @@ import requests SQS_STAGE = "https://sqs.stage.osmosis.zone" +SQS_PROD = "https://sqs.osmosis.zone" ROUTER_ROUTES_URL = "/router/routes" diff --git a/tests/test_candidate_routes.py b/tests/test_candidate_routes.py index f3c5fac35..b8ec17bc0 100644 --- a/tests/test_candidate_routes.py +++ b/tests/test_candidate_routes.py @@ -1,6 +1,6 @@ -import requests import setup import time +import pytest from sqs_service import * import constants @@ -22,7 +22,7 @@ def test_usdc_uosmo(self, environment_url): config = 
sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] # Arbitrary choice based on performance at the time of test writing - expected_latency_upper_bound_ms = 300 + expected_latency_upper_bound_ms = 1000 self.run_candidate_routes_test(environment_url, constants.USDC, constants.UOSMO, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) @@ -35,64 +35,21 @@ def test_uosmo_usdc(self, environment_url): config = sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] # Arbitrary choice based on performance at the time of test writing - expected_latency_upper_bound_ms = 300 + expected_latency_upper_bound_ms = 1000 self.run_candidate_routes_test(environment_url, constants.UOSMO, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) - def test_all_tokens_usdc(self, environment_url): + # Test function with dynamic parameterization + @pytest.mark.parametrize("denom", setup.valid_listed_tokens) + def test_all_valid_tokens(self, environment_url, denom): sqs_service = SERVICE_MAP[environment_url] config = sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] # Arbitrary choice based on performance at the time of test writing - expected_latency_upper_bound_ms = 500 - - tokens_metadata = sqs_service.get_tokens_metadata() - - assert len(tokens_metadata) > 0, "Error: no tokens metadata found" - for denom, metadata in tokens_metadata.items(): - - # Skip unlisted tokens as they should be unsupported - if metadata['is_unlisted']: - continue - - # Set of denoms that we can skip - skip_denoms = { - # USDC.pica - 'ibc/078AD6F581E8115CDFBD8FFA29D8C71AFE250CE952AFF80040CBC64868D44AD3', - # NIM network - 'ibc/279D69A6EF8E37456C8D2DC7A7C1C50F7A566EC4758F6DE17472A9FDE36C4426', - # DAI.pica - 'ibc/37DFAFDA529FF7D513B0DB23E9728DF9BF73122D38D46824C78BB7F91E6A736B', - # Furya - 
'ibc/42D0FBF9DDC72D7359D309A93A6DF9F6FDEE3987EA1C5B3CDE95C06FCE183F12', - # frxETH.pica - 'ibc/688E70EF567E5D4BA1CF4C54BAD758C288BC1A6C8B0B12979F911A2AE95E27EC', - # MuseDAO - 'ibc/6B982170CE024689E8DD0E7555B129B488005130D4EDA426733D552D10B36D8F', - # FURY.legacy - 'ibc/7CE5F388D661D82A0774E47B5129DA51CC7129BD1A70B5FA6BCEBB5B0A2FAEAF', - # FRAX.pica, - 'ibc/9A8CBC029002DC5170E715F93FBF35011FFC9796371F59B1F3C3094AE1B453A9', - # ASTRO - 'ibc/B8C608CEE08C4F30A15A7955306F2EDAF4A02BB191CABC4185C1A57FD978DA1B', - # ASTRO.cw20 - 'ibc/C25A2303FE24B922DAFFDCE377AC5A42E5EF746806D32E2ED4B610DE85C203F7', - # bJUNO - "ibc/C2DF5C3949CA835B221C575625991F09BAB4E48FB9C11A4EE357194F736111E3", - # BSKT - 'ibc/CDD1E59BD5034C1B2597DD199782204EB397DB93200AA2E99C0AF3A66B2915FA', - # USDC = No routes with itself - constants.USDC, - # Kava SWAP - pool 1631 TODO: understand why no route - 'ibc/D6C28E07F7343360AC41E15DDD44D79701DDCA2E0C2C41279739C8D4AE5264BC', - } - - if denom in skip_denoms: - continue - - self.run_candidate_routes_test(environment_url, denom, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=1, expected_max_routes=expected_num_routes) + expected_latency_upper_bound_ms = 1000 + self.run_candidate_routes_test(environment_url, denom, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=1, expected_max_routes=expected_num_routes) def run_candidate_routes_test(self, environment_url, token_in, token_out, expected_latency_upper_bound_ms, expected_min_routes, expected_max_routes): """ @@ -132,6 +89,10 @@ def validate_candidate_routes(self, routes, token_in, token_out, expected_min_ro - The number of routes is within the expected range """ + if token_in == token_out: + assert routes is None, f"equal tokens in and out for candidate route must have no route" + return + assert routes is not None, f"Error: no routes found for token in {token_in} and token out {token_out}" assert len(routes) <= expected_max_routes, f"Error: found more than 
{expected_max_routes} routes with token in {token_in} and token out {token_out}" assert len(routes) >= expected_min_routes, f"Error: found fewer than {expected_min_routes} routes with token in {token_in} and token out {token_out}" @@ -163,56 +124,3 @@ def validate_candidate_routes(self, routes, token_in, token_out, expected_min_ro # Last token in must be equal to token out assert cur_token_in == token_out, f"Error: last token out {cur_token_in} not equal to token out {token_out}" - - -def generate_all_token_usdc_tests(environment_url): - sqs_service = SERVICE_MAP[environment_url] - - config = sqs_service.get_config() - expected_num_routes = config['Router']['MaxRoutes'] - # Arbitrary choice based on performance at the time of test writing - expected_latency_upper_bound_ms = 500 - - tokens_metadata = sqs_service.get_tokens_metadata() - - assert len(tokens_metadata) > 0, "Error: no tokens metadata found" - for denom, metadata in tokens_metadata.items(): - - # Skip unlisted tokens as they should be unsupported - if metadata['is_unlisted']: - continue - - # Set of denoms that we can skip - skip_denoms = { - # USDC.pica - 'ibc/078AD6F581E8115CDFBD8FFA29D8C71AFE250CE952AFF80040CBC64868D44AD3', - # NIM network - 'ibc/279D69A6EF8E37456C8D2DC7A7C1C50F7A566EC4758F6DE17472A9FDE36C4426', - # DAI.pica - 'ibc/37DFAFDA529FF7D513B0DB23E9728DF9BF73122D38D46824C78BB7F91E6A736B', - # Furya - 'ibc/42D0FBF9DDC72D7359D309A93A6DF9F6FDEE3987EA1C5B3CDE95C06FCE183F12', - # frxETH.pica - 'ibc/688E70EF567E5D4BA1CF4C54BAD758C288BC1A6C8B0B12979F911A2AE95E27EC', - # MuseDAO - 'ibc/6B982170CE024689E8DD0E7555B129B488005130D4EDA426733D552D10B36D8F', - # FURY.legacy - 'ibc/7CE5F388D661D82A0774E47B5129DA51CC7129BD1A70B5FA6BCEBB5B0A2FAEAF', - # FRAX.pica, - 'ibc/9A8CBC029002DC5170E715F93FBF35011FFC9796371F59B1F3C3094AE1B453A9', - # ASTRO - 'ibc/B8C608CEE08C4F30A15A7955306F2EDAF4A02BB191CABC4185C1A57FD978DA1B', - # ASTRO.cw20 - 'ibc/C25A2303FE24B922DAFFDCE377AC5A42E5EF746806D32E2ED4B610DE85C203F7', - # 
bJUNO - "ibc/C2DF5C3949CA835B221C575625991F09BAB4E48FB9C11A4EE357194F736111E3", - # BSKT - 'ibc/CDD1E59BD5034C1B2597DD199782204EB397DB93200AA2E99C0AF3A66B2915FA', - # USDC = No routes with itself - constants.USDC, - # Kava SWAP - pool 1631 TODO: understand why no route - 'ibc/D6C28E07F7343360AC41E15DDD44D79701DDCA2E0C2C41279739C8D4AE5264BC', - } - - if denom in skip_denoms: - continue From 637fb5fb4b4d714150b22c6ba891816201172605 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 10 May 2024 22:17:57 +0000 Subject: [PATCH 15/19] complete candidate route test suite --- scripts/quote.sh | 10 ++- tests/main.py | 2 +- tests/setup.py | 50 +++++++++++- tests/test_candidate_routes.py | 142 +++++++++++++++++++++++---------- 4 files changed, 156 insertions(+), 48 deletions(-) diff --git a/scripts/quote.sh b/scripts/quote.sh index aff80be3a..6fe0af333 100755 --- a/scripts/quote.sh +++ b/scripts/quote.sh @@ -1,15 +1,17 @@ #!/bin/bash -url=$1 +sqs_address=$1 +node_address=$2 # This script compares single hop quotes by running them against SQS and chain directly. 
+# ./scripts/quote.sh https://sqs.osmosis.zone https://rpc.osmosis.zone:443 -chain_amount_out=$(osmosisd q poolmanager estimate-swap-exact-amount-in 1436 100000000factory/osmo1z0qrq605sjgcqpylfl4aa6s90x738j7m58wyatt0tdzflg2ha26q67k743/wbtc --swap-route-pool-ids 1436 --swap-route-denoms ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4 --node $url:26657) +chain_amount_out=$(osmosisd q poolmanager estimate-swap-exact-amount-in 1436 100000000factory/osmo1z0qrq605sjgcqpylfl4aa6s90x738j7m58wyatt0tdzflg2ha26q67k743/wbtc --swap-route-pool-ids 1436 --swap-route-denoms ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4 --node $node_address) -sqs_custom_res=$(curl "$url:9092/router/custom-direct-quote?tokenIn=100000000factory/osmo1z0qrq605sjgcqpylfl4aa6s90x738j7m58wyatt0tdzflg2ha26q67k743/wbtc&tokenOutDenom=ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4&poolID=1436") +sqs_custom_res=$(curl "$sqs_address/router/custom-direct-quote?tokenIn=100000000factory/osmo1z0qrq605sjgcqpylfl4aa6s90x738j7m58wyatt0tdzflg2ha26q67k743/wbtc&tokenOutDenom=ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4&poolID=1436") sqs_custom_amount_out=$(echo $sqs_custom_res | jq .amount_out) -sqs_optimal_res=$(curl "$url:9092/router/quote?tokenIn=100000000factory/osmo1z0qrq605sjgcqpylfl4aa6s90x738j7m58wyatt0tdzflg2ha26q67k743/wbtc&tokenOutDenom=ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4") +sqs_optimal_res=$(curl "$sqs_address/router/quote?tokenIn=100000000factory/osmo1z0qrq605sjgcqpylfl4aa6s90x738j7m58wyatt0tdzflg2ha26q67k743/wbtc&tokenOutDenom=ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4") sqs_optimal_amount_out=$(echo $sqs_optimal_res | jq .amount_out) echo "chain_amount_out: $chain_amount_out" diff --git a/tests/main.py b/tests/main.py index b004f8c69..68cc1f772 100644 --- a/tests/main.py +++ b/tests/main.py @@ -4,7 +4,7 @@ transmuter_token_pairs = 
choose_transmuter_pool_tokens_by_liq_asc(1) print("Transmuter ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in transmuter_token_pairs]) -astroport_token_pairs = choose_pcl_pool_tokens_by_liq_asc(2, 1000, 10000) +astroport_token_pairs = choose_pcl_pool_tokens_by_liq_asc(1) print("Astroport PCL ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in astroport_token_pairs]) print("Top Liquidity Token ", chain_denoms_to_display(choose_tokens_liq_range())) diff --git a/tests/setup.py b/tests/setup.py index 01d239a9b..79500a659 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -1,5 +1,5 @@ import copy -import constants +import itertools from data_service import all_tokens_data, all_pools_data from conftest import SERVICE_SQS_PROD from enum import Enum, IntEnum @@ -320,3 +320,51 @@ def chain_denoms_to_display(chain_denoms): # Listed tokens that have at least one pool with liquidity valid_listed_tokens = choose_valid_listed_tokens() + +# One Transmuter token pair [[pool_id, ['denom0', 'denom1']]] +transmuter_token_pairs = choose_transmuter_pool_tokens_by_liq_asc(1) + +# One Astroport token pair [[pool_id, ['denom0', 'denom1']]] +astroport_token_pair = choose_pcl_pool_tokens_by_liq_asc(1) + +def create_token_pairs(): + """ + Selects the following groups of tokens: + 1. Top 5 by-liquidity + 2. Top 5 by-volume + 3. Five low liquidity (between 5000 and 10000 USD) + 4. Five low volume (between 5000 and 10000 USD) + + Then, + - Puts them all in a set + - Constructs combinations between each. 
+ + Returns combinations in the following format: + [['denom0', 'denom1']] + """ + + # Five top by-liquidity tokens + top_five_liquidity_tokens = choose_tokens_liq_range(5) + + # Five top by-volume tokens + top_five_volume_tokens = choose_tokens_volume_range(5) + + # Five low liquidity tokens + five_low_liquidity_tokens = choose_tokens_liq_range(5, 5000, 10000) + + # Five low volume tokens + five_low_volume_tokens = choose_tokens_volume_range(5, 5000, 10000) + + # Put all tokens in a set to ensure uniqueness + all_tokens = set(top_five_liquidity_tokens + top_five_volume_tokens + + five_low_liquidity_tokens + five_low_volume_tokens) + + # Construct all unique combinations of token pairs + token_pairs = list(itertools.combinations(all_tokens, 2)) + + # Format pairs for return + formatted_pairs = [[token1, token2] for token1, token2 in token_pairs] + + return formatted_pairs + +misc_token_pairs = create_token_pairs() diff --git a/tests/test_candidate_routes.py b/tests/test_candidate_routes.py index b8ec17bc0..cf5b9be69 100644 --- a/tests/test_candidate_routes.py +++ b/tests/test_candidate_routes.py @@ -10,6 +10,9 @@ ROUTES_URL = "/router/routes" +# Arbitrary choice based on performance at the time of test writing +expected_latency_upper_bound_ms = 1000 + # Test suite for the /router/routes endpoint class TestCandidateRoutes: # Sanity check to ensure the test setup is correct @@ -17,12 +20,8 @@ class TestCandidateRoutes: def test_usdc_uosmo(self, environment_url): sqs_service = SERVICE_MAP[environment_url] - # Get max routes value from deployment config to expect the same number of candidate routes - # to be found config = sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] - # Arbitrary choice based on performance at the time of test writing - expected_latency_upper_bound_ms = 1000 self.run_candidate_routes_test(environment_url, constants.USDC, constants.UOSMO, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, 
expected_max_routes=expected_num_routes) @@ -30,27 +29,65 @@ def test_usdc_uosmo(self, environment_url): def test_uosmo_usdc(self, environment_url): sqs_service = SERVICE_MAP[environment_url] - # Get max routes value from deployment config to expect the same number of candidate routes - # to be found config = sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] - # Arbitrary choice based on performance at the time of test writing - expected_latency_upper_bound_ms = 1000 self.run_candidate_routes_test(environment_url, constants.UOSMO, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=expected_num_routes, expected_max_routes=expected_num_routes) - # Test function with dynamic parameterization + # Test all valid listed tokens with appropriate liquidity with dynamic parameterization @pytest.mark.parametrize("denom", setup.valid_listed_tokens) def test_all_valid_tokens(self, environment_url, denom): sqs_service = SERVICE_MAP[environment_url] config = sqs_service.get_config() expected_num_routes = config['Router']['MaxRoutes'] - # Arbitrary choice based on performance at the time of test writing - expected_latency_upper_bound_ms = 1000 + self.run_candidate_routes_test(environment_url, denom, constants.USDC, expected_latency_upper_bound_ms, expected_min_routes=1, expected_max_routes=expected_num_routes) + def test_transmuter_tokens(self, environment_url): + sqs_service = SERVICE_MAP[environment_url] + + transmuter_token_data = setup.transmuter_token_pairs[0] + transmuter_pool_id = transmuter_token_data[0] + tansmuter_token_pair = transmuter_token_data[1] + + config = sqs_service.get_config() + expected_num_routes = config['Router']['MaxRoutes'] + + routes = self.run_candidate_routes_test(environment_url, tansmuter_token_pair[0], tansmuter_token_pair[1], expected_latency_upper_bound_ms, expected_min_routes=1, expected_max_routes=expected_num_routes) + + validate_pool_id_in_route(routes, [transmuter_pool_id]) + + def 
test_astroport_tokens(self, environment_url): + sqs_service = SERVICE_MAP[environment_url] + + astroport_token_data = setup.astroport_token_pair[0] + astroport_pool_id = astroport_token_data[0] + astroport_token_pair = astroport_token_data[1] + + config = sqs_service.get_config() + expected_num_routes = config['Router']['MaxRoutes'] + + routes = self.run_candidate_routes_test(environment_url, astroport_token_pair[0], astroport_token_pair[1], expected_latency_upper_bound_ms, expected_min_routes=1, expected_max_routes=expected_num_routes) + + validate_pool_id_in_route(routes, [astroport_pool_id]) + + # Test various combinations between tokens in the following groups: + # Selects the following groups of tokens: + # 1. Top 5 by-liquidity + # 2. Top 5 by-volume + # 3. Five low liquidity (between 5000 and 10000 USD) + # 4. Five low volume (between 5000 and 10000 USD) + @pytest.mark.parametrize("pair", setup.misc_token_pairs) + def test_misc_token_pairs(self, environment_url, pair): + sqs_service = SERVICE_MAP[environment_url] + + config = sqs_service.get_config() + expected_num_routes = config['Router']['MaxRoutes'] + + self.run_candidate_routes_test(environment_url, pair[0], pair[1], expected_latency_upper_bound_ms, expected_min_routes=1, expected_max_routes=expected_num_routes) + def run_candidate_routes_test(self, environment_url, token_in, token_out, expected_latency_upper_bound_ms, expected_min_routes, expected_max_routes): """ Runs a test for the /router/routes endpoint with the given input parameters. 
@@ -75,52 +112,73 @@ def run_candidate_routes_test(self, environment_url, token_in, token_out, expect response_json = response.json() routes = response_json['Routes'] - self.validate_candidate_routes(routes, token_in, token_out, expected_min_routes, expected_max_routes) + validate_candidate_routes(routes, token_in, token_out, expected_min_routes, expected_max_routes) # Return routes in case additional validation is desired return routes - def validate_candidate_routes(self, routes, token_in, token_out, expected_min_routes, expected_max_routes): - """ - Validates the given routes. +def validate_candidate_routes(routes, token_in, token_out, expected_min_routes, expected_max_routes): + """ + Validates the given routes. - Validates: - - Following pools in each route, all tokens within these pools are present and valid - - The number of routes is within the expected range - """ + Validates: + - Following pools in each route, all tokens within these pools are present and valid + - The number of routes is within the expected range + """ - if token_in == token_out: - assert routes is None, f"equal tokens in and out for candidate route must have no route" - return + if token_in == token_out: + assert routes is None, f"equal tokens in and out for candidate route must have no route" + return + + assert routes is not None, f"Error: no routes found for token in {token_in} and token out {token_out}" + assert len(routes) <= expected_max_routes, f"Error: found more than {expected_max_routes} routes with token in {token_in} and token out {token_out}" + assert len(routes) >= expected_min_routes, f"Error: found fewer than {expected_min_routes} routes with token in {token_in} and token out {token_out}" + + for route in routes: + cur_token_in = token_in + + pools = route['Pools'] - assert routes is not None, f"Error: no routes found for token in {token_in} and token out {token_out}" - assert len(routes) <= expected_max_routes, f"Error: found more than {expected_max_routes} routes with 
token in {token_in} and token out {token_out}" - assert len(routes) >= expected_min_routes, f"Error: found fewer than {expected_min_routes} routes with token in {token_in} and token out {token_out}" + assert len(pools) > 0, f"Error: no pools found in route {route}" + for pool in pools: + pool_id = pool['ID'] - for route in routes: - cur_token_in = token_in + expected_pool_data = setup.pool_by_id_map.get(pool_id) - pools = route['Pools'] + assert expected_pool_data, f"Error: pool ID {pool_id} not found in test data" - assert len(pools) > 0, f"Error: no pools found in route {route}" - for pool in pools: - pool_id = pool['ID'] + # Extract denoms using a helper function + pool_tokens = expected_pool_data.get("pool_tokens") + denoms = setup.get_denoms_from_pool_tokens(pool_tokens) - expected_pool_data = setup.pool_by_id_map.get(pool_id) + found_denom = cur_token_in in denoms - assert expected_pool_data, f"Error: pool ID {pool_id} not found in test data" + assert found_denom, f"Error: token in {cur_token_in} not found in pool denoms {denoms}" - # Extract denoms using a helper function - pool_tokens = expected_pool_data.get("pool_tokens") - denoms = setup.get_denoms_from_pool_tokens(pool_tokens) + cur_token_out = pool['TokenOutDenom'] - found_denom = cur_token_in in denoms + cur_token_in = cur_token_out - assert found_denom, f"Error: token in {cur_token_in} not found in pool denoms {denoms}" + # Last token in must be equal to token out + assert cur_token_in == token_out, f"Error: last token out {cur_token_in} not equal to token out {token_out}" - cur_token_out = pool['TokenOutDenom'] +def validate_pool_id_in_route(routes, expected_pool_ids): + assert len(routes) > 0 + for route in routes: + pools = route['Pools'] - cur_token_in = cur_token_out + if len(pools) != len(expected_pool_ids): + continue + + is_expected_route = True + for pool, expected_pool_id in zip(pools, expected_pool_ids): + pool_id = pool['ID'] + + if pool_id != expected_pool_id: + is_expected_route = False 
+ break + + if is_expected_route: + return - # Last token in must be equal to token out - assert cur_token_in == token_out, f"Error: last token out {cur_token_in} not equal to token out {token_out}" + assert False, f"{routes} do not contain {expected_pool_ids}" From 28b7965efe700a506071fc45532ef1024e8d72ec Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 10 May 2024 22:27:06 +0000 Subject: [PATCH 16/19] prevent false positives --- tests/setup.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/tests/setup.py b/tests/setup.py index 79500a659..b921a5750 100644 --- a/tests/setup.py +++ b/tests/setup.py @@ -34,7 +34,13 @@ class E2EPoolType(IntEnum): # This is the number of tokens we allow being skipped due to being unlisted # or not having liquidity. This number is hand-picked arbitrarily. We have around ~350 tokens # at the time of writing this test and we leave a small buffer. -ALLOWED_NUM_TOKENS_SKIPPED = 50 +ALLOWED_NUM_TOKENS_USDC_PAIR_SKIPPED = 50 +# This is the minimum number of misc token pairs +# that we expect to construct in setup. +# Since our test setup is dynamic, it is important to +# validate we do not get false positive passes due to small +# number of pairs constructed. +MIN_NUM_MISC_TOKEN_PAIRS = 10 def get_e2e_pool_type_from_numia_pool(pool): """Gets an e2e pool type from a Numia pool.""" @@ -178,7 +184,7 @@ def choose_tokens_generic(tokens, filter_key, min_value, max_value, sort_key, nu Args: tokens (list): The list of token data dictionaries. - filter_key (str): The field name sed to filter tokens. + filter_key (str): The field name used to filter tokens. min_value (float): The minimum value for filtering. max_value (float): The maximum value for filtering. sort_key (str): The field name used for sorting tokens. 
@@ -289,8 +295,8 @@ def choose_valid_listed_tokens(): valid_listed_tokens.append(denom) skipped_token_count = len(tokens_metadata) - len(valid_listed_tokens) - if skipped_token_count > ALLOWED_NUM_TOKENS_SKIPPED: - raise ValueError(f"Too many tokens {skipped_token_count} from the metadata were untested, allowed {ALLOWED_NUM_TOKENS_SKIPPED} tokens to be skipped") + if skipped_token_count > ALLOWED_NUM_TOKENS_USDC_PAIR_SKIPPED: + raise ValueError(f"Too many tokens {skipped_token_count} from the metadata were untested, allowed {ALLOWED_NUM_TOKENS_USDC_PAIR_SKIPPED} tokens to be skipped") return valid_listed_tokens @@ -365,6 +371,9 @@ def create_token_pairs(): # Format pairs for return formatted_pairs = [[token1, token2] for token1, token2 in token_pairs] + if len(formatted_pairs) < MIN_NUM_MISC_TOKEN_PAIRS: + raise ValueError(f"Constructed {len(formatted_pairs)}, min expected {MIN_NUM_MISC_TOKEN_PAIRS}") + return formatted_pairs misc_token_pairs = create_token_pairs() From 8ca6833202097bacc01a35d849548bbc27a1cc15 Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 10 May 2024 22:28:11 +0000 Subject: [PATCH 17/19] comment --- tests/test_candidate_routes.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/test_candidate_routes.py b/tests/test_candidate_routes.py index cf5b9be69..6ee025774 100644 --- a/tests/test_candidate_routes.py +++ b/tests/test_candidate_routes.py @@ -163,6 +163,13 @@ def validate_candidate_routes(routes, token_in, token_out, expected_min_routes, assert cur_token_in == token_out, f"Error: last token out {cur_token_in} not equal to token out {token_out}" def validate_pool_id_in_route(routes, expected_pool_ids): + """ + Validates that there is at least one route in routes + that contains pools exactly as given per expected_pool_ids + + Fails if not + """ + assert len(routes) > 0 for route in routes: pools = route['Pools'] From 1c576abd8ee77e1b1aaecbedb0432602417502ac Mon Sep 17 00:00:00 2001 From: Roman Date: Fri, 10 May 2024 22:29:18 +0000 
Subject: [PATCH 18/19] remove main.py --- tests/main.py | 16 ---------------- 1 file changed, 16 deletions(-) delete mode 100644 tests/main.py diff --git a/tests/main.py b/tests/main.py deleted file mode 100644 index 68cc1f772..000000000 --- a/tests/main.py +++ /dev/null @@ -1,16 +0,0 @@ -from setup import * - -# DEMO - to be removed in a subsequent PR -transmuter_token_pairs = choose_transmuter_pool_tokens_by_liq_asc(1) -print("Transmuter ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in transmuter_token_pairs]) - -astroport_token_pairs = choose_pcl_pool_tokens_by_liq_asc(1) -print("Astroport PCL ", [chain_denoms_to_display(pool_tokens[1]) for pool_tokens in astroport_token_pairs]) - -print("Top Liquidity Token ", chain_denoms_to_display(choose_tokens_liq_range())) - -print("Top Volume Token ", chain_denoms_to_display(choose_tokens_volume_range())) - -print("2 Low Liquidity Tokens ", chain_denoms_to_display(choose_tokens_liq_range(2, 5000, 10000))) - -print("3 Low Volume Tokens ", chain_denoms_to_display(choose_tokens_volume_range(3, 5000, 10000))) From 24b2354592fc515e6e1977a4339dff665f057fe3 Mon Sep 17 00:00:00 2001 From: Roman Date: Mon, 13 May 2024 23:57:02 +0000 Subject: [PATCH 19/19] remove obsolete constants & avoid hardcoding --- tests/constants.py | 12 ++++++++++++ tests/setup.py | 23 ++++++++++++----------- tests/test_candidate_routes.py | 4 ---- 3 files changed, 24 insertions(+), 15 deletions(-) diff --git a/tests/constants.py b/tests/constants.py index d3142a795..b4fdfad9d 100644 --- a/tests/constants.py +++ b/tests/constants.py @@ -3,3 +3,15 @@ # Misc constants UOSMO = "uosmo" USDC = 'ibc/498A0751C798A0D9A389AA3691123DADA57DAA4FE165D5C75894505B876BA6E4' + +# Defaults + +## Liquidity +MIN_LIQ_FILTER_DEFAULT = 5000 +MAX_LIQ_FILTER_DEFAULT = 10000 + +## Volume +MIN_VOL_FILTER_DEFAULT = 5000 +MAX_VOL_FILTER_DEFAULT = 10000 + +NUM_TOKENS_DEFAULT = 5 diff --git a/tests/setup.py b/tests/setup.py index b921a5750..51fa6df9a 100644 --- 
a/tests/setup.py +++ b/tests/setup.py @@ -3,6 +3,7 @@ from data_service import all_tokens_data, all_pools_data from conftest import SERVICE_SQS_PROD from enum import Enum, IntEnum +from constants import * # Numia pool type constants using an Enum class NumiaPoolType(Enum): @@ -336,10 +337,10 @@ def chain_denoms_to_display(chain_denoms): def create_token_pairs(): """ Selects the following groups of tokens: - 1. Top 5 by-liquidity - 2. Top 5 by-volume - 3. Five low liquidity (between 5000 and 10000 USD) - 4. Five low volume (between 5000 and 10000 USD) + 1. Top NUM_TOKENS_DEFAULT by-liquidity + 2. Top NUM_TOKENS_DEFAULT by-volume + 3. Five low liquidity (between MIN_LIQ_FILTER_DEFAULT and MAX_LIQ_FILTER_DEFAULT USD) + 4. Five low volume (between MIN_VOL_FILTER_DEFAULT and MAX_VOL_FILTER_DEFAULT USD) Then, - Puts them all in a set @@ -350,16 +351,16 @@ def create_token_pairs(): # Five top by-liquidity tokens - top_five_liquidity_tokens = choose_tokens_liq_range(5) + top_five_liquidity_tokens = choose_tokens_liq_range(NUM_TOKENS_DEFAULT) - # Five top by-volume tokens - top_five_volume_tokens = choose_tokens_volume_range(5) + # NUM_TOKENS_DEFAULT top by-volume tokens + top_five_volume_tokens = choose_tokens_volume_range(NUM_TOKENS_DEFAULT) - # Five low liquidity tokens - five_low_liquidity_tokens = choose_tokens_liq_range(5, 5000, 10000) + # NUM_TOKENS_DEFAULT low liquidity tokens + five_low_liquidity_tokens = choose_tokens_liq_range(NUM_TOKENS_DEFAULT, MIN_LIQ_FILTER_DEFAULT, MAX_LIQ_FILTER_DEFAULT) - # Five low volume tokens - five_low_volume_tokens = choose_tokens_volume_range(5, 5000, 10000) + # NUM_TOKENS_DEFAULT low volume tokens + five_low_volume_tokens = choose_tokens_volume_range(NUM_TOKENS_DEFAULT, MIN_VOL_FILTER_DEFAULT, MAX_VOL_FILTER_DEFAULT) # Put all tokens in a set to ensure uniqueness all_tokens = set(top_five_liquidity_tokens + top_five_volume_tokens + diff --git a/tests/test_candidate_routes.py b/tests/test_candidate_routes.py index 
6ee025774..e21037ebe 100644 --- a/tests/test_candidate_routes.py +++ b/tests/test_candidate_routes.py @@ -6,10 +6,6 @@ import constants from conftest import SERVICE_MAP -SQS_STAGE = "https://sqs.stage.osmosis.zone" - -ROUTES_URL = "/router/routes" - # Arbitrary choice based on performance at the time of test writing expected_latency_upper_bound_ms = 1000