From a9598fc27821d297ec5c1ede8e4c62ae9831b26a Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Fri, 3 Mar 2023 16:09:15 +0200 Subject: [PATCH 01/83] Increased no retries and backoff factor. --- ocean_provider/requests_session.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/requests_session.py b/ocean_provider/requests_session.py index 0e07ab44..6117d260 100644 --- a/ocean_provider/requests_session.py +++ b/ocean_provider/requests_session.py @@ -13,7 +13,7 @@ def get_requests_session() -> Session: :return: requests session """ session = Session() - retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504]) + retries = Retry(total=8, backoff_factor=1.5, status_forcelist=[502, 503, 504]) session.mount( "http://", HTTPAdapter( From db65c9ca7f66ac042211fc1339a476e5e29484b7 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Fri, 3 Mar 2023 16:24:09 +0200 Subject: [PATCH 02/83] updated workflow. --- .github/workflows/pytest.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index df549633..c65fdc25 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -46,6 +46,10 @@ jobs: sleep 10 [ -f "$HOME/.ocean/ocean-contracts/artifacts/ready" -a -f "$HOME/.ocean/ocean-c2d/ready" ] && break done + - name: Verify deployments + run: | + cat $HOME/.ocean/ocean-contracts/artifacts/address.json + curl http://172.15.0.13:31000 - name: Test with pytest run: | From 613cb0c32706bd8cb4a6b3899161fb8bdad757a2 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Fri, 3 Mar 2023 18:17:32 +0200 Subject: [PATCH 03/83] tweak. --- ocean_provider/requests_session.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/requests_session.py b/ocean_provider/requests_session.py index 6117d260..5baa4150 100644 --- a/ocean_provider/requests_session.py +++ b/ocean_provider/requests_session.py @@ -13,7 +13,7 @@ def get_requests_session() -> Session: :return: requests session """ session = Session() - retries = Retry(total=8, backoff_factor=1.5, status_forcelist=[502, 503, 504]) + retries = Retry(total=6, backoff_factor=1.5, status_forcelist=[502, 503, 504]) session.mount( "http://", HTTPAdapter( From c40e9a0d15bd945704c25a5c585102ccba18fbb7 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Fri, 3 Mar 2023 18:47:35 +0200 Subject: [PATCH 04/83] tweak 2. --- ocean_provider/requests_session.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/requests_session.py b/ocean_provider/requests_session.py index 5baa4150..861f3ad8 100644 --- a/ocean_provider/requests_session.py +++ b/ocean_provider/requests_session.py @@ -13,7 +13,7 @@ def get_requests_session() -> Session: :return: requests session """ session = Session() - retries = Retry(total=6, backoff_factor=1.5, status_forcelist=[502, 503, 504]) + retries = Retry(total=7, backoff_factor=1, status_forcelist=[502, 503, 504]) session.mount( "http://", HTTPAdapter( From 7e9ad420c7fa316ba7588415222809c85e54a140 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 14:09:00 +0200 Subject: [PATCH 05/83] updated workflow 2. 
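Context for the retry tuning in patches 01-04: urllib3's Retry sleeps
backoff_factor * 2 ** (retry - 1) seconds before each attempt per its
documented formula (depending on the urllib3 version, the very first retry
may not sleep at all, and each individual sleep is capped at 120s by
default). A quick sketch of the schedules tried so far:

    # Sketch of urllib3's documented backoff schedule; assumes the
    # formula backoff_factor * 2 ** (retry - 1) applies to every retry.
    def sleep_schedule(total, backoff_factor):
        return [backoff_factor * 2 ** (n - 1) for n in range(1, total + 1)]

    print(sleep_schedule(5, 1))    # [1, 2, 4, 8, 16]          original, ~31s
    print(sleep_schedule(8, 1.5))  # [1.5, 3, 6, ..., 192]     patch 01, several minutes
    print(sleep_schedule(7, 1))    # [1, 2, 4, 8, 16, 32, 64]  patch 04, ~2 minutes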
--- .github/workflows/pytest.yml | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c65fdc25..c2096fd1 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -34,11 +34,6 @@ jobs: working-directory: ${{ github.workspace }}/barge run: | bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d --with-thegraph > start_ocean.log & - - name: Install dependencies - working-directory: ${{ github.workspace }} - run: | - python -m pip install --upgrade pip - pip install -r requirements_dev.txt - name: Wait for contracts deployment and C2D cluster to be ready working-directory: ${{ github.workspace }}/barge run: | @@ -48,9 +43,15 @@ jobs: done - name: Verify deployments run: | + netstat -lnpt + nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 - + - name: Install dependencies + working-directory: ${{ github.workspace }} + run: | + python -m pip install --upgrade pip + pip install -r requirements_dev.txt - name: Test with pytest run: | coverage run --source ocean_provider -m pytest From 76f82e090aeb62b28cbc93db6e9037f76bf17e5e Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 14:52:07 +0200 Subject: [PATCH 06/83] Changed config.ini op serv url. --- config.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.ini b/config.ini index e7551205..3b8da200 100644 --- a/config.ini +++ b/config.ini @@ -15,6 +15,6 @@ ocean_provider.url = http://localhost:8030 provider.url = http://localhost:8030 provider.address = 0x00bd138abd70e2f00903268f3db08f2d25677c9e -operator_service.url = https://operator-api.operator.dev-ocean.com/ +operator_service.url = http://172.15.0.13:31000/ storage.path = ocean-provider.db downloads.path = consume-downloads From 348886636b854941781cf625c09aa3467de6b749 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 14:54:40 +0200 Subject: [PATCH 07/83] modified c2d envs. 
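Reads the operator service URL from the OPERATOR_SERVICE_URL environment
variable, the same variable that already gates get_c2d_environments, so a
single setting controls both the guard and the endpoint. A fallback to the
config.ini value would look roughly like this (hypothetical helper, not
part of this change):

    import os

    def operator_service_url(config):
        # Prefer the environment variable; fall back to config.ini.
        return os.getenv("OPERATOR_SERVICE_URL") or config.operator_service_url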
--- config.ini | 2 +- ocean_provider/utils/compute_environments.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/config.ini b/config.ini index 3b8da200..e7551205 100644 --- a/config.ini +++ b/config.ini @@ -15,6 +15,6 @@ ocean_provider.url = http://localhost:8030 provider.url = http://localhost:8030 provider.address = 0x00bd138abd70e2f00903268f3db08f2d25677c9e -operator_service.url = http://172.15.0.13:31000/ +operator_service.url = https://operator-api.operator.dev-ocean.com/ storage.path = ocean-provider.db downloads.path = consume-downloads diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 8b1331c0..4132788a 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,7 +12,7 @@ def get_compute_environments_endpoint(): - return urljoin(get_config().operator_service_url, "api/v1/operator/environments") + return urljoin(os.getenv("OPERATOR_SERVICE_URL"), "api/v1/operator/environments") def get_c2d_environments() -> List: From a111e934da6d0c4fc40f0e96bd06133ff6032819 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 14:56:57 +0200 Subject: [PATCH 08/83] Updated config.ini --- config.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.ini b/config.ini index e7551205..e81d99ec 100644 --- a/config.ini +++ b/config.ini @@ -15,6 +15,6 @@ ocean_provider.url = http://localhost:8030 provider.url = http://localhost:8030 provider.address = 0x00bd138abd70e2f00903268f3db08f2d25677c9e -operator_service.url = https://operator-api.operator.dev-ocean.com/ +operator_service.url = https://stagev4.c2d.oceanprotocol.com storage.path = ocean-provider.db downloads.path = consume-downloads From 1d4f54964908f423da7bf7ae8ae4ccb2d2795eac Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 15:01:22 +0200 Subject: [PATCH 09/83] refactor tweak. --- ocean_provider/utils/compute_environments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 4132788a..8b1331c0 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,7 +12,7 @@ def get_compute_environments_endpoint(): - return urljoin(os.getenv("OPERATOR_SERVICE_URL"), "api/v1/operator/environments") + return urljoin(get_config().operator_service_url, "api/v1/operator/environments") def get_c2d_environments() -> List: From d5c0a6b5fa32691f7fc25a65f935021cf1e45a40 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 15:34:36 +0200 Subject: [PATCH 10/83] print url. --- ocean_provider/utils/compute_environments.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 8b1331c0..10037cc6 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,6 +12,7 @@ def get_compute_environments_endpoint(): + print(f"op serv url: {get_config().operator_service_url}") return urljoin(get_config().operator_service_url, "api/v1/operator/environments") From 39b7bf4412768c407b2eaaabb807935f7cfc3ddd Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 16:02:03 +0200 Subject: [PATCH 11/83] updated url for c2d envs. 
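The hardcoded base keeps its trailing slash on purpose: urljoin resolves
the second argument relative to the last slash in the first, so a base URL
whose path lacks a trailing slash silently loses its final path segment.
For example:

    from urllib.parse import urljoin

    print(urljoin("http://example.com/operator", "api/v1/operator/environments"))
    # http://example.com/api/v1/operator/environments  ('operator' is dropped)
    print(urljoin("http://example.com/operator/", "api/v1/operator/environments"))
    # http://example.com/operator/api/v1/operator/environments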
--- ocean_provider/utils/compute_environments.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 10037cc6..5777aef2 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -13,7 +13,9 @@ def get_compute_environments_endpoint(): print(f"op serv url: {get_config().operator_service_url}") - return urljoin(get_config().operator_service_url, "api/v1/operator/environments") + return urljoin( + "https://stagev4.c2d.oceanprotocol.com/", "api/v1/operator/environments" + ) def get_c2d_environments() -> List: From 26bdb13c8d17f8a1f8d16786ef3aedcfd04101c7 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 16:47:31 +0200 Subject: [PATCH 12/83] revert changes. added slash --- config.ini | 2 +- ocean_provider/utils/compute_environments.py | 5 +---- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/config.ini b/config.ini index e81d99ec..7814acf7 100644 --- a/config.ini +++ b/config.ini @@ -15,6 +15,6 @@ ocean_provider.url = http://localhost:8030 provider.url = http://localhost:8030 provider.address = 0x00bd138abd70e2f00903268f3db08f2d25677c9e -operator_service.url = https://stagev4.c2d.oceanprotocol.com +operator_service.url = https://stagev4.c2d.oceanprotocol.com/ storage.path = ocean-provider.db downloads.path = consume-downloads diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 5777aef2..8b1331c0 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,10 +12,7 @@ def get_compute_environments_endpoint(): - print(f"op serv url: {get_config().operator_service_url}") - return urljoin( - "https://stagev4.c2d.oceanprotocol.com/", "api/v1/operator/environments" - ) + return urljoin(get_config().operator_service_url, "api/v1/operator/environments") def get_c2d_environments() -> List: From 8417c57978e032033391e754af88f1d161fb14c1 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 16:50:18 +0200 Subject: [PATCH 13/83] tweak --- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 5fc5d7ca..03d6228f 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,7 +7,7 @@ env = D:PROVIDER_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 D:TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 D:TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 - D:OPERATOR_SERVICE_URL=http://172.15.0.13:31000/ + D:OPERATOR_SERVICE_URL=http://localhost:31000/ D:ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json D:IPFS_GATEWAY=http://172.15.0.16:8080 D:ARWEAVE_GATEWAY=https://arweave.net/ From 6507dfafac343d407826dbca673810eb473ab754 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 16:51:58 +0200 Subject: [PATCH 14/83] tweak 22. 
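Note on the pytest.ini entries toggled here and in the nearby patches: the
D: prefix comes from pytest-env and marks a default, i.e. the value is
applied only when the variable is not already set, so an
OPERATOR_SERVICE_URL exported by the CI workflow still overrides whatever
this file says.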
--- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 03d6228f..34e2d8ef 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,7 +7,7 @@ env = D:PROVIDER_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 D:TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 D:TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 - D:OPERATOR_SERVICE_URL=http://localhost:31000/ + D:OPERATOR_SERVICE_URL=https://stagev4.c2d.oceanprotocol.com/ D:ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json D:IPFS_GATEWAY=http://172.15.0.16:8080 D:ARWEAVE_GATEWAY=https://arweave.net/ From f5c9781b37f170aae2b2ad4bb8f7d13fc42253c3 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 6 Mar 2023 17:27:20 +0200 Subject: [PATCH 15/83] revert --- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 34e2d8ef..5fc5d7ca 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,7 +7,7 @@ env = D:PROVIDER_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 D:TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 D:TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 - D:OPERATOR_SERVICE_URL=https://stagev4.c2d.oceanprotocol.com/ + D:OPERATOR_SERVICE_URL=http://172.15.0.13:31000/ D:ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json D:IPFS_GATEWAY=http://172.15.0.16:8080 D:ARWEAVE_GATEWAY=https://arweave.net/ From 8255033d5409f920f7a1cb6027ad66242229e10e Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 12:50:28 +0200 Subject: [PATCH 16/83] fix web3 package path. --- tests/test_smartcontract.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_smartcontract.py b/tests/test_smartcontract.py index 249e59fb..069fbf18 100644 --- a/tests/test_smartcontract.py +++ b/tests/test_smartcontract.py @@ -29,7 +29,7 @@ def test_download_smartcontract_asset(client, publisher_wallet, consumer_wallet, web3): # publish asset, that calls Router's swapOceanFee function (does not need params) router_address = get_contract_address( - get_config().address_file, "Router", web3.chain_id + get_config().address_file, "Router", web3.eth.chain_id ) abi = { "inputs": [], @@ -84,7 +84,7 @@ def test_download_smartcontract_asset_with_userdata( ): # publish asset, that calls Router's getOPCFee for a provided baseToken userdata router_address = get_contract_address( - get_config().address_file, "Router", web3.chain_id + get_config().address_file, "Router", web3.eth.chain_id ) abi = { "inputs": [{"internalType": "address", "name": "baseToken", "type": "address"}], From c2dcb391a4032b1dcb0442e9d76561a16a750cfc Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 12:54:35 +0200 Subject: [PATCH 17/83] added a dict --- ocean_provider/utils/compute_environments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 8b1331c0..5e77e847 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -21,7 +21,7 @@ def get_c2d_environments() -> List: standard_headers = {"Content-type": "application/json", "Connection": "close"} web3 = get_web3() - params = {"chainId": web3.eth.chain_id} + params = dict({"chainId": web3.eth.chain_id}) response = requests_session.get( 
get_compute_environments_endpoint(), headers=standard_headers, params=params ) From e595fe1e02d4e7960fdfd977517586c7c38f9f1a Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 13:02:45 +0200 Subject: [PATCH 18/83] more changes --- ocean_provider/utils/compute_environments.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 5e77e847..edd3abb1 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,6 +12,10 @@ def get_compute_environments_endpoint(): + print( + f"op serv local: {get_config().operator_service_url}\n," + f"urljoin result: {urljoin(get_config().operator_service_url, 'api/v1/operator/environments')}" + ) return urljoin(get_config().operator_service_url, "api/v1/operator/environments") @@ -19,7 +23,7 @@ def get_c2d_environments() -> List: if not os.getenv("OPERATOR_SERVICE_URL"): return [] - standard_headers = {"Content-type": "application/json", "Connection": "close"} + standard_headers = {"Content-type": "application/json"} web3 = get_web3() params = dict({"chainId": web3.eth.chain_id}) response = requests_session.get( From 19d925f1f13da3fa48a508c1bd099ba122778971 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 13:04:12 +0200 Subject: [PATCH 19/83] changed into capital T. --- ocean_provider/utils/compute_environments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index edd3abb1..77c5480c 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -23,7 +23,7 @@ def get_c2d_environments() -> List: if not os.getenv("OPERATOR_SERVICE_URL"): return [] - standard_headers = {"Content-type": "application/json"} + standard_headers = {"Content-Type": "application/json"} web3 = get_web3() params = dict({"chainId": web3.eth.chain_id}) response = requests_session.get( From 8107a133d7eb6d0bf2b33f92d1f2ff3ce3e018a6 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 13:04:58 +0200 Subject: [PATCH 20/83] removed additional dict --- ocean_provider/utils/compute_environments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 77c5480c..1f269cbd 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -25,7 +25,7 @@ def get_c2d_environments() -> List: standard_headers = {"Content-Type": "application/json"} web3 = get_web3() - params = dict({"chainId": web3.eth.chain_id}) + params = {"chainId": web3.eth.chain_id} response = requests_session.get( get_compute_environments_endpoint(), headers=standard_headers, params=params ) From c41444754d5dcde8c70310078a8d8e7d9a0ae5d3 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 13:40:19 +0200 Subject: [PATCH 21/83] added connection close back. 
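Connection: close makes each environments request use a fresh socket
instead of a pooled keep-alive connection that the operator service (or
the cluster's NodePort) may already have dropped, at the cost of one extra
TCP handshake per call. The test timeout also grows from six to eight
minutes so the provider-fee orders genuinely expire before reuse is
attempted.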
--- ocean_provider/utils/compute_environments.py | 4 ++-- tests/test_initialize.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 1f269cbd..e15fd729 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -23,9 +23,9 @@ def get_c2d_environments() -> List: if not os.getenv("OPERATOR_SERVICE_URL"): return [] - standard_headers = {"Content-Type": "application/json"} + standard_headers = {"Content-Type": "application/json", "Connection": "close"} web3 = get_web3() - params = {"chainId": web3.eth.chain_id} + params = dict({"chainId": web3.eth.chain_id}) response = requests_session.get( get_compute_environments_endpoint(), headers=standard_headers, params=params ) diff --git a/tests/test_initialize.py b/tests/test_initialize.py index 6de6ca04..470b406d 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -338,7 +338,7 @@ def test_initialize_compute_order_reused( assert "providerFee" in response.json["algorithm"] # Sleep long enough for orders to expire - timeout = time.time() + (60 * 6) + timeout = time.time() + (60 * 8) while True: payload["compute"]["validUntil"] = get_future_valid_until(short=True) + 30 response = client.post( From 69e914480ca0940dbd41d966f6e8aa4e8653ed1d Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 14:51:09 +0200 Subject: [PATCH 22/83] check fw. --- .github/workflows/pytest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c2096fd1..31d7ed4c 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -47,6 +47,7 @@ jobs: nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 + sudo ufw status | grep 31000 - name: Install dependencies working-directory: ${{ github.workspace }} run: | From c58ab6233c58ae9e2b9a8799070f135acf3392a8 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 19:50:15 +0200 Subject: [PATCH 23/83] Check with updated op serv. 
--- .github/workflows/pytest.yml | 2 +- ocean_provider/utils/compute_environments.py | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 31d7ed4c..0314c3de 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -23,6 +23,7 @@ jobs: with: repository: "oceanprotocol/barge" path: 'barge' + ref: update-copyright-year - name: Login to Docker Hub if: ${{ env.DOCKERHUB_PASSWORD && env.DOCKERHUB_USERNAME }} run: | @@ -47,7 +48,6 @@ jobs: nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 - sudo ufw status | grep 31000 - name: Install dependencies working-directory: ${{ github.workspace }} run: | diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index e15fd729..725824ba 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,10 +12,6 @@ def get_compute_environments_endpoint(): - print( - f"op serv local: {get_config().operator_service_url}\n," - f"urljoin result: {urljoin(get_config().operator_service_url, 'api/v1/operator/environments')}" - ) return urljoin(get_config().operator_service_url, "api/v1/operator/environments") From 707f5a5aba9390fe8912b44f5bcbb51be27892eb Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 20:58:02 +0200 Subject: [PATCH 24/83] added prints. modified workflow --- .github/workflows/pytest.yml | 4 +--- tests/test_compute.py | 2 ++ 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 0314c3de..fd2e8e5d 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -55,9 +55,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - coverage run --source ocean_provider -m pytest - coverage report - coverage xml + pytest tests/test_compute::test_compute_environments - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} diff --git a/tests/test_compute.py b/tests/test_compute.py index 36fd724f..29823cd3 100644 --- a/tests/test_compute.py +++ b/tests/test_compute.py @@ -563,6 +563,7 @@ def test_compute_delete_job( @pytest.mark.unit def test_compute_environments(client): compute_envs_endpoint = BaseURLs.SERVICES_URL + "/computeEnvironments" + print(f"compute envs: {compute_envs_endpoint}") retries = 2 response = None while retries != 0: @@ -574,6 +575,7 @@ def test_compute_environments(client): continue assert response, "Compute envs could not be retrieved." + print(f"res: {response}") for env in response.json: if env["priceMin"] == 0: From 1e8fc1a2d8fd8e93d4ed11735c1b8400251c6ecb Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 21:14:47 +0200 Subject: [PATCH 25/83] tinny fix. 
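pytest selects a single test with the node-id syntax
path/to/file.py::test_name; without the .py suffix in the previous patch
the path fragment did not resolve to a module, so collection failed.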
--- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index fd2e8e5d..0dea4dc2 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -55,7 +55,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_compute::test_compute_environments + pytest tests/test_compute.py::test_compute_environments - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From fe7930e5f726c9fd939361e2220b9bf829229846 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 21:25:05 +0200 Subject: [PATCH 26/83] eval. --- ocean_provider/utils/compute_environments.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 725824ba..46c0bbc9 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,7 +12,7 @@ def get_compute_environments_endpoint(): - return urljoin(get_config().operator_service_url, "api/v1/operator/environments") + return eval(urljoin(get_config().operator_service_url, "api/v1/operator/environments")) def get_c2d_environments() -> List: @@ -21,7 +21,7 @@ def get_c2d_environments() -> List: standard_headers = {"Content-Type": "application/json", "Connection": "close"} web3 = get_web3() - params = dict({"chainId": web3.eth.chain_id}) + params = {"chainId": web3.eth.chain_id} response = requests_session.get( get_compute_environments_endpoint(), headers=standard_headers, params=params ) From 5ace7bd3c8874a5c309a602a3d6ac3efc53f9631 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 21:40:45 +0200 Subject: [PATCH 27/83] debug string format. --- ocean_provider/utils/compute_environments.py | 10 +++++++++- tests/test_compute.py | 4 +--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 46c0bbc9..aa8e417f 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,7 +12,15 @@ def get_compute_environments_endpoint(): - return eval(urljoin(get_config().operator_service_url, "api/v1/operator/environments")) + url2 = urljoin(get_config().operator_service_url, "api/v1/operator/environments") + print( + f"url: {url2}\n eval: {eval(url2)}\n type: {type(url2)} type eval: {type(eval(url2))}" + ) + url = get_config().operator_service_url + "api/v1/operator/environments" + print( + f"url: {url}\n eval: {eval(url)}\n type: {type(url)} type eval: {type(eval(url))}" + ) + return eval(url) def get_c2d_environments() -> List: diff --git a/tests/test_compute.py b/tests/test_compute.py index 29823cd3..f989a641 100644 --- a/tests/test_compute.py +++ b/tests/test_compute.py @@ -563,7 +563,6 @@ def test_compute_delete_job( @pytest.mark.unit def test_compute_environments(client): compute_envs_endpoint = BaseURLs.SERVICES_URL + "/computeEnvironments" - print(f"compute envs: {compute_envs_endpoint}") retries = 2 response = None while retries != 0: @@ -574,8 +573,7 @@ def test_compute_environments(client): retries -= 1 continue - assert response, "Compute envs could not be retrieved." - print(f"res: {response}") + assert response.status == 200, "Compute envs could not be retrieved." 
for env in response.json: if env["priceMin"] == 0: From 87ed8b9a010b0016830371411d36c52894a78c84 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 7 Mar 2023 21:59:04 +0200 Subject: [PATCH 28/83] tweak 3 --- ocean_provider/utils/compute_environments.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index aa8e417f..4048922e 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,10 +12,10 @@ def get_compute_environments_endpoint(): - url2 = urljoin(get_config().operator_service_url, "api/v1/operator/environments") - print( - f"url: {url2}\n eval: {eval(url2)}\n type: {type(url2)} type eval: {type(eval(url2))}" - ) + # url2 = urljoin(get_config().operator_service_url, "api/v1/operator/environments") + # print( + # f"url: {url2}\n eval: {eval(url2)}\n type: {type(url2)} type eval: {type(eval(url2))}" + # ) url = get_config().operator_service_url + "api/v1/operator/environments" print( f"url: {url}\n eval: {eval(url)}\n type: {type(url)} type eval: {type(eval(url))}" From 081cc8664d851ddcb7c080375c0accba4d0f7317 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Wed, 8 Mar 2023 02:28:52 +0200 Subject: [PATCH 29/83] removed eval. --- ocean_provider/utils/compute_environments.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index 4048922e..b67054c1 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -12,15 +12,9 @@ def get_compute_environments_endpoint(): - # url2 = urljoin(get_config().operator_service_url, "api/v1/operator/environments") - # print( - # f"url: {url2}\n eval: {eval(url2)}\n type: {type(url2)} type eval: {type(eval(url2))}" - # ) url = get_config().operator_service_url + "api/v1/operator/environments" - print( - f"url: {url}\n eval: {eval(url)}\n type: {type(url)} type eval: {type(eval(url))}" - ) - return eval(url) + print(f"url: {url}\n") + return url def get_c2d_environments() -> List: From cd442c7f6174806972532526574395c942f95249 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Wed, 8 Mar 2023 02:30:18 +0200 Subject: [PATCH 30/83] removed string escape. --- ocean_provider/utils/compute_environments.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index b67054c1..d00fee2e 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -14,7 +14,7 @@ def get_compute_environments_endpoint(): url = get_config().operator_service_url + "api/v1/operator/environments" print(f"url: {url}\n") - return url + return url.decode("string_escape") def get_c2d_environments() -> List: From 20c84339983203f6596dc0416559ae3644f25712 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Wed, 8 Mar 2023 02:45:33 +0200 Subject: [PATCH 31/83] removed urljoin. updated workflow. 
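Both debugging detours were dead ends on Python 3: eval() on a bare URL
string raises SyntaxError, since a URL is not a Python expression, and
"string_escape" was a Python 2 codec (str objects have no decode method in
Python 3), so the endpoint goes back to plain string concatenation.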
--- .github/workflows/pytest.yml | 5 +++-- ocean_provider/utils/compute_environments.py | 5 +---- tests/test_compute.py | 2 +- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 0dea4dc2..c2096fd1 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -23,7 +23,6 @@ jobs: with: repository: "oceanprotocol/barge" path: 'barge' - ref: update-copyright-year - name: Login to Docker Hub if: ${{ env.DOCKERHUB_PASSWORD && env.DOCKERHUB_USERNAME }} run: | @@ -55,7 +54,9 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_compute.py::test_compute_environments + coverage run --source ocean_provider -m pytest + coverage report + coverage xml - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index d00fee2e..5c119413 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -1,6 +1,5 @@ import os from typing import List -from urllib.parse import urljoin from ocean_provider.requests_session import get_requests_session @@ -12,9 +11,7 @@ def get_compute_environments_endpoint(): - url = get_config().operator_service_url + "api/v1/operator/environments" - print(f"url: {url}\n") - return url.decode("string_escape") + return get_config().operator_service_url + "api/v1/operator/environments" def get_c2d_environments() -> List: diff --git a/tests/test_compute.py b/tests/test_compute.py index f989a641..c6f2b924 100644 --- a/tests/test_compute.py +++ b/tests/test_compute.py @@ -573,7 +573,7 @@ def test_compute_environments(client): retries -= 1 continue - assert response.status == 200, "Compute envs could not be retrieved." + assert response.status == "200 OK", "Compute envs could not be retrieved." 
for env in response.json: if env["priceMin"] == 0: From 96fa409cfb56fdab7c9cfd6d4a1f7235c6919a46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C4=83lina=20Cenan?= Date: Thu, 9 Mar 2023 08:40:09 +0200 Subject: [PATCH 32/83] Update pytest.yml --- .github/workflows/pytest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c2096fd1..4eedd5ef 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -11,6 +11,7 @@ on: jobs: build: runs-on: ubuntu-latest + container: ubuntu steps: - name: Setup Provider uses: actions/checkout@v2 From bddb25f5b112f1a1aa5e0c6605793d27bac8e096 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?C=C4=83lina=20Cenan?= Date: Thu, 9 Mar 2023 08:43:21 +0200 Subject: [PATCH 33/83] Update pytest.yml --- .github/workflows/pytest.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 4eedd5ef..c2096fd1 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -11,7 +11,6 @@ on: jobs: build: runs-on: ubuntu-latest - container: ubuntu steps: - name: Setup Provider uses: actions/checkout@v2 From a62e1b3a3693ba68bfc1fc2f53e6ddc8bcbb52ae Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 13:39:51 +0200 Subject: [PATCH 34/83] added user agent --- ocean_provider/utils/compute_environments.py | 6 +++++- tests/test_compute.py | 13 +++++++------ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index ce20b538..d0fb319d 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -18,7 +18,11 @@ def get_c2d_environments() -> List: if not os.getenv("OPERATOR_SERVICE_URL"): return [] - standard_headers = {"Content-Type": "application/json", "Connection": "close"} + standard_headers = { + "Content-Type": "application/json", + "Connection": "close", + "User-Agent": "Definitely-Not-Requests", + } web3 = get_web3() params = {"chainId": web3.eth.chain_id} response = requests.get( diff --git a/tests/test_compute.py b/tests/test_compute.py index 45d98da1..8185b4a0 100644 --- a/tests/test_compute.py +++ b/tests/test_compute.py @@ -292,6 +292,7 @@ def test_compute(client, publisher_wallet, consumer_wallet, free_c2d_env): @pytest.mark.integration +@pytest.skip(reason="C2D connection failed. Need fix in #610") def test_compute_arweave(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -329,7 +330,7 @@ def test_compute_arweave(client, publisher_wallet, consumer_wallet, free_c2d_env @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") +@pytest.skip(reason="C2D connection failed. Need fix in #610") def test_compute_diff_provider(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -365,7 +366,7 @@ def test_compute_diff_provider(client, publisher_wallet, consumer_wallet, free_c @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") +@pytest.skip(reason="C2D connection failed. 
Need fix in #610") def test_compute_allow_all_published( client, publisher_wallet, consumer_wallet, free_c2d_env ): @@ -413,7 +414,7 @@ def test_compute_allow_all_published( @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") +@pytest.skip(reason="C2D connection failed. Need fix in #610") def test_compute_additional_input( client, publisher_wallet, consumer_wallet, monkeypatch, free_c2d_env ): @@ -500,7 +501,7 @@ def test_compute_additional_input( @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") +@pytest.skip(reason="C2D connection failed. Need fix in #610") def test_compute_delete_job( client, publisher_wallet, consumer_wallet, consumer_address, free_c2d_env ): @@ -566,7 +567,7 @@ def test_compute_delete_job( @pytest.mark.unit -@skip_on(AssertionError, "C2D connection failed. Need fix in #610") +@pytest.skip(reason="C2D connection failed. Need fix in #610") def test_compute_environments(client): compute_envs_endpoint = BaseURLs.SERVICES_URL + "/computeEnvironments" retries = 2 @@ -637,7 +638,7 @@ def test_compute_paid_env( @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") +@pytest.skip(reason="C2D connection failed. Need fix in #610") def test_compute_auth_token(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( From 6887a5c032c48b8f621c6a5d1fcb165b4be74342 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 14:02:10 +0200 Subject: [PATCH 35/83] fix pytest skip --- tests/test_compute.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test_compute.py b/tests/test_compute.py index 8185b4a0..74f09299 100644 --- a/tests/test_compute.py +++ b/tests/test_compute.py @@ -292,7 +292,7 @@ def test_compute(client, publisher_wallet, consumer_wallet, free_c2d_env): @pytest.mark.integration -@pytest.skip(reason="C2D connection failed. Need fix in #610") +@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_arweave(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -330,7 +330,7 @@ def test_compute_arweave(client, publisher_wallet, consumer_wallet, free_c2d_env @pytest.mark.integration -@pytest.skip(reason="C2D connection failed. Need fix in #610") +@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_diff_provider(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -366,7 +366,7 @@ def test_compute_diff_provider(client, publisher_wallet, consumer_wallet, free_c @pytest.mark.integration -@pytest.skip(reason="C2D connection failed. Need fix in #610") +@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_allow_all_published( client, publisher_wallet, consumer_wallet, free_c2d_env ): @@ -414,7 +414,7 @@ def test_compute_allow_all_published( @pytest.mark.integration -@pytest.skip(reason="C2D connection failed. Need fix in #610") +@pytest.mark.skip(reason="C2D connection failed. 
Need fix in #610") def test_compute_additional_input( client, publisher_wallet, consumer_wallet, monkeypatch, free_c2d_env ): @@ -501,7 +501,7 @@ def test_compute_additional_input( @pytest.mark.integration -@pytest.skip(reason="C2D connection failed. Need fix in #610") +@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_delete_job( client, publisher_wallet, consumer_wallet, consumer_address, free_c2d_env ): @@ -567,7 +567,7 @@ def test_compute_delete_job( @pytest.mark.unit -@pytest.skip(reason="C2D connection failed. Need fix in #610") +@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_environments(client): compute_envs_endpoint = BaseURLs.SERVICES_URL + "/computeEnvironments" retries = 2 @@ -638,7 +638,7 @@ def test_compute_paid_env( @pytest.mark.integration -@pytest.skip(reason="C2D connection failed. Need fix in #610") +@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_auth_token(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( From 914ff450f8ba8beb5ccc55388177ccd92ffaedb8 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 14:36:40 +0200 Subject: [PATCH 36/83] add e different version of python. --- .github/workflows/pytest.yml | 6 +++--- compose-env-values | 4 ++-- start_provider.sh | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c2096fd1..62f94a3d 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -14,10 +14,10 @@ jobs: steps: - name: Setup Provider uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-version: '3.8' + python-version: '3.10' - uses: actions/checkout@v2 name: Checkout Barge with: diff --git a/compose-env-values b/compose-env-values index 989bab2b..98c91935 100644 --- a/compose-env-values +++ b/compose-env-values @@ -5,9 +5,9 @@ PARITY_KEYFILE1=/ocean-provider/tests/resources/consumer_key_file.json PROVIDER_ADDRESS=068ed00cf0441e4829d9784fcbe7b9e26d4bd8d0 NETWORK_URL=wss://rinkeby.infura.io/ws/v3/357f2fe737db4304bd2f7285c5602d0d -AQUARIUS_URL=https://aquarius.marketplace.dev-ocean.com +AQUARIUS_URL=https://v4.aquarius.oceanprotocol.com OCEAN_PROVIDER_URL=http://0.0.0.0:8030 -OPERATOR_SERVICE_URL=https://operator-api.operator.dev-ocean.com/ +OPERATOR_SERVICE_URL=https://stagev4.c2d.oceanprotocol.com AZURE_ACCOUNT_NAME= AZURE_ACCOUNT_KEY= diff --git a/start_provider.sh b/start_provider.sh index 83e0259f..18c63329 100755 --- a/start_provider.sh +++ b/start_provider.sh @@ -2,7 +2,7 @@ export PROVIDER_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 export TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 export TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 -export OPERATOR_SERVICE_URL=https://c2d-dev.operator.oceanprotocol.com/ +export OPERATOR_SERVICE_URL=http://172.15.0.13:31000 export ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json export IPFS_GATEWAY=http://172.15.0.16:8080 export AUTHORIZED_DECRYPTERS=[] From 819171480227835039edc1754ce43a912dd4f3ec Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 14:42:24 +0200 Subject: [PATCH 37/83] add docker ps. 
--- .github/workflows/pytest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 62f94a3d..dc70998f 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -47,6 +47,7 @@ jobs: nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 + docker ps - name: Install dependencies working-directory: ${{ github.workspace }} run: | From 073445a2457e98f0bb1d9e7ed389b076b84d34c4 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 15:16:25 +0200 Subject: [PATCH 38/83] tweaks to the workflow. --- .github/workflows/pytest.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dc70998f..71872437 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -13,12 +13,12 @@ jobs: runs-on: ubuntu-latest steps: - name: Setup Provider - uses: actions/checkout@v2 - - name: Set up Python 3.10 + uses: actions/checkout@v3 + - name: Set up Python 3.9 uses: actions/setup-python@v4 with: - python-version: '3.10' - - uses: actions/checkout@v2 + python-version: '3.9' + - uses: actions/checkout@v3 name: Checkout Barge with: repository: "oceanprotocol/barge" @@ -72,6 +72,6 @@ jobs: if: ${{ success() && github.event_name == 'pull_request'}} steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Build run: docker build -t "testimage:latest" . From 4ec38f2fa039f366600a9427d0f77a8ab3a42410 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 15:18:37 +0200 Subject: [PATCH 39/83] stop fw. --- .github/workflows/pytest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 71872437..1e38b2e5 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -43,6 +43,7 @@ jobs: done - name: Verify deployments run: | + service firewalld stop netstat -lnpt nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json From 1ee27e5ff58a52ed03aa65c49887c3009b7cbcd4 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 15:23:05 +0200 Subject: [PATCH 40/83] add traceroute --- .github/workflows/pytest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 1e38b2e5..c4f57b07 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -49,6 +49,7 @@ jobs: cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 docker ps + traceroute -p 31000 - name: Install dependencies working-directory: ${{ github.workspace }} run: | From a0a899efb7306c9be85dbc241808b48612149a81 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 15:26:21 +0200 Subject: [PATCH 41/83] add traceroute host --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c4f57b07..eec998e2 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -49,7 +49,7 @@ jobs: cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 docker ps - traceroute -p 31000 + traceroute 172.15.0.13 -p 31000 - name: Install dependencies working-directory: ${{ github.workspace }} run: | From 37b31350290862110f8987d684d247968c4f60a0 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 
2023 15:30:03 +0200 Subject: [PATCH 42/83] chaned op serv url. --- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index c5afc042..c04999e2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,7 +6,7 @@ env = D:PROVIDER_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 D:TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 D:TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 - D:OPERATOR_SERVICE_URL=http://172.15.0.13:31000/ + D:OPERATOR_SERVICE_URL=http://ocean_kindcluster_1:31000/ D:ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json D:IPFS_GATEWAY=http://172.15.0.16:8080 D:ARWEAVE_GATEWAY=https://arweave.net/ From 7b87b6b8835f3bb70562ce6215e606520b67ed9c Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 15:32:33 +0200 Subject: [PATCH 43/83] remove stop fw. --- .github/workflows/pytest.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index eec998e2..4173e7c3 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -43,7 +43,6 @@ jobs: done - name: Verify deployments run: | - service firewalld stop netstat -lnpt nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json From 39ca9686e608fa859d607c1ad55c8931124d2c86 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 16:14:45 +0200 Subject: [PATCH 44/83] remove traceroute --- .github/workflows/pytest.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 4173e7c3..71872437 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -48,7 +48,6 @@ jobs: cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 docker ps - traceroute 172.15.0.13 -p 31000 - name: Install dependencies working-directory: ${{ github.workspace }} run: | From 5bd28f56cd1ae569c80bbf99675e7a5a8bed1ee8 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 17:12:36 +0200 Subject: [PATCH 45/83] added self hosted runner. --- .github/workflows/pytest.yml | 13 ++++++------- pytest.ini | 2 +- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 71872437..6fdb5e7e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -6,19 +6,18 @@ name: Provider tests on: - push - - pull_request jobs: build: - runs-on: ubuntu-latest + runs-on: self-hosted steps: - name: Setup Provider uses: actions/checkout@v3 - - name: Set up Python 3.9 - uses: actions/setup-python@v4 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 with: - python-version: '3.9' - - uses: actions/checkout@v3 + python-version: '3.8' + - uses: actions/checkout@v2 name: Checkout Barge with: repository: "oceanprotocol/barge" @@ -72,6 +71,6 @@ jobs: if: ${{ success() && github.event_name == 'pull_request'}} steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v2 - name: Build run: docker build -t "testimage:latest" . 
diff --git a/pytest.ini b/pytest.ini index c04999e2..c5afc042 100644 --- a/pytest.ini +++ b/pytest.ini @@ -6,7 +6,7 @@ env = D:PROVIDER_PRIVATE_KEY=0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215 D:TEST_PRIVATE_KEY1=0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209 D:TEST_PRIVATE_KEY2=0x5d75837394b078ce97bc289fa8d75e21000573520bfa7784a9d28ccaae602bf8 - D:OPERATOR_SERVICE_URL=http://ocean_kindcluster_1:31000/ + D:OPERATOR_SERVICE_URL=http://172.15.0.13:31000/ D:ADDRESS_FILE=~/.ocean/ocean-contracts/artifacts/address.json D:IPFS_GATEWAY=http://172.15.0.16:8080 D:ARWEAVE_GATEWAY=https://arweave.net/ From 4c437ad5cc6f7b403bb5e31983bc848784bae56f Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 18:22:20 +0200 Subject: [PATCH 46/83] revert code changes from debug process. --- ocean_provider/utils/compute_environments.py | 8 +++----- tests/test_compute.py | 9 --------- tests/test_initialize.py | 5 ----- 3 files changed, 3 insertions(+), 19 deletions(-) diff --git a/ocean_provider/utils/compute_environments.py b/ocean_provider/utils/compute_environments.py index d0fb319d..2b0386fc 100644 --- a/ocean_provider/utils/compute_environments.py +++ b/ocean_provider/utils/compute_environments.py @@ -1,7 +1,6 @@ import os from typing import List - -import requests +from urllib.parse import urljoin from ocean_provider.requests_session import get_requests_session from ocean_provider.utils.address import get_provider_fee_token @@ -11,7 +10,7 @@ def get_compute_environments_endpoint(): - return os.getenv("OPERATOR_SERVICE_URL") + "api/v1/operator/environments" + return urljoin(os.getenv("OPERATOR_SERVICE_URL"), "api/v1/operator/environments") def get_c2d_environments() -> List: @@ -21,11 +20,10 @@ def get_c2d_environments() -> List: standard_headers = { "Content-Type": "application/json", "Connection": "close", - "User-Agent": "Definitely-Not-Requests", } web3 = get_web3() params = {"chainId": web3.eth.chain_id} - response = requests.get( + response = requests_session.get( get_compute_environments_endpoint(), headers=standard_headers, params=params ) diff --git a/tests/test_compute.py b/tests/test_compute.py index 74f09299..c6f2b924 100644 --- a/tests/test_compute.py +++ b/tests/test_compute.py @@ -28,7 +28,6 @@ mint_100_datatokens, post_to_compute, start_order, - skip_on, ) import pytest @@ -292,7 +291,6 @@ def test_compute(client, publisher_wallet, consumer_wallet, free_c2d_env): @pytest.mark.integration -@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_arweave(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -330,7 +328,6 @@ def test_compute_arweave(client, publisher_wallet, consumer_wallet, free_c2d_env @pytest.mark.integration -@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_diff_provider(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -366,7 +363,6 @@ def test_compute_diff_provider(client, publisher_wallet, consumer_wallet, free_c @pytest.mark.integration -@pytest.mark.skip(reason="C2D connection failed. 
Need fix in #610") def test_compute_allow_all_published( client, publisher_wallet, consumer_wallet, free_c2d_env ): @@ -414,7 +410,6 @@ def test_compute_allow_all_published( @pytest.mark.integration -@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_additional_input( client, publisher_wallet, consumer_wallet, monkeypatch, free_c2d_env ): @@ -501,7 +496,6 @@ def test_compute_additional_input( @pytest.mark.integration -@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_delete_job( client, publisher_wallet, consumer_wallet, consumer_address, free_c2d_env ): @@ -567,7 +561,6 @@ def test_compute_delete_job( @pytest.mark.unit -@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_environments(client): compute_envs_endpoint = BaseURLs.SERVICES_URL + "/computeEnvironments" retries = 2 @@ -588,7 +581,6 @@ def test_compute_environments(client): @pytest.mark.integration -# @skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") def test_compute_paid_env( client, publisher_wallet, consumer_wallet, paid_c2d_env, web3 ): @@ -638,7 +630,6 @@ def test_compute_paid_env( @pytest.mark.integration -@pytest.mark.skip(reason="C2D connection failed. Need fix in #610") def test_compute_auth_token(client, publisher_wallet, consumer_wallet, free_c2d_env): valid_until = get_future_valid_until() ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( diff --git a/tests/test_initialize.py b/tests/test_initialize.py index d9ef4843..422707b0 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -7,7 +7,6 @@ import time import pytest -import requests from ocean_provider.constants import BaseURLs from ocean_provider.utils.currency import to_wei @@ -16,7 +15,6 @@ from tests.helpers.compute_helpers import ( build_and_send_ddo_with_compute_service, get_future_valid_until, - skip_on, ) from tests.test_helpers import ( get_dataset_ddo_disabled, @@ -195,7 +193,6 @@ def test_can_not_initialize_compute_service_with_simple_initialize( @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") def test_initialize_compute_works( client, publisher_wallet, consumer_wallet, free_c2d_env ): @@ -249,7 +246,6 @@ def test_initialize_compute_works( @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") def test_initialize_compute_order_reused( client, publisher_wallet, consumer_wallet, free_c2d_env ): @@ -378,7 +374,6 @@ def test_initialize_compute_order_reused( @pytest.mark.integration -@skip_on(requests.exceptions.ConnectionError, "C2D connection failed. Need fix in #610") def test_initialize_compute_paid_env( client, publisher_wallet, consumer_wallet, paid_c2d_env ): From 58d55b572d90bf26f91a8da7a6cda069618124a9 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 21:06:55 +0200 Subject: [PATCH 47/83] removed docker ps. 
--- .github/workflows/pytest.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 3e65b439..cb1ce377 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -47,7 +47,6 @@ jobs: nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 - docker ps - name: Install dependencies working-directory: ${{ github.workspace }} run: | From f738c53cbce7015cbb13b40c21bf0d3d92ceb44d Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 13 Mar 2023 22:32:49 +0200 Subject: [PATCH 48/83] downgrade ubuntu. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index cb1ce377..1deffdf8 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -10,7 +10,7 @@ on: jobs: build: - runs-on: self-hosted + runs-on: ubuntu-20.04 steps: - name: Setup Provider uses: actions/checkout@v3 From 8c67a2f7a1542084bcd377af4dd790f64b509cd3 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 14 Mar 2023 03:24:14 +0200 Subject: [PATCH 49/83] added back self-hosted runner. --- .github/workflows/pytest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 1deffdf8..1f99467d 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -10,7 +10,7 @@ on: jobs: build: - runs-on: ubuntu-20.04 + runs-on: self-hosted steps: - name: Setup Provider uses: actions/checkout@v3 @@ -66,7 +66,7 @@ jobs: CC_TEST_REPORTER_ID: b0d75c25d5176c59e8ea665bf74396d9ee1bdf2c97f11ccc6869f9e91d80a6c7 dockerbuild: - runs-on: ubuntu-latest + runs-on: self-hosted needs: [build] if: ${{ success() && github.event_name == 'pull_request'}} steps: From f516e81c353c4be89deb267e44eb853eb09466da Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 14 Mar 2023 22:18:15 +0200 Subject: [PATCH 50/83] Try with a larger runner. --- .github/workflows/pytest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 1f99467d..d603923e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -10,7 +10,7 @@ on: jobs: build: - runs-on: self-hosted + runs-on: ubuntu-20.04-16core steps: - name: Setup Provider uses: actions/checkout@v3 @@ -66,7 +66,7 @@ jobs: CC_TEST_REPORTER_ID: b0d75c25d5176c59e8ea665bf74396d9ee1bdf2c97f11ccc6869f9e91d80a6c7 dockerbuild: - runs-on: self-hosted + runs-on: ubuntu-20.04-16core needs: [build] if: ${{ success() && github.event_name == 'pull_request'}} steps: From 0ad4ff090978851ca92edba36c99f3f1c07837de Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 14 Mar 2023 22:52:47 +0200 Subject: [PATCH 51/83] try with ubuntu 22 with 4 cores. 
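For the runner hopping in patches 48-51: runs-on matches runner labels.
ubuntu-20.04 and ubuntu-22.04 are GitHub-hosted images, self-hosted
targets whatever runners are registered under that label, and names like
ubuntu-20.04-16core or ubuntu-22.04-4core resolve only if a larger runner
with exactly that label has been configured for the organization;
otherwise the job just sits in the queue.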
--- .github/workflows/pytest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index d603923e..54992a74 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -10,7 +10,7 @@ on: jobs: build: - runs-on: ubuntu-20.04-16core + runs-on: ubuntu-22.04-4core steps: - name: Setup Provider uses: actions/checkout@v3 @@ -66,7 +66,7 @@ jobs: CC_TEST_REPORTER_ID: b0d75c25d5176c59e8ea665bf74396d9ee1bdf2c97f11ccc6869f9e91d80a6c7 dockerbuild: - runs-on: ubuntu-20.04-16core + runs-on: ubuntu-22.04-4core needs: [build] if: ${{ success() && github.event_name == 'pull_request'}} steps: From 1ceef6008af8daf8ae42d303af7c4e5c643a588c Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Wed, 15 Mar 2023 01:54:35 +0200 Subject: [PATCH 52/83] Return to self-hosted env. --- .github/workflows/pytest.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 54992a74..1f99467d 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -10,7 +10,7 @@ on: jobs: build: - runs-on: ubuntu-22.04-4core + runs-on: self-hosted steps: - name: Setup Provider uses: actions/checkout@v3 @@ -66,7 +66,7 @@ jobs: CC_TEST_REPORTER_ID: b0d75c25d5176c59e8ea665bf74396d9ee1bdf2c97f11ccc6869f9e91d80a6c7 dockerbuild: - runs-on: ubuntu-22.04-4core + runs-on: self-hosted needs: [build] if: ${{ success() && github.event_name == 'pull_request'}} steps: From b73318738966ff8dacb6802c093c266c9e0908d4 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 3 Apr 2023 12:49:00 +0300 Subject: [PATCH 53/83] Removed tests for debugging. --- .github/workflows/pytest.yml | 4 +- ocean_provider/utils/test/test_accounts.py | 54 - ocean_provider/utils/test/test_address.py | 31 - ocean_provider/utils/test/test_basics.py | 156 --- ocean_provider/utils/test/test_compute.py | 30 - ocean_provider/utils/test/test_credentials.py | 90 -- ocean_provider/utils/test/test_currency.py | 96 -- ocean_provider/utils/test/test_encryption.py | 35 - .../utils/test/test_error_responses.py | 70 -- .../utils/test/test_provider_fees.py | 63 - ocean_provider/utils/test/test_url.py | 93 -- ocean_provider/utils/test/test_util.py | 445 -------- .../validation/test/test_algo_validation.py | 1012 ----------------- 13 files changed, 2 insertions(+), 2177 deletions(-) delete mode 100644 ocean_provider/utils/test/test_accounts.py delete mode 100644 ocean_provider/utils/test/test_address.py delete mode 100644 ocean_provider/utils/test/test_basics.py delete mode 100644 ocean_provider/utils/test/test_compute.py delete mode 100644 ocean_provider/utils/test/test_credentials.py delete mode 100644 ocean_provider/utils/test/test_currency.py delete mode 100644 ocean_provider/utils/test/test_encryption.py delete mode 100644 ocean_provider/utils/test/test_error_responses.py delete mode 100644 ocean_provider/utils/test/test_provider_fees.py delete mode 100644 ocean_provider/utils/test/test_url.py delete mode 100644 ocean_provider/utils/test/test_util.py delete mode 100644 ocean_provider/validation/test/test_algo_validation.py diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 1f99467d..53e37eda 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -10,7 +10,7 @@ on: jobs: build: - runs-on: self-hosted + runs-on: ubuntu-latest steps: - name: Setup Provider uses: actions/checkout@v3 @@ -66,7 +66,7 @@ jobs: CC_TEST_REPORTER_ID: 
b0d75c25d5176c59e8ea665bf74396d9ee1bdf2c97f11ccc6869f9e91d80a6c7 dockerbuild: - runs-on: self-hosted + runs-on: ubuntu-latest needs: [build] if: ${{ success() && github.event_name == 'pull_request'}} steps: diff --git a/ocean_provider/utils/test/test_accounts.py b/ocean_provider/utils/test/test_accounts.py deleted file mode 100644 index 0e9f2943..00000000 --- a/ocean_provider/utils/test/test_accounts.py +++ /dev/null @@ -1,54 +0,0 @@ -import os -from datetime import datetime, timedelta - -import pytest -from ocean_provider.exceptions import InvalidSignatureError -from ocean_provider.user_nonce import update_nonce -from ocean_provider.utils.accounts import ( - get_private_key, - sign_message, - verify_signature, -) - - -@pytest.mark.unit -def test_get_private_key(publisher_wallet): - assert ( - str(get_private_key(publisher_wallet)).lower() - == os.getenv("TEST_PRIVATE_KEY1").lower() - ) - - -@pytest.mark.unit -def test_verify_signature(consumer_wallet, publisher_wallet): - update_nonce(consumer_wallet.address, datetime.utcnow().timestamp()) - - nonce = datetime.utcnow().timestamp() - did = "did:op:test" - msg = f"{consumer_wallet.address}{did}{nonce}" - msg_w_nonce = f"{consumer_wallet.address}{did}" - signature = sign_message(msg, consumer_wallet) - - assert verify_signature(consumer_wallet.address, signature, msg_w_nonce, nonce) - - nonce = datetime.utcnow().timestamp() - did = "did:op:test" - msg = f"{consumer_wallet.address}{did}{nonce}" - msg_w_nonce = f"{consumer_wallet.address}{did}" - signature = sign_message(msg, consumer_wallet) - - with pytest.raises(InvalidSignatureError) as e_info: - verify_signature(publisher_wallet.address, signature, msg_w_nonce, nonce) - - assert f"Invalid signature {signature} for ethereum address" in e_info.value.args[0] - - nonce = (datetime.utcnow() - timedelta(days=7)).timestamp() - did = "did:op:test" - msg = f"{consumer_wallet.address}{did}{nonce}" - msg_w_nonce = f"{consumer_wallet.address}{did}" - signature = sign_message(msg, consumer_wallet) - # expired nonce - with pytest.raises(InvalidSignatureError) as e_info: - verify_signature(consumer_wallet.address, signature, msg_w_nonce, nonce) - - assert e_info.value.args[0].startswith("Invalid signature expected nonce") diff --git a/ocean_provider/utils/test/test_address.py b/ocean_provider/utils/test/test_address.py deleted file mode 100644 index ce46ce6d..00000000 --- a/ocean_provider/utils/test/test_address.py +++ /dev/null @@ -1,31 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import os - -import pytest -from ocean_provider.utils.address import get_address_json, get_contract_address - - -@pytest.mark.unit -def test_get_address_json(): - address_json = get_address_json(os.getenv("ADDRESS_FILE")) - assert address_json["development"]["chainId"] == 8996 - assert address_json["development"]["Ocean"].startswith("0x") - - -@pytest.mark.unit -def test_get_contract_address(): - assert get_contract_address( - os.getenv("ADDRESS_FILE"), "ERC721Factory", 8996 - ).startswith("0x") - - -@pytest.mark.unit -def test_get_address_json_missing_var(monkeypatch): - monkeypatch.delenv("ADDRESS_FILE") - address_json = get_address_json(os.getenv("ADDRESS_FILE")) - - assert address_json["goerli"]["chainId"] == 5 - assert address_json["goerli"]["Ocean"].startswith("0x") diff --git a/ocean_provider/utils/test/test_basics.py b/ocean_provider/utils/test/test_basics.py deleted file mode 100644 index e5ecc41c..00000000 --- a/ocean_provider/utils/test/test_basics.py +++ 
/dev/null @@ -1,156 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -from datetime import datetime, timedelta - -import pytest -from ocean_provider.utils.basics import ( - decode_keyed, - get_configured_chains, - get_provider_addresses, - get_provider_private_key, - get_value_from_decoded_env, - get_web3, - get_web3_connection_provider, - send_ether, - validate_timestamp, -) -from ocean_provider.utils.currency import to_wei - - -@pytest.mark.unit -def test_get_web3_connection_provider(monkeypatch): - # typical http uri "http://foo.com" - provider = get_web3_connection_provider("http://foo.com") - assert provider.endpoint_uri == "http://foo.com" - - # typical https uri "https://bar.com" - provider = get_web3_connection_provider("https://bar.com") - assert provider.endpoint_uri == "https://bar.com" - - # non-supported name - with pytest.raises(AssertionError): - get_web3_connection_provider("not_network_name") - - # typical websockets uri "wss://foo.com" - provider = get_web3_connection_provider("wss://bah.com") - assert provider.endpoint_uri == "wss://bah.com" - - -@pytest.mark.unit -def test_send_ether(publisher_wallet, consumer_address): - assert send_ether( - get_web3(8996), publisher_wallet, consumer_address, to_wei(1) - ), "Send ether was unsuccessful." - - -@pytest.mark.unit -def test_validate_timestamp(): - timestamp_future = int((datetime.utcnow() + timedelta(hours=1)).timestamp()) - assert validate_timestamp(timestamp_future) - assert validate_timestamp(1644831664000) is False - assert validate_timestamp(str(timestamp_future)) - - timestamp_past = (datetime.utcnow() - timedelta(hours=1)).timestamp() - assert validate_timestamp(timestamp_past) is False - - -@pytest.mark.unit -def test_decode_keyed(monkeypatch): - monkeypatch.setenv("TEST_ENV", '{"valid": "json"}') - assert decode_keyed("TEST_ENV") == {"valid": "json"} - monkeypatch.setenv("TEST_ENV", '{"invalid json"}') - assert not decode_keyed("TEST_ENV") - monkeypatch.setenv("TEST_ENV", "simple string") - assert not decode_keyed("TEST_ENV") - - -@pytest.mark.unit -def test_get_configured_chains(monkeypatch): - monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545", "15": "fifteen"}') - assert get_configured_chains() == [3, 15] - - monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545") - assert get_configured_chains() == [8996] - - monkeypatch.delenv("NETWORK_URL") - with pytest.raises(Exception, match="No chains configured"): - get_configured_chains() - - -@pytest.mark.unit -def test_get_value_from_decoded_env(monkeypatch): - monkeypatch.setenv("SOME_ENV", '{"3": "three", "15": "fifteen"}') - assert get_value_from_decoded_env(3, "SOME_ENV") == "three" - - with pytest.raises(Exception, match="Unconfigured chain_id"): - get_value_from_decoded_env(7, "SOME_ENV") - - with pytest.raises(Exception, match="Unconfigured chain_id"): - get_value_from_decoded_env(None, "SOME_ENV") - - monkeypatch.setenv("SOME_ENV", "simple string") - assert get_value_from_decoded_env(3, "SOME_ENV") == "simple string" - - -@pytest.mark.unit -def test_get_provider_addresses(monkeypatch): - monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545"}') - monkeypatch.setenv( - "PROVIDER_PRIVATE_KEY", - '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}', - ) - assert 3 in get_provider_addresses() - - monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545") - monkeypatch.setenv( - "PROVIDER_PRIVATE_KEY", - 
"0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", - ) - assert 8996 in get_provider_addresses() - - monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545"}') - monkeypatch.setenv( - "PROVIDER_PRIVATE_KEY", - "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", - ) - with pytest.raises(Exception, match="must both be single or both json encoded"): - get_provider_addresses() - - monkeypatch.setenv( - "PROVIDER_PRIVATE_KEY", - '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}', - ) - monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545") - with pytest.raises(Exception, match="must both be single or both json encoded"): - get_provider_addresses() - - -@pytest.mark.unit -def test_get_provider_private_key(monkeypatch): - monkeypatch.delenv("UNIVERSAL_PRIVATE_KEY") - monkeypatch.setenv( - "PROVIDER_PRIVATE_KEY", - '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}', - ) - assert get_provider_private_key(3).startswith("0xfd5c1") - - with pytest.raises( - Exception, - match="Must define UNIVERSAL_PRIVATE_KEY or a single PROVIDER_PRIVATE_KEY.", - ): - get_provider_private_key(None, use_universal_key=True) - - monkeypatch.setenv( - "PROVIDER_PRIVATE_KEY", - "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", - ) - assert get_provider_private_key(8996).startswith("0xfd5c1") - - monkeypatch.delenv("PROVIDER_PRIVATE_KEY") - monkeypatch.setenv( - "UNIVERSAL_PRIVATE_KEY", - "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215", - ) - assert get_provider_private_key(None, use_universal_key=True).startswith("0xfd5c1") diff --git a/ocean_provider/utils/test/test_compute.py b/ocean_provider/utils/test/test_compute.py deleted file mode 100644 index b9d247ec..00000000 --- a/ocean_provider/utils/test/test_compute.py +++ /dev/null @@ -1,30 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -import pytest -from ocean_provider.utils.compute import ( - get_compute_endpoint, - get_compute_result_endpoint, -) - -test_logger = logging.getLogger(__name__) - - -@pytest.mark.unit -def test_get_compute_endpoint(monkeypatch): - monkeypatch.setenv("OPERATOR_SERVICE_URL", "http://with-slash.com/") - assert get_compute_endpoint() == "http://with-slash.com/api/v1/operator/compute" - assert ( - get_compute_result_endpoint() - == "http://with-slash.com/api/v1/operator/getResult" - ) - - monkeypatch.setenv("OPERATOR_SERVICE_URL", "http://without-slash.com") - assert get_compute_endpoint() == "http://without-slash.com/api/v1/operator/compute" - assert ( - get_compute_result_endpoint() - == "http://without-slash.com/api/v1/operator/getResult" - ) diff --git a/ocean_provider/utils/test/test_credentials.py b/ocean_provider/utils/test/test_credentials.py deleted file mode 100644 index 477c9b7d..00000000 --- a/ocean_provider/utils/test/test_credentials.py +++ /dev/null @@ -1,90 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -from copy import deepcopy - -import pytest -from ocean_provider.utils.asset import Asset -from ocean_provider.utils.consumable import ConsumableCodes, MalformedCredential -from ocean_provider.utils.credentials import AddressCredential -from tests.ddo.ddo_sa_sample_with_credentials_v4 import json_dict -from tests.test_helpers import get_resource_path - - -@pytest.mark.unit -def test_asset_credentials_addresses_both(): - """Tests asset credentials when both deny and allow 
lists exist on the asset.""" - sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json") - assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path) - - ddo = deepcopy(json_dict) - asset = Asset(ddo) - - address_credential = AddressCredential(asset) - assert address_credential.get_addresses_of_class("allow") == ["0x123", "0x456a"] - assert address_credential.get_addresses_of_class("deny") == ["0x2222", "0x333"] - assert ( - address_credential.validate_access({"type": "address", "value": "0x111"}) - == ConsumableCodes.CREDENTIAL_NOT_IN_ALLOW_LIST - ) - assert ( - address_credential.validate_access({"type": "address", "value": "0x456A"}) - == ConsumableCodes.OK - ) - # if "allow" exists, "deny" is not checked anymore - - -@pytest.mark.unit -def test_asset_credentials_addresses_only_deny(): - """Tests asset credentials when only the deny list exists on the asset.""" - sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json") - assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path) - - ddo = deepcopy(json_dict) - asset = Asset(ddo) - - # remove allow to test the behaviour of deny - asset.credentials.pop("allow") - - address_credential = AddressCredential(asset) - assert address_credential.get_addresses_of_class("allow") == [] - assert address_credential.get_addresses_of_class("deny") == ["0x2222", "0x333"] - assert ( - address_credential.validate_access({"type": "address", "value": "0x111"}) - == ConsumableCodes.OK - ) - assert ( - address_credential.validate_access({"type": "address", "value": "0x333"}) - == ConsumableCodes.CREDENTIAL_IN_DENY_LIST - ) - - credential = {"type": "address", "value": ""} - with pytest.raises(MalformedCredential): - address_credential.validate_access(credential) - - -@pytest.mark.unit -def test_asset_credentials_addresses_no_access_list(): - """Tests asset credentials when neither deny, nor allow lists exist on the asset.""" - sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json") - assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path) - - ddo = deepcopy(json_dict) - asset = Asset(ddo) - - # if "allow" OR "deny" exist, we need a credential, - # so remove both to test the behaviour of no credential supplied - address_credential = AddressCredential(asset) - asset.credentials.pop("allow") - asset.credentials.pop("deny") - - assert address_credential.validate_access() == ConsumableCodes.OK - - # test that we can use another credential if address is not required - assert ( - asset.is_consumable( - {"type": "somethingelse", "value": "test"}, with_connectivity_check=False - ) - == ConsumableCodes.OK - ) diff --git a/ocean_provider/utils/test/test_currency.py b/ocean_provider/utils/test/test_currency.py deleted file mode 100644 index 2ca65a1e..00000000 --- a/ocean_provider/utils/test/test_currency.py +++ /dev/null @@ -1,96 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -from decimal import Decimal, localcontext - -import pytest -from ocean_provider.utils.currency import ( - ETHEREUM_DECIMAL_CONTEXT, - MAX_ETHER, - MAX_WEI, - MIN_ETHER, - MIN_WEI, - parse_units, - to_wei, -) - -USDT_DECIMALS = 6 -MIN_USDT = Decimal("0.000001") -MAX_USDT = Decimal(MAX_WEI).scaleb(-USDT_DECIMALS, context=ETHEREUM_DECIMAL_CONTEXT) - -SEVEN_DECIMALS = 7 -MIN_SEVEN = Decimal("0.0000001") -MAX_SEVEN = Decimal(MAX_WEI).scaleb(-SEVEN_DECIMALS, context=ETHEREUM_DECIMAL_CONTEXT) - - 
-@pytest.mark.unit -def test_to_wei(): - """Test the to_wei function""" - assert to_wei(Decimal("0")) == 0, "Zero ether (Decimal) should equal zero wei" - assert to_wei("0") == 0, "Zero ether (string) should equal zero wei" - assert to_wei(0) == 0, "Zero ether (int) should equal zero wei" - assert ( - to_wei(Decimal("0.123456789123456789")) == 123456789_123456789 - ), "Conversion from ether (Decimal) to wei failed." - assert ( - to_wei("0.123456789123456789") == 123456789_123456789 - ), "Conversion from ether (string) to wei failed." - assert ( - to_wei(1) == 1_000000000_000000000 - ), "Conversion from ether (int) to wei failed." - - assert ( - to_wei("0.1234567891234567893") == 123456789_123456789 - ), "Conversion from ether to wei failed, supposed to round towards 0 (aka. truncate)." - assert ( - to_wei("0.1234567891234567897") == 123456789_123456789 - ), "Conversion from ether to wei failed, supposed to round towards 0 (aka. truncate)." - - assert ( - to_wei(MIN_ETHER) == MIN_WEI - ), "Conversion from minimum ether to minimum wei failed." - - assert ( - to_wei(MAX_ETHER) == MAX_WEI - ), "Conversion from maximum ether to maximum wei failed." - - # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_ETHER - with localcontext(ETHEREUM_DECIMAL_CONTEXT): - with pytest.raises(ValueError): - to_wei(MAX_ETHER + 1) - - -@pytest.mark.unit -def test_parse_units(): - """Test the parse_units function""" - assert parse_units("0", USDT_DECIMALS) == 0 - assert parse_units("0.123456789123456789", USDT_DECIMALS) == 123456 - assert parse_units("1.123456789123456789", USDT_DECIMALS) == 1_123456 - assert parse_units("5278.02", USDT_DECIMALS) == 5278_020000 - assert parse_units(MIN_USDT, USDT_DECIMALS) == MIN_WEI - assert parse_units(MAX_USDT, USDT_DECIMALS) == MAX_WEI - - # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_USDT - with localcontext(ETHEREUM_DECIMAL_CONTEXT): - with pytest.raises(ValueError): - parse_units(MAX_USDT + 1, USDT_DECIMALS) - - assert parse_units("0", "mwei") == 0 - assert parse_units("0.123456789123456789", "mwei") == 123456 - assert parse_units("1.123456789123456789", "mwei") == 1_123456 - assert parse_units("5278.02", "mwei") == 5278_020000 - assert parse_units(MIN_USDT, "mwei") == MIN_WEI - assert parse_units(MAX_USDT, "mwei") == MAX_WEI - - # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_USDT - with localcontext(ETHEREUM_DECIMAL_CONTEXT): - with pytest.raises(ValueError): - parse_units(MAX_USDT + 1, "mwei") - - assert parse_units("0", SEVEN_DECIMALS) == 0 - assert parse_units("0.123456789", SEVEN_DECIMALS) == 1234567 - assert parse_units("1.123456789", SEVEN_DECIMALS) == 1_1234567 - assert parse_units("5278.02", SEVEN_DECIMALS) == 5278_0200000 - assert parse_units(MIN_SEVEN, SEVEN_DECIMALS) == MIN_WEI - assert parse_units(MAX_SEVEN, SEVEN_DECIMALS) == MAX_WEI diff --git a/ocean_provider/utils/test/test_encryption.py b/ocean_provider/utils/test/test_encryption.py deleted file mode 100644 index 64efaf8d..00000000 --- a/ocean_provider/utils/test/test_encryption.py +++ /dev/null @@ -1,35 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import pytest -from ocean_provider.utils.encryption import do_decrypt, do_encrypt -from web3.main import Web3 - - -@pytest.mark.unit -def test_encryption_with_bytes(provider_wallet): - test_string = "hello_world" - test_bytes = Web3.toBytes(text=test_string) - result = do_encrypt(test_bytes, provider_wallet) - assert result.startswith("0x") - assert 
do_decrypt(result, provider_wallet) == test_bytes - - -@pytest.mark.unit -def test_encryption_with_hexstr(provider_wallet): - test_string = '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]' - result = do_encrypt(Web3.toHex(text=test_string), provider_wallet) - assert result.startswith("0x") - assert do_decrypt(result, provider_wallet) == Web3.toBytes(text=test_string) - - -@pytest.mark.unit -def test_encryption_with_text(provider_wallet): - test_string = ( - '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt", ' - '"https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]' - ) - result = do_encrypt(test_string, provider_wallet) - assert result.startswith("0x") - assert do_decrypt(result, provider_wallet) == Web3.toBytes(text=test_string) diff --git a/ocean_provider/utils/test/test_error_responses.py b/ocean_provider/utils/test/test_error_responses.py deleted file mode 100644 index 3a12155a..00000000 --- a/ocean_provider/utils/test/test_error_responses.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import logging - -import pytest -from ocean_provider.run import app, handle_error - -test_logger = logging.getLogger(__name__) - - -@pytest.mark.unit -def test_service_unavailable(caplog): - context = {"item1": "test1", "item2": "test2"} - - with app.test_request_context(json=context): - e = Exception("test message") - response = handle_error(e) - assert response.status_code == 503 - response = response.json - assert response["error"] == "test message" - assert response["context"] == context - - -@pytest.mark.unit -def test_service_unavailable_strip_infura_project_id(): - """Test that service_unavilable strips out URLs.""" - - context = {"item1": "test1", "item2": "test2"} - - # HTTP Infura URL (rinkeby) - with app.test_request_context(json=context): - e = Exception( - "429 Client Error: Too Many Requests for url: " - "https://rinkeby.infura.io/v3/ffffffffffffffffffffffffffffffff" - ) - response = handle_error(e) - assert ( - response.json["error"] == "429 Client Error: Too Many Requests for url: " - "" - ) - - # Websocket Infura URL (ropsten) - with app.test_request_context(json=context): - e = Exception( - "429 Client Error: Too Many Requests for url: " - "wss://ropsten.infura.io/ws/v3/ffffffffffffffffffffffffffffffff" - ) - response = handle_error(e) - assert ( - response.json["error"] == "429 Client Error: Too Many Requests for url: " - "" - ) - - # No URL - with app.test_request_context(json=context): - e = Exception("string without a URL in it") - response = handle_error(e) - assert response.json["error"] == "string without a URL in it" - - # Two URLs - with app.test_request_context(json=context): - e = Exception("Two URLs: wss://google.com https://google.com") - response = handle_error(e) - assert ( - response.json["error"] == "Two URLs: " - " " - "" - ) diff --git a/ocean_provider/utils/test/test_provider_fees.py b/ocean_provider/utils/test/test_provider_fees.py deleted file mode 100644 index f4bcfa2a..00000000 --- a/ocean_provider/utils/test/test_provider_fees.py +++ /dev/null @@ -1,63 +0,0 @@ -from unittest.mock import patch - -import pytest -from freezegun import freeze_time -from ocean_provider.utils.currency import to_wei -from ocean_provider.utils.provider_fees import get_provider_fee_amount -from tests.helpers.compute_helpers 
import get_future_valid_until -from tests.test_helpers import ( - BLACK_HOLE_ADDRESS, - deploy_data_nft, - deploy_datatoken, - get_ocean_token_address, -) - - -@pytest.mark.unit -@freeze_time("Feb 11th, 2012 00:00") -def test_get_provider_fee_amount(web3, publisher_wallet): - valid_until = get_future_valid_until() - assert ( - get_provider_fee_amount( - valid_until, - "ocean-compute", - web3, - "0x0000000000000000000000000000000000000000", - ) - == 0 - ) - - data_nft_address = deploy_data_nft( - web3, - "Data NFT Name", - "DATANFTSYMBOL", - 1, - BLACK_HOLE_ADDRESS, - BLACK_HOLE_ADDRESS, - "", - publisher_wallet, - ) - - datatoken_address = deploy_datatoken( - web3=web3, - data_nft_address=data_nft_address, - template_index=1, - name="Datatoken 1", - symbol="DT1", - minter=publisher_wallet.address, - fee_manager=publisher_wallet.address, - publishing_market=BLACK_HOLE_ADDRESS, - publishing_market_fee_token=get_ocean_token_address(web3), - cap=to_wei(1000), - publishing_market_fee_amount=0, - from_wallet=publisher_wallet, - ) - - with patch("ocean_provider.utils.provider_fees.get_c2d_environments") as mock: - mock.return_value = [{"id": "ocean-compute", "priceMin": 60}] - assert ( - get_provider_fee_amount( - valid_until, "ocean-compute", web3, datatoken_address - ) - == 3600000000000000000000 - ) diff --git a/ocean_provider/utils/test/test_url.py b/ocean_provider/utils/test/test_url.py deleted file mode 100644 index afbb60b1..00000000 --- a/ocean_provider/utils/test/test_url.py +++ /dev/null @@ -1,93 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import logging -from unittest.mock import Mock, patch - -import pytest -from ocean_provider.utils.url import ( - get_redirect, - is_safe_url, - is_this_same_provider, - is_url, -) -from requests.models import Response - -test_logger = logging.getLogger(__name__) - - -@pytest.mark.unit -def test_is_url(): - assert is_url("https://jsonplaceholder.typicode.com/") is True - assert is_url("127.0.0.1") is False - assert is_url("169.254.169.254") is False - assert is_url("http://169.254.169.254/latest/meta-data/hostname") is True - - -@pytest.mark.unit -def test_is_safe_url(): - assert is_safe_url("https://jsonplaceholder.typicode.com/") is True - assert is_safe_url("127.0.0.1") is False - assert is_safe_url("169.254.169.254") is False - assert is_safe_url("http://169.254.169.254/latest/meta-data/hostname") is False - - assert is_safe_url("https://bit.ly/3zqzc4m") is True # jsonplaceholder example - assert is_safe_url("https://bit.ly/3znh0Zg") is False # meta-data/hostname example - - assert is_safe_url("blabla") is False - - -@pytest.mark.unit -def test_is_same_provider(): - assert is_this_same_provider("http://localhost:8030", 8996) - - -@pytest.mark.unit -def test_get_redirect(): - assert ( - get_redirect("https://bit.ly/3zqzc4m") - == "https://jsonplaceholder.typicode.com/" - ) - - redirect_response = Mock(spec=Response) - redirect_response.is_redirect = True - redirect_response.status_code = 200 - redirect_response.headers = {"Location": "/root-relative.html"} - - normal_response = Mock(spec=Response) - normal_response.is_redirect = False - normal_response.status_code = 200 - - with patch("ocean_provider.utils.url.requests.head") as mock: - mock.side_effect = [redirect_response, normal_response] - assert ( - get_redirect("https://some-url.com:3000/index") - == "https://some-url.com:3000/root-relative.html" - ) - - redirect_response = Mock(spec=Response) - redirect_response.is_redirect = True - 
redirect_response.status_code = 200 - redirect_response.headers = {"Location": "relative.html"} - - normal_response = Mock(spec=Response) - normal_response.is_redirect = False - normal_response.status_code = 200 - - with patch("ocean_provider.utils.url.requests.head") as mock: - mock.side_effect = [redirect_response, normal_response] - assert ( - get_redirect("https://some-url.com:3000/index") - == "https://some-url.com:3000/index/relative.html" - ) - - redirect_response = Mock(spec=Response) - redirect_response.is_redirect = True - redirect_response.status_code = 200 - redirect_response.headers = {"Location": "https://some-url.com:3000/index"} - - with patch("ocean_provider.utils.url.requests.head") as mock: - mock.return_value = redirect_response - assert get_redirect("https://some-url.com:3000/index") is None - assert mock.call_count == 6 diff --git a/ocean_provider/utils/test/test_util.py b/ocean_provider/utils/test/test_util.py deleted file mode 100644 index 7189251d..00000000 --- a/ocean_provider/utils/test/test_util.py +++ /dev/null @@ -1,445 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import copy -import json -import logging -import mimetypes -from copy import deepcopy -from unittest.mock import Mock, patch - -import ipfshttpclient -import pytest -import requests -from flask import Request -from ocean_provider.file_types.file_types_factory import FilesTypeFactory -from ocean_provider.utils.asset import Asset -from ocean_provider.utils.encryption import do_encrypt -from ocean_provider.utils.services import Service -from ocean_provider.utils.util import ( - get_service_files_list, - get_service_files_list_old_structure, - msg_hash, -) -from tests.ddo.ddo_sample1_v4 import json_dict as ddo_sample1_v4 -from tests.helpers.constants import ARWEAVE_TRANSACTION_ID -from web3.main import Web3 -from werkzeug.utils import get_content_type - -test_logger = logging.getLogger(__name__) - - -@pytest.mark.unit -def test_msg_hash(): - msg = "Hello World!" 
- hashed = msg_hash(msg) - expected = "7f83b1657ff1fc53b92dc18148a1d65dfc2d4b1fa3d677284addd200126d9069" - assert hashed == expected - - -@pytest.mark.unit -def test_build_download_response(): - request = Mock() - request.range = None - - class Dummy: - pass - - mocked_response = Dummy() - mocked_response.content = b"asdsadf" - mocked_response.status_code = 200 - mocked_response.headers = {} - - filename = "<>.xml" - content_type = mimetypes.guess_type(filename)[0] - url_object = {"url": f"https://source-lllllll.cccc/{filename}", "type": "url"} - _, instance = FilesTypeFactory.validate_and_create(url_object) - with patch( - "ocean_provider.file_types.definitions.is_safe_url", - side_effect=[True], - ): - with patch( - "requests.get", - side_effect=[mocked_response], - ): - response = instance.build_download_response(request) - - assert response.headers["content-type"] == content_type - assert ( - response.headers.get_all("Content-Disposition")[0] - == f"attachment;filename={filename}" - ) - - filename = "<>" - url_object = {"url": f"https://source-lllllll.cccc/{filename}", "type": "url"} - _, instance = FilesTypeFactory.validate_and_create(url_object) - with patch( - "ocean_provider.file_types.definitions.is_safe_url", - side_effect=[True], - ): - with patch( - "requests.get", - side_effect=[mocked_response], - ): - response = instance.build_download_response(request) - assert response.headers["content-type"] == get_content_type( - response.default_mimetype, response.charset - ) - assert ( - response.headers.get_all("Content-Disposition")[0] - == f"attachment;filename={filename}" - ) - - filename = "<>" - url_object = {"url": f"https://source-lllllll.cccc/{filename}", "type": "url"} - _, instance = FilesTypeFactory.validate_and_create(url_object) - instance.checked_details = {"contentType": content_type} - with patch( - "ocean_provider.file_types.definitions.is_safe_url", - side_effect=[True], - ): - with patch( - "requests.get", - side_effect=[mocked_response], - ): - response = instance.build_download_response(request) - assert response.headers["content-type"] == content_type - - matched_cd = ( - f"attachment;filename={filename + mimetypes.guess_extension(content_type)}" - ) - assert response.headers.get_all("Content-Disposition")[0] == matched_cd - - mocked_response_with_attachment = deepcopy(mocked_response) - attachment_file_name = "test.xml" - mocked_response_with_attachment.headers = { - "content-disposition": f"attachment;filename={attachment_file_name}" - } - - url_object = {"url": "https://source-lllllll.cccc/not-a-filename", "type": "url"} - _, instance = FilesTypeFactory.validate_and_create(url_object) - with patch( - "ocean_provider.file_types.definitions.is_safe_url", - side_effect=[True], - ): - with patch( - "requests.get", - side_effect=[mocked_response_with_attachment], - ): - response = instance.build_download_response(request) - assert ( - response.headers["content-type"] - == mimetypes.guess_type(attachment_file_name)[0] - ) # noqa - - matched_cd = f"attachment;filename={attachment_file_name}" - assert response.headers.get_all("Content-Disposition")[0] == matched_cd - - mocked_response_with_content_type = deepcopy(mocked_response) - response_content_type = "text/csv" - mocked_response_with_content_type.headers = {"content-type": response_content_type} - - filename = "filename.txt" - url_object = { - "url": f"https://source-lllllll.cccc/{filename}", - "type": "url", - "headers": {"APIKEY": "sample"}, - } - _, instance = FilesTypeFactory.validate_and_create(url_object) - 
with patch( - "ocean_provider.file_types.definitions.is_safe_url", - side_effect=[True], - ): - with patch( - "requests.get", - side_effect=[mocked_response_with_content_type], - ): - response = instance.build_download_response(request) - assert response.headers["content-type"] == response_content_type - assert ( - response.headers.get_all("Content-Disposition")[0] - == f"attachment;filename={filename}" - ) - - -@pytest.mark.unit -def test_httpbin(): - request = Mock(spec=Request) - request.range = None - request.headers = {} - - try: - url_object = { - "url": "https://httpbin.org/get", - "type": "url", - "method": "GET", - "userdata": {"test_param": "OCEAN value"}, - } - _, instance = FilesTypeFactory.validate_and_create(url_object) - response = instance.build_download_response(request) - assert response.json["args"] == {"test_param": "OCEAN value"} - - url_object["url"] = "https://httpbin.org/headers" - url_object["headers"] = {"test_header": "OCEAN header", "Range": "DDO range"} - _, instance = FilesTypeFactory.validate_and_create(url_object) - response = instance.build_download_response(request) - # no request range, but DDO range exists - assert response.headers.get("Range") == "DDO range" - - url_object["headers"] = {} - _, instance = FilesTypeFactory.validate_and_create(url_object) - response = instance.build_download_response(request) - # no request range and no DDO range - assert response.headers.get("Range") is None - - _, instance = FilesTypeFactory.validate_and_create(url_object) - request.range = 200 - request.headers = {"Range": "200"} - response = instance.build_download_response(request) - # request range and no DDO range - assert response.headers.get("Range") == "200" - - url_object["headers"] = {"test_header": "OCEAN header", "Range": "DDO range"} - _, instance = FilesTypeFactory.validate_and_create(url_object) - request.range = 200 - request.headers = {"Range": "200"} - response = instance.build_download_response(request) - # request range and DDO range, will favor DDO range - assert response.headers.get("Range") == "DDO range" - - request.range = None - request.headers = {} - url_object = { - "url": "https://httpbin.org/post", - "type": "url", - "method": "POST", - "userdata": {"test_param": "OCEAN POST value"}, - } - _, instance = FilesTypeFactory.validate_and_create(url_object) - response = instance.build_download_response(request) - assert response.json["json"]["test_param"] == "OCEAN POST value" - except requests.exceptions.ReadTimeout: - # skippable error due to httpbin downtime - logging.warning("test failed due to httpbin downtime") - return - - -@pytest.mark.unit -def test_get_service_files_list(provider_wallet): - ddo_sample1 = copy.deepcopy(ddo_sample1_v4) - ddo = Asset(ddo_sample1) - service = Mock(template=Service) - service.datatoken_address = "0x0000000000000000000000000000000000000000" - service.type = "access" - - encrypted_files_str = json.dumps( - { - "nftAddress": "0x0000000000000000000000000000000000000000", - "datatokenAddress": "0x0000000000000000000000000000000000000000", - "files": ["test1", "test2"], - }, - separators=(",", ":"), - ) - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - assert ["test1", "test2"] == get_service_files_list(service, provider_wallet, ddo) - - # empty and raw - service.encrypted_files = "" - assert get_service_files_list(service, provider_wallet, ddo) is None - - # empty and encrypted - encrypted_files_str = "" - service.encrypted_files = do_encrypt( - 
Web3.toHex(text=encrypted_files_str), provider_wallet - ) - assert get_service_files_list(service, provider_wallet, ddo) is None - - # not a dict - encrypted_files_str = json.dumps([], separators=(",", ":")) - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - - assert get_service_files_list(service, provider_wallet, ddo) is None - - # files not a list - encrypted_files_str = json.dumps( - { - "nftAddress": "0x0000000000000000000000000000000000000000", - "datatokenAddress": "0x0000000000000000000000000000000000000000", - "files": {"some_dict": "test"}, - }, - separators=(",", ":"), - ) - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - - assert get_service_files_list(service, provider_wallet, ddo) is None - - # missing nftAddress - encrypted_files_str = json.dumps( - { - "datatokenAddress": "0x0000000000000000000000000000000000000000", - "files": {"some_dict": "test"}, - }, - separators=(",", ":"), - ) - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - - assert get_service_files_list(service, provider_wallet, ddo) is None - - # wrong nftAddress - encrypted_files_str = json.dumps( - { - "nftAddress": "0x0000000000000000000000000000000000000001", - "datatokenAddress": "0x0000000000000000000000000000000000000000", - "files": {"some_dict": "test"}, - }, - separators=(",", ":"), - ) - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - - assert get_service_files_list(service, provider_wallet, ddo) is None - - -@pytest.mark.unit -def test_get_service_files_list_old_structure(provider_wallet): - service = Mock(template=Service) - encrypted_files_str = json.dumps(["test1", "test2"], separators=(",", ":")) - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - assert ["test1", "test2"] == get_service_files_list_old_structure( - service, provider_wallet - ) - - # empty and raw - service.encrypted_files = "" - assert get_service_files_list(service, provider_wallet) is None - - # empty and encrypted - encrypted_files_str = "" - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - assert get_service_files_list_old_structure(service, provider_wallet) is None - - # not a list - encrypted_files_str = json.dumps({"test": "test"}, separators=(",", ":")) - service.encrypted_files = do_encrypt( - Web3.toHex(text=encrypted_files_str), provider_wallet - ) - - assert get_service_files_list_old_structure(service, provider_wallet) is None - - -@pytest.mark.unit -def test_validate_url_object(): - result, message = FilesTypeFactory.validate_and_create({}) - assert result is False - assert message == "cannot decrypt files for this service." - - result, message = FilesTypeFactory.validate_and_create({"type": "invalid"}) - assert result is False - assert message == "Unsupported type invalid" - - result, message = FilesTypeFactory.validate_and_create( - {"type": "ipfs", "but_hash": "missing"} - ) - assert result is False - assert message == "malformed service files, missing required keys." - - result, message = FilesTypeFactory.validate_and_create( - {"type": "arweave", "but_transactionId": "missing"} - ) - assert result is False - assert message == "malformed service files, missing transactionId." 
- - result, message = FilesTypeFactory.validate_and_create( - {"type": "url", "url": "x", "headers": "not_a_dict"} - ) - assert result is False - assert message == "malformed file object." - - result, message = FilesTypeFactory.validate_and_create( - {"type": "url", "url": "x", "headers": '{"dict": "but_stringified"}'} - ) - # we purposefully require a dictionary - assert result is False - assert message == "malformed file object." - - result, message = FilesTypeFactory.validate_and_create( - {"type": "url", "url": "x", "headers": {"dict": "dict_key"}} - ) - assert result is True - - url_object = { - "url": "x", - "type": "url", - "method": "DELETE", - } - result, message = FilesTypeFactory.validate_and_create(url_object) - assert result is False - assert message == "Unsafe method delete." - - -@pytest.mark.unit -def test_build_download_response_ipfs(): - client = ipfshttpclient.connect("/dns/172.15.0.16/tcp/5001/http") - cid = client.add("./tests/resources/ddo_sample_file.txt")["Hash"] - url_object = {"type": "ipfs", "hash": cid} - - request = Mock() - request.range = None - - _, instance = FilesTypeFactory.validate_and_create(url_object) - download_url = instance.get_download_url() - print(f"got ipfs download url: {download_url}") - assert download_url and download_url.endswith(f"ipfs/{cid}") - - response = instance.build_download_response(request) - assert response.data, f"got no data {response.data}" - - # Assert that Content-Disposition header doesn't leak CID - assert cid not in response.headers["Content-Disposition"] - - -@pytest.mark.unit -def test_build_download_response_arweave(monkeypatch): - """Test the special cases relevant only to Arweave""" - transaction_id = ARWEAVE_TRANSACTION_ID - url_object = { - "type": "arweave", - "transactionId": ARWEAVE_TRANSACTION_ID, - } - - request = Mock() - request.range = None - - _, instance = FilesTypeFactory.validate_and_create(url_object) - assert ( - instance.get_download_url() == f"https://arweave.net/{ARWEAVE_TRANSACTION_ID}" - ) - - response = instance.build_download_response(request) - assert response.status == "200 OK" - assert response.data, f"got no data {response.data}" - - # Assert that Content-Disposition header doesn't leak transaction ID - assert transaction_id not in response.headers["Content-Disposition"] - - # Unset ARWEAVE_GATEWAY - monkeypatch.delenv("ARWEAVE_GATEWAY") - with pytest.raises( - Exception, - match="No ARWEAVE_GATEWAY defined, can not resolve arweave transaction id.", - ): - instance.get_download_url() diff --git a/ocean_provider/validation/test/test_algo_validation.py b/ocean_provider/validation/test/test_algo_validation.py deleted file mode 100644 index e94f9555..00000000 --- a/ocean_provider/validation/test/test_algo_validation.py +++ /dev/null @@ -1,1012 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import copy -from unittest.mock import Mock, patch - -import pytest -from ocean_provider.utils.asset import Asset -from ocean_provider.utils.services import Service, ServiceType -from ocean_provider.validation.algo import WorkflowValidator -from tests.ddo.ddo_sample1_compute import alg_ddo_dict, ddo_dict -from tests.helpers.compute_helpers import get_future_valid_until -from tests.test_helpers import get_first_service_by_type - -provider_fees_event = Mock() -provider_fees_event.args.providerData = {"environment": "ocean-compute"} -provider_fees_event.args.validUntil = get_future_valid_until() -provider_fees_event.args.providerFeeAmount = 0 - -this_is_a_gist 
= "https://gist.githubusercontent.com/calina-c/5e8c965962bc0240eab516cb7a180670/raw/6e6cd245c039a9aac0a488857c6927d39eaafe4d/sprintf-py-conversions" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_passes_algo_ddo(provider_wallet, consumer_address, web3): - """Tests happy flow of validator with algo ddo.""" - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": { - "documentId": alg_ddo.did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - "environment": "ocean-compute", - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is True - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_passes_raw(provider_wallet, consumer_address, web3): - """Tests happy flow of validator with raw algo.""" - ddo = Asset(ddo_dict) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": { - "serviceId": sa.id, - "meta": { - "rawcode": "console.log('Hello world'!)", - "format": "docker-image", - "version": "0.1", - "container": { - "entrypoint": "node $ALGO", - "image": "oceanprotocol/algo_dockers", - "tag": "python-branin", - "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", - }, - }, - }, - "environment": "ocean-compute", - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is True - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fails_not_an_algo(provider_wallet, consumer_address, web3): - """Tests happy flow of validator with algo ddo.""" - _copy = copy.deepcopy(ddo_dict) - _copy["services"][0]["compute"]["publisherTrustedAlgorithms"] = [] - ddo = Asset(_copy) - did = ddo.did - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": 
{ - "documentId": did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm" - assert validator.message == "not_algo" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fails_meta_issues(provider_wallet, consumer_address, web3): - """Tests happy flow of validator with raw algo.""" - ddo = Asset(ddo_dict) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - """Tests happy flow of validator with algo ddo and raw algo.""" - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": {"serviceId": sa.id, "meta": {}}, - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm" - assert validator.message == "meta_oneof_url_rawcode_remote" - - # algorithmMeta container is empty - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa.id, - "meta": { - "rawcode": "console.log('Hello world'!)", - "format": "docker-image", - "version": "0.1", - "container": {}, - }, - }, - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm.container" - assert validator.message == "missing_entrypoint_image_checksum" - - # algorithmMeta container is missing image - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa.id, - "meta": { - "rawcode": "console.log('Hello world'!)", - "format": "docker-image", - "version": "0.1", - "container": {"entrypoint": "node $ALGO", "tag": "10"}, - }, - }, - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm.container" - assert validator.message == "missing_entrypoint_image_checksum" - - # algorithmMeta container checksum does not start with sha256 - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa.id, - "meta": { - "rawcode": "console.log('Hello world'!)", - "format": "docker-image", - "version": "0.1", - "container": { - "entrypoint": "node $ALGO", - "image": "oceanprotocol/algo_dockers", - "tag": "python-branin", - "checksum": "8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", - }, - }, - }, - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = 
WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm.container" - assert validator.message == "checksum_prefix" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_additional_datasets(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": { - "documentId": alg_ddo.did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - "additionalDatasets": "", - "environment": "ocean-compute", - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - # basically the same test as test_passes_algo_ddo, additionalDatasets is empty - assert validator.validate() is True - - # additional input is invalid - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "alg_tx_id", - }, - "additionalDatasets": "i can not be decoded in json!", - } - - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "additional_input" - assert validator.message == "invalid" - - did = ddo.did - - # Missing did in additional input - data = { - "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "alg_tx_id", - }, - "additionalDatasets": [{"transferTxId": "tx_id", "serviceId": sa.id}], - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "datasets[1].documentId" - assert validator.message == "missing" - - # Did is not valid - data = { - "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "alg_tx_id", - }, - "additionalDatasets": [ - { - "documentId": "i am not a did", - "transferTxId": "tx_id", - "serviceId": sa.id, - } - ], - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "datasets[1].documentId" - assert validator.message == "did_not_found" - - data = { - "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "alg_tx_id", - }, - "additionalDatasets": [ - { - "documentId": did, - 
"transferTxId": "tx_id", - "serviceId": "some other service id", - } - ], - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "datasets[1].serviceId" - assert validator.message == "not_found" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_service_not_compute(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "alg_tx_id", - }, - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - def other_service(*args, **kwargs): - return Service( - index=0, - service_id="smth_else", - service_type="something else", - datatoken_address="0xa", - service_endpoint="test", - encrypted_files="", - timeout=3600, - ) - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - with patch( - "ocean_provider.utils.asset.Asset.get_service_by_id", - side_effect=other_service, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "dataset.serviceId" - assert validator.message == "service_not_access_compute" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fails_trusted(provider_wallet, consumer_address, web3): - """Tests possible failures of the algo validation.""" - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - # Additional input has other trusted algs - _copy = copy.deepcopy(ddo_dict) - _copy["id"] = "0xtrust" - _copy["services"][0]["compute"]["publisherTrustedAlgorithms"] = [ - {"did": "0xother", "filesChecksum": "mock", "containerSectionChecksum": "mock"} - ] - trust_ddo = Asset(_copy) - trust_sa = get_first_service_by_type(trust_ddo, ServiceType.COMPUTE) - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo, trust_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - if trust_ddo.did == args[1]: - return trust_ddo - - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "alg_tx_id", - }, - "additionalDatasets": [ - { - "documentId": trust_ddo.did, - "transferTxId": "trust_tx_id", - "serviceId": 
trust_sa.id, - } - ], - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "datasets[1]" - assert validator.message == "not_trusted_algo" - - # Additional input has other trusted publishers - _copy = copy.deepcopy(ddo_dict) - _copy["id"] = "0xtrust" - _copy["services"][0]["compute"]["publisherTrustedAlgorithmPublishers"] = ["0xabc"] - _copy["services"][0]["id"] = "compute_2" - trust_ddo = Asset(_copy) - trust_sa = get_first_service_by_type(trust_ddo, ServiceType.COMPUTE) - - data = { - "dataset": { - "documentId": ddo.did, - "transferTxId": "trust_tx_id", - "serviceId": sa.id, - }, - "algorithm": { - "documentId": alg_ddo.did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - "additionalDatasets": [ - { - "documentId": trust_ddo.did, - "transferTxId": "trust_tx_id", - "serviceId": trust_sa.id, - } - ], - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "datasets[1]" - assert validator.message == "not_trusted_algo_publisher" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch("ocean_provider.validation.algo.get_service_files_list", return_value=None) -def test_fails_no_asset_url(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": {"serviceId": sa.id, "meta": {}}, - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "dataset.serviceId" - assert validator.message == "compute_services_not_in_same_provider" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch("ocean_provider.validation.algo.validate_order", side_effect=Exception("mock")) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fails_validate_order(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": {"serviceId": sa.id, "meta": {}}, - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "dataset.serviceId" - assert validator.message == "order_invalid" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": 
"url"}], -) -def test_fails_no_service_id(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - data = { - "dataset": {"documentId": ddo.did, "serviceId": None, "transferTxId": "tx_id"}, - "algorithm": {"serviceId": sa.id, "meta": {}}, - } - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo] - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "dataset.serviceId" - assert validator.message == "missing" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -@patch( - "ocean_provider.serializers.StageAlgoSerializer.serialize", - new=Mock(return_value={}), -) -def test_fails_invalid_algorithm_dict(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": { - "documentId": alg_ddo.did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm" - assert validator.message == "did_not_found" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fails_algorithm_in_use(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": { - "documentId": alg_ddo.did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - def record_consume_request_side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[0]: - return ddo - if alg_ddo.did == args[0]: - raise Exception("I know Python!") - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - with patch( - "ocean_provider.validation.algo.record_consume_request", - side_effect=record_consume_request_side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm" - assert 
validator.message == "in_use_or_not_on_chain" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fail_wrong_algo_type(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "alg_tx_id", - }, - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - def other_service(*args, **kwargs): - return Service( - index=0, - service_id=data["algorithm"]["serviceId"], - service_type="access", - datatoken_address="0xa", - service_endpoint="test", - encrypted_files="", - timeout=3600, - ) - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - with patch( - "ocean_provider.utils.asset.Asset.get_service_by_id", - side_effect=other_service, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "dataset.serviceId" - assert validator.message == "main_service_compute" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fail_allow_raw_false(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - ddo.services[0].compute_dict["allowRawAlgorithm"] = False - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "meta": { - "rawcode": "console.log('Hello world'!)", - "format": "docker-image", - "version": "0.1", - "container": { - "entrypoint": "node $ALGO", - "image": "oceanprotocol/algo_dockers", - "tag": "python-branin", - "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", - }, - }, - }, - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "dataset" - assert validator.message == "no_raw_algo_allowed" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -def 
test_success_multiple_services_types(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": { - "serviceId": sa_compute.id, - "meta": { - "rawcode": "console.log('Hello world'!)", - "format": "docker-image", - "version": "0.1", - "container": { - "entrypoint": "node $ALGO", - "image": "oceanprotocol/algo_dockers", - "tag": "python-branin", - "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", - }, - }, - }, - "additionalDatasets": [ - {"documentId": ddo.did, "transferTxId": "ddo.did", "serviceId": "access_1"} - ], - "environment": "ocean-compute", - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - def another_side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if args[0].type == "access": - return None - return [{"url": this_is_a_gist, "type": "url"}] - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - with patch( - "ocean_provider.validation.algo.get_service_files_list", - side_effect=another_side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is True - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -def test_fail_missing_algo_meta_documentId(provider_wallet, consumer_address, web3): - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id}, - "algorithm": {"serviceId": None, "meta": None}, - "additionalDatasets": [ - {"documentId": ddo.did, "transferTxId": "ddo.did", "serviceId": "access_1"} - ], - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - def another_side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if args[0].type == "access": - return None - return [{"url": this_is_a_gist, "type": "url"}] - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - with patch( - "ocean_provider.validation.algo.get_service_files_list", - side_effect=another_side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm" - assert validator.message == "missing_meta_documentId" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": this_is_a_gist, "type": "url"}], -) -def test_fee_amount_not_paid(provider_wallet, consumer_address, web3): - """Tests happy flow of validator with algo ddo.""" - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = 
get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": { - "documentId": alg_ddo.did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - with patch("ocean_provider.validation.algo.get_provider_fee_amount") as mock: - mock.return_value = 10**18 - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "order" - assert validator.message == "fees_not_paid" - - -@pytest.mark.unit -@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, "")) -@patch( - "ocean_provider.validation.algo.validate_order", - return_value=(None, None, provider_fees_event, None), -) -@patch( - "ocean_provider.validation.algo.get_service_files_list", - return_value=[{"url": "http://some.broken.url", "type": "url"}], -) -def test_algo_ddo_file_broken(provider_wallet, consumer_address, web3): - """Tests case where algo checksum can not be computed.""" - ddo = Asset(ddo_dict) - alg_ddo = Asset(alg_ddo_dict) - sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS) - sa = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - data = { - "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"}, - "algorithm": { - "documentId": alg_ddo.did, - "serviceId": sa_compute.id, - "transferTxId": "alg_tx_id", - }, - "environment": "ocean-compute", - } - - def side_effect(*args, **kwargs): - nonlocal ddo, alg_ddo - if ddo.did == args[1]: - return ddo - if alg_ddo.did == args[1]: - return alg_ddo - - with patch( - "ocean_provider.validation.algo.get_asset_from_metadatastore", - side_effect=side_effect, - ): - validator = WorkflowValidator(consumer_address, data) - assert validator.validate() is False - assert validator.resource == "algorithm" - assert validator.message == "file_unavailable" From 6ff9dd30f44d7a797c18306397ea7b53d72ea825 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 3 Apr 2023 14:46:33 +0300 Subject: [PATCH 54/83] Deleted more tests. 
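For reviewers: every test removed below specializes one skeleton, so a minimal
sketch of that shared shape may help when reading the deletions. Illustrative
only; it assumes the suite's existing pieces behave as in the hunks above: the
ddo_dict/alg_ddo_dict samples, the consumer_address fixture,
get_first_service_by_type from tests.test_helpers, and WorkflowValidator living
in ocean_provider.validation.algo (the module every test patches). Each deleted
case is this skeleton plus one mutation of the payload and one expected
resource/message pair.

    from unittest.mock import patch

    from ocean_provider.utils.services import ServiceType
    from ocean_provider.validation.algo import WorkflowValidator
    from tests.test_helpers import get_first_service_by_type

    def start_payload(ddo, alg_ddo):
        # The request shape shared by the cases above: one compute-type
        # dataset plus one access-type algorithm, each with an order tx.
        sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
        sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
        return {
            "dataset": {
                "documentId": ddo.did,
                "serviceId": sa.id,
                "transferTxId": "tx_id",
            },
            "algorithm": {
                "documentId": alg_ddo.did,
                "serviceId": sa_compute.id,
                "transferTxId": "alg_tx_id",
            },
        }

    def assert_validation_fails(consumer_address, data, ddos, resource, message):
        # Resolve DDOs from the metadata store with the usual stub, then
        # check the validator reports the expected resource/message pair.
        def side_effect(*args, **kwargs):
            return next((d for d in ddos if d.did == args[1]), None)

        with patch(
            "ocean_provider.validation.algo.get_asset_from_metadatastore",
            side_effect=side_effect,
        ):
            validator = WorkflowValidator(consumer_address, data)
            assert validator.validate() is False
            assert validator.resource == resource
            assert validator.message == message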
--- ocean_provider/test/test_user_nonce.py | 96 ---------- tests/test_RBAC.py | 251 ------------------------- tests/test_graphql.py | 143 -------------- tests/test_proof.py | 83 -------- tests/test_routes.py | 140 -------------- tests/test_smartcontract.py | 205 -------------------- 6 files changed, 918 deletions(-) delete mode 100644 ocean_provider/test/test_user_nonce.py delete mode 100644 tests/test_RBAC.py delete mode 100644 tests/test_graphql.py delete mode 100644 tests/test_proof.py delete mode 100644 tests/test_routes.py delete mode 100644 tests/test_smartcontract.py diff --git a/ocean_provider/test/test_user_nonce.py b/ocean_provider/test/test_user_nonce.py deleted file mode 100644 index 829dc292..00000000 --- a/ocean_provider/test/test_user_nonce.py +++ /dev/null @@ -1,96 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import os -from datetime import datetime -from unittest.mock import patch - -import pytest -import sqlalchemy -from flask_caching import Cache -from ocean_provider import models, user_nonce -from ocean_provider.myapp import app -from ocean_provider.user_nonce import ( - get_nonce, - get_or_create_user_nonce_object, - update_nonce, -) - -cache = Cache( - app, - config={ - "CACHE_TYPE": "redis", - "CACHE_KEY_PREFIX": "ocean_provider", - "CACHE_REDIS_URL": os.getenv("REDIS_CONNECTION"), - }, -) - - -@pytest.mark.unit -def test_get_and_update_nonce(monkeypatch, publisher_address, consumer_address): - # pass through sqlite - monkeypatch.delenv("REDIS_CONNECTION") - - # get_nonce can be used on addresses that are not in the user_nonce table - assert get_nonce("0x0000000000000000000000000000000000000000") is None - assert get_or_create_user_nonce_object( - "0x0000000000000000000000000000000000000000", datetime.utcnow().timestamp() - ) - - # update two times because, if we just pruned, we start from None - update_nonce(publisher_address, datetime.utcnow().timestamp()) - publisher_nonce = get_nonce(publisher_address) - update_nonce(publisher_address, datetime.utcnow().timestamp()) - new_publisher_nonce = get_nonce(publisher_address) - - assert new_publisher_nonce >= publisher_nonce - - # get_nonce doesn't affect the value of nonce - publisher_nonce = get_nonce(publisher_address) - assert get_nonce(publisher_address) == publisher_nonce - - -@pytest.mark.unit -def test_get_and_update_nonce_redis(publisher_address, consumer_address): - # get_nonce can be used on addresses that are not in the user_nonce table - cache.delete("0x0000000000000000000000000000000000000000") - assert get_nonce("0x0000000000000000000000000000000000000000") is None - assert get_or_create_user_nonce_object( - "0x0000000000000000000000000000000000000000", datetime.utcnow().timestamp() - ) - - # update two times because, if we just pruned, we start from None - update_nonce(publisher_address, datetime.utcnow().timestamp()) - publisher_nonce = get_nonce(publisher_address) - update_nonce(publisher_address, datetime.utcnow().timestamp()) - new_publisher_nonce = get_nonce(publisher_address) - - assert new_publisher_nonce >= publisher_nonce - - # get_nonce doesn't affect the value of nonce - publisher_nonce = get_nonce(publisher_address) - assert get_nonce(publisher_address) == publisher_nonce - - -@pytest.mark.unit -def test_update_nonce_exception(monkeypatch, publisher_address): - # pass through sqlite - monkeypatch.delenv("REDIS_CONNECTION") - - # Ensure address exists in database - update_nonce(publisher_address, datetime.utcnow().timestamp()) - - # Create 
duplicate nonce_object - with patch.object( - user_nonce, - "get_or_create_user_nonce_object", - return_value=models.UserNonce(address=publisher_address, nonce="0"), - ): - with pytest.raises(sqlalchemy.exc.IntegrityError): - update_nonce(publisher_address, datetime.utcnow().timestamp()) - - publisher_nonce = get_nonce(publisher_address) - update_nonce(publisher_address, None) - # no effect - assert publisher_nonce == get_nonce(publisher_address) diff --git a/tests/test_RBAC.py b/tests/test_RBAC.py deleted file mode 100644 index 791e1e26..00000000 --- a/tests/test_RBAC.py +++ /dev/null @@ -1,251 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import copy -import json -from datetime import datetime - -import pytest -from ocean_provider.constants import BaseURLs -from ocean_provider.exceptions import RequestNotFound -from ocean_provider.utils.accounts import sign_message -from ocean_provider.utils.asset import Asset -from ocean_provider.utils.services import Service, ServiceType -from ocean_provider.validation.provider_requests import RBACValidator -from tests.ddo.ddo_sample1_v4 import json_dict as ddo_sample1_v4 -from tests.ddo.ddo_sample_algorithm_v4 import algorithm_ddo_sample -from tests.helpers.compute_helpers import get_compute_signature -from tests.helpers.ddo_dict_builders import get_compute_service -from tests.test_helpers import get_first_service_by_type - - -@pytest.mark.unit -def test_invalid_request_name(): - req = dict() - with pytest.raises(RequestNotFound) as err: - RBACValidator(request_name="MyRequest", request=req) - assert err.value.args[0] == "Request name is not valid!" - - -encrypt_endpoint = BaseURLs.SERVICES_URL + "/encrypt" - - -@pytest.mark.unit -def test_encrypt_request_payload(consumer_wallet, publisher_wallet, monkeypatch): - monkeypatch.setenv("PRIVATE_PROVIDER", "1") - document = { - "url": "http://localhost:8030" + encrypt_endpoint, - "index": 0, - "checksum": "foo_checksum", - "contentLength": "4535431", - "contentType": "text/csv", - "encoding": "UTF-8", - "compression": "zip", - } - req = { - "data": json.dumps(document), - "publisherAddress": publisher_wallet.address, - } - validator = RBACValidator(request_name="EncryptRequest", request=req) - payload = validator.build_payload() - - assert validator.request == req - assert payload["eventType"] == "encryptUrl" - assert payload["providerAccess"] == "private" - assert payload["component"] == "provider" - assert payload["credentials"] == { - "type": "address", - "value": publisher_wallet.address, - } - - -@pytest.mark.unit -def test_wrong_encrypt_request_payload(consumer_wallet, publisher_wallet, monkeypatch): - monkeypatch.setenv("PRIVATE_PROVIDER", "1") - req = { - "publisherAddress": publisher_wallet.address, - } - validator = RBACValidator(request_name="EncryptRequest", request=req) - with pytest.raises(Exception) as err: - validator.build_payload() - assert err.value.args[0] == "Data to encrypt is empty." 
- - -@pytest.mark.unit -def test_initialize_request_payload( - client, publisher_wallet, consumer_wallet, provider_address, web3 -): - asset = Asset(ddo_sample1_v4) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - - req = { - "documentId": asset.did, - "serviceId": service.id, - "datatoken": service.datatoken_address, - "consumerAddress": consumer_wallet.address, - } - - validator = RBACValidator(request_name="InitializeRequest", request=req) - payload = validator.build_payload() - assert validator.request == req - assert payload["eventType"] == "initialize" - assert payload["providerAccess"] == "public" - assert payload["component"] == "provider" - assert payload["credentials"] == { - "type": "address", - "value": consumer_wallet.address, - } - assert payload["dids"][0]["did"] == asset.did - assert payload["dids"][0]["serviceId"] == service.id - - -@pytest.mark.unit -def test_access_request_payload( - client, publisher_wallet, consumer_wallet, provider_address, web3 -): - asset = Asset(ddo_sample1_v4) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - - req = { - "documentId": asset.did, - "serviceId": service.id, - "datatoken": service.datatoken_address, - "consumerAddress": consumer_wallet.address, - "transferTxId": "0xsometx", - "fileIndex": 0, - } - - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - req["signature"] = sign_message(_msg, consumer_wallet) - req["nonce"] = nonce - - validator = RBACValidator(request_name="DownloadRequest", request=req) - payload = validator.build_payload() - assert validator.request == req - assert payload["eventType"] == "access" - assert payload["providerAccess"] == "public" - assert payload["component"] == "provider" - assert payload["credentials"] == { - "type": "address", - "value": consumer_wallet.address, - } - assert payload["dids"][0]["did"] == asset.did - assert payload["dids"][0]["serviceId"] == service.id - - -@pytest.mark.unit -def test_compute_payload_without_additional_inputs( - client, publisher_wallet, consumer_wallet, provider_address -): - ddo_sample1 = copy.deepcopy(ddo_sample1_v4) - ddo = Asset(ddo_sample1) - ddo.services.append( - Service.from_json(1, get_compute_service(None, None, "0x0", "0x0")) - ) - - alg_ddo = Asset(algorithm_ddo_sample) - sa = get_first_service_by_type(alg_ddo, ServiceType.COMPUTE) - sa_compute = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - nonce, signature = get_compute_signature(client, consumer_wallet, ddo.did) - req = { - "dataset": { - "documentId": ddo.did, - "serviceId": sa.id, - "transferTxId": "0xsometx", - }, - "algorithm": { - "serviceId": sa_compute.id, - "documentId": alg_ddo.did, - "transferTxId": "0xsomeothertx", - }, - "signature": signature, - "nonce": nonce, - "consumerAddress": consumer_wallet.address, - } - - validator = RBACValidator(request_name="ComputeStartRequest", request=req) - payload = validator.build_payload() - assert validator.request == req - assert payload["eventType"] == "compute" - assert payload["providerAccess"] == "public" - assert payload["component"] == "provider" - assert payload["credentials"] == { - "type": "address", - "value": consumer_wallet.address, - } - assert payload["dids"][0]["did"] == ddo.did - assert payload["dids"][0]["serviceId"] == sa.id - assert payload["algos"][0]["did"] == alg_ddo.did - assert payload["algos"][0]["serviceId"] == sa_compute.id - - -@pytest.mark.unit -def test_compute_request_payload( - client, publisher_wallet, consumer_wallet, provider_address -): - ddo_sample1 = 
copy.deepcopy(ddo_sample1_v4) - ddo = Asset(ddo_sample1) - ddo.services.append( - Service.from_json( - 1, - get_compute_service( - None, - None, - "0x0000000000000000000000000000000000000000", - "0x0000000000000000000000000000000000000000", - ), - ) - ) - - alg_ddo = Asset(algorithm_ddo_sample) - sa = get_first_service_by_type(alg_ddo, ServiceType.COMPUTE) - sa_compute = get_first_service_by_type(ddo, ServiceType.COMPUTE) - - ddo_sample2 = copy.deepcopy(ddo_sample1_v4) - ddo_sample2["did"] = "0xsomeotherdid" - ddo2 = Asset(ddo_sample2) - sa2 = get_first_service_by_type(ddo2, ServiceType.ACCESS) - - nonce, signature = get_compute_signature(client, consumer_wallet, ddo.did) - - req = { - "dataset": { - "documentId": ddo.did, - "serviceId": sa.id, - "transferTxId": "0xsometx", - }, - "algorithm": { - "documentId": alg_ddo.did, - "transferTxId": "0xsomeothertx", - "serviceId": sa_compute.id, - }, - "signature": signature, - "nonce": nonce, - "consumerAddress": consumer_wallet.address, - "additionalDatasets": [ - { - "documentId": ddo2.did, - "transferTxId": "0xsomeevenothertx", - "serviceId": sa2.id, - } - ], - } - validator = RBACValidator(request_name="ComputeRequest", request=req) - payload = validator.build_payload() - assert validator.request == req - assert payload["eventType"] == "compute" - assert payload["providerAccess"] == "public" - assert payload["component"] == "provider" - assert payload["credentials"] == { - "type": "address", - "value": consumer_wallet.address, - } - assert payload["dids"][0]["did"] == ddo.did - assert payload["dids"][0]["serviceId"] == sa.id - assert payload["algos"][0]["did"] == alg_ddo.did - assert payload["algos"][0]["serviceId"] == sa_compute.id - assert payload["additionalDids"][0]["did"] == ddo2.did - assert payload["additionalDids"][0]["serviceId"] == sa2.id diff --git a/tests/test_graphql.py b/tests/test_graphql.py deleted file mode 100644 index 89556af9..00000000 --- a/tests/test_graphql.py +++ /dev/null @@ -1,143 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import json -from datetime import datetime - -import pytest -from ocean_provider.constants import BaseURLs -from ocean_provider.utils.accounts import sign_message -from ocean_provider.utils.provider_fees import get_provider_fees -from ocean_provider.utils.services import ServiceType -from tests.test_helpers import ( - get_first_service_by_type, - get_registered_asset, - mint_100_datatokens, - start_order, -) - - -@pytest.mark.integration -def test_download_graphql_asset(client, publisher_wallet, consumer_wallet, web3): - unencrypted_files_list = [ - { - "type": "graphql", - "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", - "query": """ - query{ - nfts(orderBy: createdTimestamp,orderDirection:desc){ - id - symbol - createdTimestamp - } - } - """, - } - ] - asset = get_registered_asset( - publisher_wallet, unencrypted_files_list=unencrypted_files_list - ) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, _ = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - payload = { - "documentId": asset.did, - "serviceId": service.id, - "consumerAddress": consumer_wallet.address, - "transferTxId": tx_id, - "fileIndex": 0, - } - - download_endpoint = BaseURLs.SERVICES_URL 
+ "/download" - - # Consume using url index and signature (with nonce) - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - payload["signature"] = sign_message(_msg, consumer_wallet) - payload["nonce"] = nonce - response = client.get( - service.service_endpoint + download_endpoint, query_string=payload - ) - assert response.status_code == 200, f"{response.data}" - - -@pytest.mark.integration -def test_download_graphql_asset_with_userdata( - client, publisher_wallet, consumer_wallet, web3 -): - unencrypted_files_list = [ - { - "type": "graphql", - "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", - "query": """ - query nfts($nftAddress: String){ - nfts(where: {id:$nftAddress},orderBy: createdTimestamp,orderDirection:desc){ - id - symbol - createdTimestamp - } - } - """, - } - ] - asset = get_registered_asset( - publisher_wallet, - unencrypted_files_list=unencrypted_files_list, - custom_userdata=[ - { - "name": "nftAddress", - "type": "text", - "label": "nftAddress", - "required": True, - "description": "Nft to search for", - } - ], - ) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, _ = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - payload = { - "documentId": asset.did, - "serviceId": service.id, - "consumerAddress": consumer_wallet.address, - "transferTxId": tx_id, - "fileIndex": 0, - "userdata": json.dumps({"nftAddress": asset.nftAddress.lower()}), - } - - download_endpoint = BaseURLs.SERVICES_URL + "/download" - # Consume using url index and signature (with nonce) - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - payload["signature"] = sign_message(_msg, consumer_wallet) - payload["nonce"] = nonce - response = client.get( - service.service_endpoint + download_endpoint, query_string=payload - ) - assert response.status_code == 200, f"{response.data}" - reply = json.loads(response.data) - assert ( - len(reply["data"]["nfts"]) == 1 - ) # make sure our parametrized query works, otherwise we will get a lot of nfts - assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower() diff --git a/tests/test_proof.py b/tests/test_proof.py deleted file mode 100644 index 3e29c37d..00000000 --- a/tests/test_proof.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import json -from datetime import datetime -from unittest.mock import Mock, patch - -import pytest -from ocean_provider.utils.accounts import sign_message -from ocean_provider.utils.proof import send_proof -from ocean_provider.utils.provider_fees import get_provider_fees -from ocean_provider.utils.services import ServiceType -from requests.models import Response -from tests.test_helpers import ( - get_first_service_by_type, - get_registered_asset, - mint_100_datatokens, - start_order, -) - - -@pytest.mark.unit -def test_no_proof_setup(client): - assert send_proof(None, None, None, None, None, None, None) is None - - -@pytest.mark.unit -def test_http_proof(client, monkeypatch): - monkeypatch.setenv("USE_HTTP_PROOF", "http://test.com") - provider_data = json.dumps({"test_data": "test_value"}) - - with patch("requests.post") as mock: - response = Mock(spec=Response) - response.json.return_value = {"a valid response": ""} - 
response.status_code = 200 - mock.return_value = response - - assert send_proof(8996, b"1", provider_data, None, None, None, None) is True - - mock.assert_called_once() - - with patch("requests.post") as mock: - mock.side_effect = Exception("Boom!") - - assert send_proof(8996, b"1", provider_data, None, None, None, None) is None - - mock.assert_called_once() - - -@pytest.mark.integration -def test_chain_proof(client, monkeypatch, web3, publisher_wallet, consumer_wallet): - monkeypatch.setenv("USE_CHAIN_PROOF", "1") - provider_data = json.dumps({"test_data": "test_value"}) - - asset = get_registered_asset(publisher_wallet) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, receipt = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - nonce = str(datetime.utcnow().timestamp()) - - consumer_data = _msg = f"{asset.did}{nonce}" - signature = sign_message(_msg, consumer_wallet) - - assert send_proof( - 8996, - receipt.transactionHash, - provider_data, - consumer_data, - signature, - consumer_wallet.address, - service.datatoken_address, - ) diff --git a/tests/test_routes.py b/tests/test_routes.py deleted file mode 100644 index 5bc9891a..00000000 --- a/tests/test_routes.py +++ /dev/null @@ -1,140 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import json -from datetime import datetime - -import pytest -from ocean_provider.constants import BaseURLs -from ocean_provider.run import get_services_endpoints -from ocean_provider.user_nonce import get_nonce, update_nonce -from ocean_provider.utils.accounts import sign_message -from tests.test_helpers import get_registered_asset - - -@pytest.mark.unit -def test_expose_endpoints(client): - get_response = client.get("/") - result = get_response.get_json() - services_endpoints = get_services_endpoints() - assert "serviceEndpoints" in result - assert "software" in result - assert "version" in result - assert "chainIds" in result - assert "providerAddresses" in result - assert get_response.status == "200 OK" - assert len(result["serviceEndpoints"]) == len(services_endpoints) - - -@pytest.mark.unit -def test_spec(client): - response = client.get("/spec") - assert response.status == "200 OK" - - -@pytest.mark.unit -def test_root(client): - response = client.get("/") - assert response.status == "200 OK" - - -@pytest.mark.unit -def test_invalid_endpoint(client, caplog): - response = client.get("invalid/endpoint", query_string={"hello": "world"}) - assert response.status == "404 NOT FOUND" - # TODO: Capture and verify INFO log from log_incoming_request using caplog - - -@pytest.mark.unit -def test_empty_payload_encryption(client): - encrypt_endpoint = BaseURLs.SERVICES_URL + "/encrypt" - publish = client.post(encrypt_endpoint, data=None, content_type="application/json") - assert publish.status_code == 400 - - -@pytest.mark.integration -def test_encrypt_endpoint(client, provider_wallet, publisher_wallet): - asset = get_registered_asset(publisher_wallet) - files_list_str = '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]' - - nonce = datetime.utcnow().timestamp() - msg = f"{asset.did}{nonce}" - signature = sign_message(msg, provider_wallet) - - payload = { - "documentId": asset.did, - "signature": 
signature, - "document": files_list_str, - "publisherAddress": provider_wallet.address, - } - encrypt_endpoint = BaseURLs.SERVICES_URL + "/encrypt?chainId=8996" - response = client.post( - encrypt_endpoint, json=payload, content_type="application/octet-stream" - ) - assert response.content_type == "text/plain" - assert response.data - assert response.status_code == 201 - - -@pytest.mark.unit -def test_get_nonce(client, publisher_wallet): - address = publisher_wallet.address - # Ensure address exists in database - update_nonce(address, datetime.utcnow().timestamp()) - - endpoint = BaseURLs.SERVICES_URL + "/nonce" - response = client.get( - endpoint + "?" + f"&userAddress={address}", content_type="application/json" - ) - assert ( - response.status_code == 200 and response.data - ), f"get nonce endpoint failed: response status {response.status}, data {response.data}" - - value = response.json if response.json else json.loads(response.data) - assert value["nonce"] == get_nonce(address) - - -@pytest.mark.unit -def test_validate_container(client): - endpoint = BaseURLs.SERVICES_URL + "/validateContainer" - - valid_payload = { - "entrypoint": "node $ALGO", - "image": "oceanprotocol/algo_dockers", - "tag": "python-branin", - "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", - } - - response = client.post(endpoint, json=valid_payload) - assert response.status_code == 200 - - invalid_payload = { - "entrypoint": "node $ALGO", - "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", - } - - response = client.post(endpoint, json=invalid_payload) - assert response.status_code == 400 - assert response.json["error"] == "missing_entrypoint_image_checksum" - - another_valid_payload = { - "entrypoint": "node $ALGO", - "image": "node", # missing library prefix - "tag": "latest", - "checksum": "sha256:5c918be3339c8460d13a38e2fc7c027af1cab382b36561f90d3c03342fa866a4", - } - response = client.post(endpoint, json=another_valid_payload) - assert response.status_code == 200 - - invalid_payload = { - "entrypoint": "node $ALGO", - "image": "doesntexist", - "tag": "blabla", - # doesn't start with sha256: - "checksum": "8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", - } - - response = client.post(endpoint, json=invalid_payload) - assert response.status_code == 400 - assert response.json["error"] == "checksum_prefix" diff --git a/tests/test_smartcontract.py b/tests/test_smartcontract.py deleted file mode 100644 index d25b7f96..00000000 --- a/tests/test_smartcontract.py +++ /dev/null @@ -1,205 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import json -import os -from datetime import datetime - -import pytest -from ocean_provider.constants import BaseURLs -from ocean_provider.utils.accounts import sign_message -from ocean_provider.utils.address import get_contract_address -from ocean_provider.utils.provider_fees import get_provider_fees -from ocean_provider.utils.services import ServiceType -from tests.test_helpers import ( - get_first_service_by_type, - get_registered_asset, - mint_100_datatokens, - start_order, -) - - -@pytest.mark.integration -def test_download_smartcontract_asset(client, publisher_wallet, consumer_wallet, web3): - # publish asset, that calls Router's swapOceanFee function (does not need params) - router_address = get_contract_address(os.getenv("ADDRESS_FILE"), "Router", 8996) - abi = { - "inputs": [], - "name": "swapOceanFee", - "outputs": [{"internalType": 
"uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - } - unencrypted_files_list = [ - { - "type": "smartcontract", - "address": router_address, - "abi": abi, - "chainId": 8996, - } - ] - asset = get_registered_asset( - publisher_wallet, unencrypted_files_list=unencrypted_files_list - ) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, _ = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - payload = { - "documentId": asset.did, - "serviceId": service.id, - "consumerAddress": consumer_wallet.address, - "transferTxId": tx_id, - "fileIndex": 0, - } - - download_endpoint = BaseURLs.SERVICES_URL + "/download" - - # Consume using url index and signature (with nonce) - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - payload["signature"] = sign_message(_msg, consumer_wallet) - payload["nonce"] = nonce - response = client.get( - service.service_endpoint + download_endpoint, query_string=payload - ) - assert response.status_code == 200, f"{response.data}" - - -@pytest.mark.integration -def test_download_smartcontract_asset_with_userdata( - client, publisher_wallet, consumer_wallet, web3 -): - # publish asset, that calls Router's getOPCFee for a provided baseToken userdata - router_address = get_contract_address(os.getenv("ADDRESS_FILE"), "Router", 8996) - abi = { - "inputs": [{"internalType": "address", "name": "baseToken", "type": "address"}], - "name": "getOPCFee", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - } - unencrypted_files_list = [ - { - "type": "smartcontract", - "address": router_address, - "abi": abi, - "chainId": 8996, - } - ] - asset = get_registered_asset( - publisher_wallet, - unencrypted_files_list=unencrypted_files_list, - custom_userdata=[ - { - "name": "baseToken", - "type": "text", - "label": "baseToken", - "required": True, - "description": "baseToken to check for fee", - } - ], - ) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, _ = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - payload = { - "documentId": asset.did, - "serviceId": service.id, - "consumerAddress": consumer_wallet.address, - "transferTxId": tx_id, - "fileIndex": 0, - "userdata": json.dumps({"baseToken": asset.nftAddress.lower()}), - } - - download_endpoint = BaseURLs.SERVICES_URL + "/download" - # Consume using url index and signature (with nonce) - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - payload["signature"] = sign_message(_msg, consumer_wallet) - payload["nonce"] = nonce - response = client.get( - service.service_endpoint + download_endpoint, query_string=payload - ) - assert response.status_code == 200, f"{response.data}" - - -@pytest.mark.integration -def test_download_smartcontract_asset_with_pure_function( - client, publisher_wallet, consumer_wallet, web3 -): - # publish dummy asset, to get a datatoken deployed - dummy_asset = get_registered_asset(publisher_wallet) - dummy_service = 
get_first_service_by_type(dummy_asset, ServiceType.ACCESS) - # create abi for getId - abi = { - "inputs": [], - "name": "getId", - "outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}], - "stateMutability": "pure", - "type": "function", - } - - unencrypted_files_list = [ - { - "type": "smartcontract", - "address": dummy_service.datatoken_address, - "abi": abi, - "chainId": 8996, - } - ] - asset = get_registered_asset( - publisher_wallet, unencrypted_files_list=unencrypted_files_list - ) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, _ = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - payload = { - "documentId": asset.did, - "serviceId": service.id, - "consumerAddress": consumer_wallet.address, - "transferTxId": tx_id, - "fileIndex": 0, - } - - download_endpoint = BaseURLs.SERVICES_URL + "/download" - - # Consume using url index and signature (with nonce) - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - payload["signature"] = sign_message(_msg, consumer_wallet) - payload["nonce"] = nonce - response = client.get( - service.service_endpoint + download_endpoint, query_string=payload - ) - assert response.status_code == 200, f"{response.data}" From 9b33877818103f27d9e104b52c5ffd949fb519c2 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 3 Apr 2023 18:48:33 +0300 Subject: [PATCH 55/83] Removed the graph from CI. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 53e37eda..886c0ba8 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -33,7 +33,7 @@ jobs: - name: Run Barge working-directory: ${{ github.workspace }}/barge run: | - bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d --with-thegraph > start_ocean.log & + bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d > start_ocean.log & - name: Wait for contracts deployment and C2D cluster to be ready working-directory: ${{ github.workspace }}/barge run: | From 5cd5402242b45a72dc6b54be602650631cd41e0b Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 3 Apr 2023 20:13:56 +0300 Subject: [PATCH 56/83] Added back the tests. 
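The files below are restored verbatim. As a quick orientation to the first one,
this is the nonce lifecycle its unit tests exercise (a sketch, not applied
code; it assumes get_nonce/update_nonce keep the signatures used in the tests,
and that REDIS_CONNECTION is unset so the sqlite path is taken):

    from datetime import datetime

    from ocean_provider.user_nonce import get_nonce, update_nonce

    address = "0x0000000000000000000000000000000000000000"

    # Addresses never seen before have no nonce yet.
    assert get_nonce(address) is None

    # Each update stores the supplied timestamp as the new nonce; driving it
    # with utcnow() keeps successive values non-decreasing.
    update_nonce(address, datetime.utcnow().timestamp())
    first = get_nonce(address)
    update_nonce(address, datetime.utcnow().timestamp())
    assert get_nonce(address) >= first

    # Reads are side-effect free: the stored nonce is unchanged.
    assert get_nonce(address) == get_nonce(address)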
--- ocean_provider/test/test_user_nonce.py | 96 ++ ocean_provider/utils/test/test_accounts.py | 54 + ocean_provider/utils/test/test_address.py | 31 + ocean_provider/utils/test/test_basics.py | 156 +++ ocean_provider/utils/test/test_compute.py | 30 + ocean_provider/utils/test/test_credentials.py | 90 ++ ocean_provider/utils/test/test_currency.py | 96 ++ ocean_provider/utils/test/test_encyption.py | 35 + .../utils/test/test_error_responses.py | 70 ++ .../utils/test/test_provider_fees.py | 63 + ocean_provider/utils/test/test_url.py | 93 ++ ocean_provider/utils/test/test_util.py | 445 ++++++++ .../validation/test/test_algo_validation.py | 1012 +++++++++++++++++ tests/test_RBAC.py | 251 ++++ tests/test_graphql.py | 143 +++ tests/test_proof.py | 83 ++ tests/test_routes.py | 140 +++ tests/test_smartcontract.py | 205 ++++ 18 files changed, 3093 insertions(+) create mode 100644 ocean_provider/test/test_user_nonce.py create mode 100644 ocean_provider/utils/test/test_accounts.py create mode 100644 ocean_provider/utils/test/test_address.py create mode 100644 ocean_provider/utils/test/test_basics.py create mode 100644 ocean_provider/utils/test/test_compute.py create mode 100644 ocean_provider/utils/test/test_credentials.py create mode 100644 ocean_provider/utils/test/test_currency.py create mode 100644 ocean_provider/utils/test/test_encyption.py create mode 100644 ocean_provider/utils/test/test_error_responses.py create mode 100644 ocean_provider/utils/test/test_provider_fees.py create mode 100644 ocean_provider/utils/test/test_url.py create mode 100644 ocean_provider/utils/test/test_util.py create mode 100644 ocean_provider/validation/test/test_algo_validation.py create mode 100644 tests/test_RBAC.py create mode 100644 tests/test_graphql.py create mode 100644 tests/test_proof.py create mode 100644 tests/test_routes.py create mode 100644 tests/test_smartcontract.py diff --git a/ocean_provider/test/test_user_nonce.py b/ocean_provider/test/test_user_nonce.py new file mode 100644 index 00000000..829dc292 --- /dev/null +++ b/ocean_provider/test/test_user_nonce.py @@ -0,0 +1,96 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import os +from datetime import datetime +from unittest.mock import patch + +import pytest +import sqlalchemy +from flask_caching import Cache +from ocean_provider import models, user_nonce +from ocean_provider.myapp import app +from ocean_provider.user_nonce import ( + get_nonce, + get_or_create_user_nonce_object, + update_nonce, +) + +cache = Cache( + app, + config={ + "CACHE_TYPE": "redis", + "CACHE_KEY_PREFIX": "ocean_provider", + "CACHE_REDIS_URL": os.getenv("REDIS_CONNECTION"), + }, +) + + +@pytest.mark.unit +def test_get_and_update_nonce(monkeypatch, publisher_address, consumer_address): + # pass through sqlite + monkeypatch.delenv("REDIS_CONNECTION") + + # get_nonce can be used on addresses that are not in the user_nonce table + assert get_nonce("0x0000000000000000000000000000000000000000") is None + assert get_or_create_user_nonce_object( + "0x0000000000000000000000000000000000000000", datetime.utcnow().timestamp() + ) + + # update two times because, if we just pruned, we start from None + update_nonce(publisher_address, datetime.utcnow().timestamp()) + publisher_nonce = get_nonce(publisher_address) + update_nonce(publisher_address, datetime.utcnow().timestamp()) + new_publisher_nonce = get_nonce(publisher_address) + + assert new_publisher_nonce >= publisher_nonce + + # get_nonce doesn't affect the value of nonce + publisher_nonce = 
get_nonce(publisher_address) + assert get_nonce(publisher_address) == publisher_nonce + + +@pytest.mark.unit +def test_get_and_update_nonce_redis(publisher_address, consumer_address): + # get_nonce can be used on addresses that are not in the user_nonce table + cache.delete("0x0000000000000000000000000000000000000000") + assert get_nonce("0x0000000000000000000000000000000000000000") is None + assert get_or_create_user_nonce_object( + "0x0000000000000000000000000000000000000000", datetime.utcnow().timestamp() + ) + + # update two times because, if we just pruned, we start from None + update_nonce(publisher_address, datetime.utcnow().timestamp()) + publisher_nonce = get_nonce(publisher_address) + update_nonce(publisher_address, datetime.utcnow().timestamp()) + new_publisher_nonce = get_nonce(publisher_address) + + assert new_publisher_nonce >= publisher_nonce + + # get_nonce doesn't affect the value of nonce + publisher_nonce = get_nonce(publisher_address) + assert get_nonce(publisher_address) == publisher_nonce + + +@pytest.mark.unit +def test_update_nonce_exception(monkeypatch, publisher_address): + # pass through sqlite + monkeypatch.delenv("REDIS_CONNECTION") + + # Ensure address exists in database + update_nonce(publisher_address, datetime.utcnow().timestamp()) + + # Create duplicate nonce_object + with patch.object( + user_nonce, + "get_or_create_user_nonce_object", + return_value=models.UserNonce(address=publisher_address, nonce="0"), + ): + with pytest.raises(sqlalchemy.exc.IntegrityError): + update_nonce(publisher_address, datetime.utcnow().timestamp()) + + publisher_nonce = get_nonce(publisher_address) + update_nonce(publisher_address, None) + # no effect + assert publisher_nonce == get_nonce(publisher_address) diff --git a/ocean_provider/utils/test/test_accounts.py b/ocean_provider/utils/test/test_accounts.py new file mode 100644 index 00000000..0e9f2943 --- /dev/null +++ b/ocean_provider/utils/test/test_accounts.py @@ -0,0 +1,54 @@ +import os +from datetime import datetime, timedelta + +import pytest +from ocean_provider.exceptions import InvalidSignatureError +from ocean_provider.user_nonce import update_nonce +from ocean_provider.utils.accounts import ( + get_private_key, + sign_message, + verify_signature, +) + + +@pytest.mark.unit +def test_get_private_key(publisher_wallet): + assert ( + str(get_private_key(publisher_wallet)).lower() + == os.getenv("TEST_PRIVATE_KEY1").lower() + ) + + +@pytest.mark.unit +def test_verify_signature(consumer_wallet, publisher_wallet): + update_nonce(consumer_wallet.address, datetime.utcnow().timestamp()) + + nonce = datetime.utcnow().timestamp() + did = "did:op:test" + msg = f"{consumer_wallet.address}{did}{nonce}" + msg_w_nonce = f"{consumer_wallet.address}{did}" + signature = sign_message(msg, consumer_wallet) + + assert verify_signature(consumer_wallet.address, signature, msg_w_nonce, nonce) + + nonce = datetime.utcnow().timestamp() + did = "did:op:test" + msg = f"{consumer_wallet.address}{did}{nonce}" + msg_w_nonce = f"{consumer_wallet.address}{did}" + signature = sign_message(msg, consumer_wallet) + + with pytest.raises(InvalidSignatureError) as e_info: + verify_signature(publisher_wallet.address, signature, msg_w_nonce, nonce) + + assert f"Invalid signature {signature} for ethereum address" in e_info.value.args[0] + + nonce = (datetime.utcnow() - timedelta(days=7)).timestamp() + did = "did:op:test" + msg = f"{consumer_wallet.address}{did}{nonce}" + msg_w_nonce = f"{consumer_wallet.address}{did}" + signature = sign_message(msg, 
consumer_wallet) + # expired nonce + with pytest.raises(InvalidSignatureError) as e_info: + verify_signature(consumer_wallet.address, signature, msg_w_nonce, nonce) + + assert e_info.value.args[0].startswith("Invalid signature expected nonce") diff --git a/ocean_provider/utils/test/test_address.py b/ocean_provider/utils/test/test_address.py new file mode 100644 index 00000000..ce46ce6d --- /dev/null +++ b/ocean_provider/utils/test/test_address.py @@ -0,0 +1,31 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import os + +import pytest +from ocean_provider.utils.address import get_address_json, get_contract_address + + +@pytest.mark.unit +def test_get_address_json(): + address_json = get_address_json(os.getenv("ADDRESS_FILE")) + assert address_json["development"]["chainId"] == 8996 + assert address_json["development"]["Ocean"].startswith("0x") + + +@pytest.mark.unit +def test_get_contract_address(): + assert get_contract_address( + os.getenv("ADDRESS_FILE"), "ERC721Factory", 8996 + ).startswith("0x") + + +@pytest.mark.unit +def test_get_address_json_missing_var(monkeypatch): + monkeypatch.delenv("ADDRESS_FILE") + address_json = get_address_json(os.getenv("ADDRESS_FILE")) + + assert address_json["goerli"]["chainId"] == 5 + assert address_json["goerli"]["Ocean"].startswith("0x") diff --git a/ocean_provider/utils/test/test_basics.py b/ocean_provider/utils/test/test_basics.py new file mode 100644 index 00000000..e5ecc41c --- /dev/null +++ b/ocean_provider/utils/test/test_basics.py @@ -0,0 +1,156 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +from datetime import datetime, timedelta + +import pytest +from ocean_provider.utils.basics import ( + decode_keyed, + get_configured_chains, + get_provider_addresses, + get_provider_private_key, + get_value_from_decoded_env, + get_web3, + get_web3_connection_provider, + send_ether, + validate_timestamp, +) +from ocean_provider.utils.currency import to_wei + + +@pytest.mark.unit +def test_get_web3_connection_provider(monkeypatch): + # typical http uri "http://foo.com" + provider = get_web3_connection_provider("http://foo.com") + assert provider.endpoint_uri == "http://foo.com" + + # typical https uri "https://bar.com" + provider = get_web3_connection_provider("https://bar.com") + assert provider.endpoint_uri == "https://bar.com" + + # non-supported name + with pytest.raises(AssertionError): + get_web3_connection_provider("not_network_name") + + # typical websockets uri "wss://foo.com" + provider = get_web3_connection_provider("wss://bah.com") + assert provider.endpoint_uri == "wss://bah.com" + + +@pytest.mark.unit +def test_send_ether(publisher_wallet, consumer_address): + assert send_ether( + get_web3(8996), publisher_wallet, consumer_address, to_wei(1) + ), "Send ether was unsuccessful." 
+
+
+@pytest.mark.unit
+def test_validate_timestamp():
+    timestamp_future = int((datetime.utcnow() + timedelta(hours=1)).timestamp())
+    assert validate_timestamp(timestamp_future)
+    assert validate_timestamp(1644831664000) is False
+    assert validate_timestamp(str(timestamp_future))
+
+    timestamp_past = (datetime.utcnow() - timedelta(hours=1)).timestamp()
+    assert validate_timestamp(timestamp_past) is False
+
+
+@pytest.mark.unit
+def test_decode_keyed(monkeypatch):
+    monkeypatch.setenv("TEST_ENV", '{"valid": "json"}')
+    assert decode_keyed("TEST_ENV") == {"valid": "json"}
+    monkeypatch.setenv("TEST_ENV", '{"invalid json"}')
+    assert not decode_keyed("TEST_ENV")
+    monkeypatch.setenv("TEST_ENV", "simple string")
+    assert not decode_keyed("TEST_ENV")
+
+
+@pytest.mark.unit
+def test_get_configured_chains(monkeypatch):
+    monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545", "15": "fifteen"}')
+    assert get_configured_chains() == [3, 15]
+
+    monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545")
+    assert get_configured_chains() == [8996]
+
+    monkeypatch.delenv("NETWORK_URL")
+    with pytest.raises(Exception, match="No chains configured"):
+        get_configured_chains()
+
+
+@pytest.mark.unit
+def test_get_value_from_decoded_env(monkeypatch):
+    monkeypatch.setenv("SOME_ENV", '{"3": "three", "15": "fifteen"}')
+    assert get_value_from_decoded_env(3, "SOME_ENV") == "three"
+
+    with pytest.raises(Exception, match="Unconfigured chain_id"):
+        get_value_from_decoded_env(7, "SOME_ENV")
+
+    with pytest.raises(Exception, match="Unconfigured chain_id"):
+        get_value_from_decoded_env(None, "SOME_ENV")
+
+    monkeypatch.setenv("SOME_ENV", "simple string")
+    assert get_value_from_decoded_env(3, "SOME_ENV") == "simple string"
+
+
+@pytest.mark.unit
+def test_get_provider_addresses(monkeypatch):
+    monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545"}')
+    monkeypatch.setenv(
+        "PROVIDER_PRIVATE_KEY",
+        '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}',
+    )
+    assert 3 in get_provider_addresses()
+
+    monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545")
+    monkeypatch.setenv(
+        "PROVIDER_PRIVATE_KEY",
+        "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215",
+    )
+    assert 8996 in get_provider_addresses()
+
+    monkeypatch.setenv("NETWORK_URL", '{"3": "http://127.0.0.1:8545"}')
+    monkeypatch.setenv(
+        "PROVIDER_PRIVATE_KEY",
+        "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215",
+    )
+    with pytest.raises(Exception, match="must both be single or both json encoded"):
+        get_provider_addresses()
+
+    monkeypatch.setenv(
+        "PROVIDER_PRIVATE_KEY",
+        '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}',
+    )
+    monkeypatch.setenv("NETWORK_URL", "http://127.0.0.1:8545")
+    with pytest.raises(Exception, match="must both be single or both json encoded"):
+        get_provider_addresses()
+
+
+@pytest.mark.unit
+def test_get_provider_private_key(monkeypatch):
+    monkeypatch.delenv("UNIVERSAL_PRIVATE_KEY")
+    monkeypatch.setenv(
+        "PROVIDER_PRIVATE_KEY",
+        '{"3": "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215"}',
+    )
+    assert get_provider_private_key(3).startswith("0xfd5c1")
+
+    with pytest.raises(
+        Exception,
+        match="Must define UNIVERSAL_PRIVATE_KEY or a single PROVIDER_PRIVATE_KEY.",
+    ):
+        get_provider_private_key(None, use_universal_key=True)
+
+    monkeypatch.setenv(
+        "PROVIDER_PRIVATE_KEY",
+        "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215",
+    )
+    assert get_provider_private_key(8996).startswith("0xfd5c1")
+
+    monkeypatch.delenv("PROVIDER_PRIVATE_KEY")
+    monkeypatch.setenv(
+        "UNIVERSAL_PRIVATE_KEY",
+        "0xfd5c1ccea015b6d663618850824154a3b3fb2882c46cefb05b9a93fea8c3d215",
+    )
+    assert get_provider_private_key(None, use_universal_key=True).startswith("0xfd5c1")
diff --git a/ocean_provider/utils/test/test_compute.py b/ocean_provider/utils/test/test_compute.py
new file mode 100644
index 00000000..b9d247ec
--- /dev/null
+++ b/ocean_provider/utils/test/test_compute.py
@@ -0,0 +1,30 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+
+import pytest
+from ocean_provider.utils.compute import (
+    get_compute_endpoint,
+    get_compute_result_endpoint,
+)
+
+test_logger = logging.getLogger(__name__)
+
+
+@pytest.mark.unit
+def test_get_compute_endpoint(monkeypatch):
+    monkeypatch.setenv("OPERATOR_SERVICE_URL", "http://with-slash.com/")
+    assert get_compute_endpoint() == "http://with-slash.com/api/v1/operator/compute"
+    assert (
+        get_compute_result_endpoint()
+        == "http://with-slash.com/api/v1/operator/getResult"
+    )
+
+    monkeypatch.setenv("OPERATOR_SERVICE_URL", "http://without-slash.com")
+    assert get_compute_endpoint() == "http://without-slash.com/api/v1/operator/compute"
+    assert (
+        get_compute_result_endpoint()
+        == "http://without-slash.com/api/v1/operator/getResult"
+    )
diff --git a/ocean_provider/utils/test/test_credentials.py b/ocean_provider/utils/test/test_credentials.py
new file mode 100644
index 00000000..477c9b7d
--- /dev/null
+++ b/ocean_provider/utils/test/test_credentials.py
@@ -0,0 +1,90 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+from copy import deepcopy
+
+import pytest
+from ocean_provider.utils.asset import Asset
+from ocean_provider.utils.consumable import ConsumableCodes, MalformedCredential
+from ocean_provider.utils.credentials import AddressCredential
+from tests.ddo.ddo_sa_sample_with_credentials_v4 import json_dict
+from tests.test_helpers import get_resource_path
+
+
+@pytest.mark.unit
+def test_asset_credentials_addresses_both():
+    """Tests asset credentials when both deny and allow lists exist on the asset."""
+    sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json")
+    assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path)
+
+    ddo = deepcopy(json_dict)
+    asset = Asset(ddo)
+
+    address_credential = AddressCredential(asset)
+    assert address_credential.get_addresses_of_class("allow") == ["0x123", "0x456a"]
+    assert address_credential.get_addresses_of_class("deny") == ["0x2222", "0x333"]
+    assert (
+        address_credential.validate_access({"type": "address", "value": "0x111"})
+        == ConsumableCodes.CREDENTIAL_NOT_IN_ALLOW_LIST
+    )
+    assert (
+        address_credential.validate_access({"type": "address", "value": "0x456A"})
+        == ConsumableCodes.OK
+    )
+    # if "allow" exists, "deny" is not checked anymore
+
+
+@pytest.mark.unit
+def test_asset_credentials_addresses_only_deny():
+    """Tests asset credentials when only the deny list exists on the asset."""
+    sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json")
+    assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path)
+
+    ddo = deepcopy(json_dict)
+    asset = Asset(ddo)
+
+    # remove allow to test the behaviour of deny
+    asset.credentials.pop("allow")
+
+    address_credential = AddressCredential(asset)
+    assert address_credential.get_addresses_of_class("allow") == []
+    assert address_credential.get_addresses_of_class("deny") == ["0x2222", "0x333"]
+    assert (
+        address_credential.validate_access({"type": "address", "value": "0x111"})
+        == ConsumableCodes.OK
+    )
+    assert (
+        address_credential.validate_access({"type": "address", "value": "0x333"})
+        == ConsumableCodes.CREDENTIAL_IN_DENY_LIST
+    )
+
+    credential = {"type": "address", "value": ""}
+    with pytest.raises(MalformedCredential):
+        address_credential.validate_access(credential)
+
+
+@pytest.mark.unit
+def test_asset_credentials_addresses_no_access_list():
+    """Tests asset credentials when neither deny, nor allow lists exist on the asset."""
+    sample_asset_path = get_resource_path("ddo", "ddo_sa_sample_with_credentials.json")
+    assert sample_asset_path.exists(), "{} does not exist!".format(sample_asset_path)
+
+    ddo = deepcopy(json_dict)
+    asset = Asset(ddo)
+
+    # if "allow" OR "deny" exist, we need a credential,
+    # so remove both to test the behaviour of no credential supplied
+    address_credential = AddressCredential(asset)
+    asset.credentials.pop("allow")
+    asset.credentials.pop("deny")
+
+    assert address_credential.validate_access() == ConsumableCodes.OK
+
+    # test that we can use another credential if address is not required
+    assert (
+        asset.is_consumable(
+            {"type": "somethingelse", "value": "test"}, with_connectivity_check=False
+        )
+        == ConsumableCodes.OK
+    )
diff --git a/ocean_provider/utils/test/test_currency.py b/ocean_provider/utils/test/test_currency.py
new file mode 100644
index 00000000..2ca65a1e
--- /dev/null
+++ b/ocean_provider/utils/test/test_currency.py
@@ -0,0 +1,96 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+from decimal import Decimal, localcontext
+
+import pytest
+from ocean_provider.utils.currency import (
+    ETHEREUM_DECIMAL_CONTEXT,
+    MAX_ETHER,
+    MAX_WEI,
+    MIN_ETHER,
+    MIN_WEI,
+    parse_units,
+    to_wei,
+)
+
+USDT_DECIMALS = 6
+MIN_USDT = Decimal("0.000001")
+MAX_USDT = Decimal(MAX_WEI).scaleb(-USDT_DECIMALS, context=ETHEREUM_DECIMAL_CONTEXT)
+
+SEVEN_DECIMALS = 7
+MIN_SEVEN = Decimal("0.0000001")
+MAX_SEVEN = Decimal(MAX_WEI).scaleb(-SEVEN_DECIMALS, context=ETHEREUM_DECIMAL_CONTEXT)
+
+
+@pytest.mark.unit
+def test_to_wei():
+    """Test the to_wei function"""
+    assert to_wei(Decimal("0")) == 0, "Zero ether (Decimal) should equal zero wei"
+    assert to_wei("0") == 0, "Zero ether (string) should equal zero wei"
+    assert to_wei(0) == 0, "Zero ether (int) should equal zero wei"
+    assert (
+        to_wei(Decimal("0.123456789123456789")) == 123456789_123456789
+    ), "Conversion from ether (Decimal) to wei failed."
+    assert (
+        to_wei("0.123456789123456789") == 123456789_123456789
+    ), "Conversion from ether (string) to wei failed."
+    assert (
+        to_wei(1) == 1_000000000_000000000
+    ), "Conversion from ether (int) to wei failed."
+
+    assert (
+        to_wei("0.1234567891234567893") == 123456789_123456789
+    ), "Conversion from ether to wei failed, supposed to round towards 0 (aka. truncate)."
+    assert (
+        to_wei("0.1234567891234567897") == 123456789_123456789
+    ), "Conversion from ether to wei failed, supposed to round towards 0 (aka. truncate)."
+
+    assert (
+        to_wei(MIN_ETHER) == MIN_WEI
+    ), "Conversion from minimum ether to minimum wei failed."
+
+    assert (
+        to_wei(MAX_ETHER) == MAX_WEI
+    ), "Conversion from maximum ether to maximum wei failed."
+
+    # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_ETHER
+    with localcontext(ETHEREUM_DECIMAL_CONTEXT):
+        with pytest.raises(ValueError):
+            to_wei(MAX_ETHER + 1)
+
+
+@pytest.mark.unit
+def test_parse_units():
+    """Test the parse_units function"""
+    assert parse_units("0", USDT_DECIMALS) == 0
+    assert parse_units("0.123456789123456789", USDT_DECIMALS) == 123456
+    assert parse_units("1.123456789123456789", USDT_DECIMALS) == 1_123456
+    assert parse_units("5278.02", USDT_DECIMALS) == 5278_020000
+    assert parse_units(MIN_USDT, USDT_DECIMALS) == MIN_WEI
+    assert parse_units(MAX_USDT, USDT_DECIMALS) == MAX_WEI
+
+    # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_USDT
+    with localcontext(ETHEREUM_DECIMAL_CONTEXT):
+        with pytest.raises(ValueError):
+            parse_units(MAX_USDT + 1, USDT_DECIMALS)
+
+    assert parse_units("0", "mwei") == 0
+    assert parse_units("0.123456789123456789", "mwei") == 123456
+    assert parse_units("1.123456789123456789", "mwei") == 1_123456
+    assert parse_units("5278.02", "mwei") == 5278_020000
+    assert parse_units(MIN_USDT, "mwei") == MIN_WEI
+    assert parse_units(MAX_USDT, "mwei") == MAX_WEI
+
+    # Use ETHEREUM_DECIMAL_CONTEXT when performing arithmetic on MAX_USDT
+    with localcontext(ETHEREUM_DECIMAL_CONTEXT):
+        with pytest.raises(ValueError):
+            parse_units(MAX_USDT + 1, "mwei")
+
+    assert parse_units("0", SEVEN_DECIMALS) == 0
+    assert parse_units("0.123456789", SEVEN_DECIMALS) == 1234567
+    assert parse_units("1.123456789", SEVEN_DECIMALS) == 1_1234567
+    assert parse_units("5278.02", SEVEN_DECIMALS) == 5278_0200000
+    assert parse_units(MIN_SEVEN, SEVEN_DECIMALS) == MIN_WEI
+    assert parse_units(MAX_SEVEN, SEVEN_DECIMALS) == MAX_WEI
diff --git a/ocean_provider/utils/test/test_encryption.py b/ocean_provider/utils/test/test_encryption.py
new file mode 100644
index 00000000..64efaf8d
--- /dev/null
+++ b/ocean_provider/utils/test/test_encryption.py
@@ -0,0 +1,35 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+import pytest
+from ocean_provider.utils.encryption import do_decrypt, do_encrypt
+from web3.main import Web3
+
+
+@pytest.mark.unit
+def test_encryption_with_bytes(provider_wallet):
+    test_string = "hello_world"
+    test_bytes = Web3.toBytes(text=test_string)
+    result = do_encrypt(test_bytes, provider_wallet)
+    assert result.startswith("0x")
+    assert do_decrypt(result, provider_wallet) == test_bytes
+
+
+@pytest.mark.unit
+def test_encryption_with_hexstr(provider_wallet):
+    test_string = '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]'
+    result = do_encrypt(Web3.toHex(text=test_string), provider_wallet)
+    assert result.startswith("0x")
+    assert do_decrypt(result, provider_wallet) == Web3.toBytes(text=test_string)
+
+
+@pytest.mark.unit
+def test_encryption_with_text(provider_wallet):
+    test_string = (
+        '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt", '
+        '"https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]'
+    )
+    result = do_encrypt(test_string, provider_wallet)
+    assert result.startswith("0x")
+    assert do_decrypt(result, provider_wallet) == Web3.toBytes(text=test_string)
diff --git a/ocean_provider/utils/test/test_error_responses.py b/ocean_provider/utils/test/test_error_responses.py
new file mode 100644
index 00000000..3a12155a
--- /dev/null
+++ b/ocean_provider/utils/test/test_error_responses.py
@@ -0,0 +1,70 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+
+import pytest
+from ocean_provider.run import app, handle_error
+
+test_logger = logging.getLogger(__name__)
+
+
+@pytest.mark.unit
+def test_service_unavailable(caplog):
+    context = {"item1": "test1", "item2": "test2"}
+
+    with app.test_request_context(json=context):
+        e = Exception("test message")
+        response = handle_error(e)
+        assert response.status_code == 503
+        response = response.json
+        assert response["error"] == "test message"
+        assert response["context"] == context
+
+
+@pytest.mark.unit
+def test_service_unavailable_strip_infura_project_id():
+    """Test that service_unavailable strips out URLs."""
+
+    context = {"item1": "test1", "item2": "test2"}
+
+    # HTTP Infura URL (rinkeby)
+    with app.test_request_context(json=context):
+        e = Exception(
+            "429 Client Error: Too Many Requests for url: "
+            "https://rinkeby.infura.io/v3/ffffffffffffffffffffffffffffffff"
+        )
+        response = handle_error(e)
+        assert (
+            response.json["error"] == "429 Client Error: Too Many Requests for url: "
+            "<URL stripped for security reasons>"
+        )
+
+    # Websocket Infura URL (ropsten)
+    with app.test_request_context(json=context):
+        e = Exception(
+            "429 Client Error: Too Many Requests for url: "
+            "wss://ropsten.infura.io/ws/v3/ffffffffffffffffffffffffffffffff"
+        )
+        response = handle_error(e)
+        assert (
+            response.json["error"] == "429 Client Error: Too Many Requests for url: "
+            "<URL stripped for security reasons>"
+        )
+
+    # No URL
+    with app.test_request_context(json=context):
+        e = Exception("string without a URL in it")
+        response = handle_error(e)
+        assert response.json["error"] == "string without a URL in it"
+
+    # Two URLs
+    with app.test_request_context(json=context):
+        e = Exception("Two URLs: wss://google.com https://google.com")
+        response = handle_error(e)
+        assert (
+            response.json["error"] == "Two URLs: "
+            "<URL stripped for security reasons> "
+            "<URL stripped for security reasons>"
+        )
diff --git a/ocean_provider/utils/test/test_provider_fees.py b/ocean_provider/utils/test/test_provider_fees.py
new file mode 100644
index 00000000..f4bcfa2a
--- /dev/null
+++ b/ocean_provider/utils/test/test_provider_fees.py
@@ -0,0 +1,63 @@
+from unittest.mock import patch
+
+import pytest
+from freezegun import freeze_time
+from ocean_provider.utils.currency import to_wei
+from ocean_provider.utils.provider_fees import get_provider_fee_amount
+from tests.helpers.compute_helpers import get_future_valid_until
+from tests.test_helpers import (
+    BLACK_HOLE_ADDRESS,
+    deploy_data_nft,
+    deploy_datatoken,
+    get_ocean_token_address,
+)
+
+
+@pytest.mark.unit
+@freeze_time("Feb 11th, 2012 00:00")
+def test_get_provider_fee_amount(web3, publisher_wallet):
+    valid_until = get_future_valid_until()
+    assert (
+        get_provider_fee_amount(
+            valid_until,
+            "ocean-compute",
+            web3,
+            "0x0000000000000000000000000000000000000000",
+        )
+        == 0
+    )
+
+    data_nft_address = deploy_data_nft(
+        web3,
+        "Data NFT Name",
+        "DATANFTSYMBOL",
+        1,
+        BLACK_HOLE_ADDRESS,
+        BLACK_HOLE_ADDRESS,
+        "",
+        publisher_wallet,
+    )
+
+    datatoken_address = deploy_datatoken(
+        web3=web3,
+        data_nft_address=data_nft_address,
+        template_index=1,
+        name="Datatoken 1",
+        symbol="DT1",
+        minter=publisher_wallet.address,
+        fee_manager=publisher_wallet.address,
+        publishing_market=BLACK_HOLE_ADDRESS,
+        publishing_market_fee_token=get_ocean_token_address(web3),
+        cap=to_wei(1000),
+        publishing_market_fee_amount=0,
+        from_wallet=publisher_wallet,
+    )
+
+    with patch("ocean_provider.utils.provider_fees.get_c2d_environments") as mock:
+        mock.return_value = [{"id": "ocean-compute", "priceMin": 60}]
+        assert (
+            get_provider_fee_amount(
+                valid_until, "ocean-compute", web3, datatoken_address
+            )
+            == 3600000000000000000000
+        )
diff --git a/ocean_provider/utils/test/test_url.py b/ocean_provider/utils/test/test_url.py
new file mode 100644
index 00000000..afbb60b1
--- /dev/null
+++ b/ocean_provider/utils/test/test_url.py
@@ -0,0 +1,93 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+import logging
+from unittest.mock import Mock, patch
+
+import pytest
+from ocean_provider.utils.url import (
+    get_redirect,
+    is_safe_url,
+    is_this_same_provider,
+    is_url,
+)
+from requests.models import Response
+
+test_logger = logging.getLogger(__name__)
+
+
+@pytest.mark.unit
+def test_is_url():
+    assert is_url("https://jsonplaceholder.typicode.com/") is True
+    assert is_url("127.0.0.1") is False
+    assert is_url("169.254.169.254") is False
+    assert is_url("http://169.254.169.254/latest/meta-data/hostname") is True
+
+
+@pytest.mark.unit
+def test_is_safe_url():
+    assert is_safe_url("https://jsonplaceholder.typicode.com/") is True
+    assert is_safe_url("127.0.0.1") is False
+    assert is_safe_url("169.254.169.254") is False
+    assert is_safe_url("http://169.254.169.254/latest/meta-data/hostname") is False
+
+    assert is_safe_url("https://bit.ly/3zqzc4m") is True  # jsonplaceholder example
+    assert is_safe_url("https://bit.ly/3znh0Zg") is False  # meta-data/hostname example
+
+    assert is_safe_url("blabla") is False
+
+
+@pytest.mark.unit
+def test_is_same_provider():
+    assert is_this_same_provider("http://localhost:8030", 8996)
+
+
+@pytest.mark.unit
+def test_get_redirect():
+    assert (
+        get_redirect("https://bit.ly/3zqzc4m")
+        == "https://jsonplaceholder.typicode.com/"
+    )
+
+    redirect_response = Mock(spec=Response)
+    redirect_response.is_redirect = True
+    redirect_response.status_code = 200
+    redirect_response.headers = {"Location": "/root-relative.html"}
+
+    normal_response = Mock(spec=Response)
+    normal_response.is_redirect = False
+    normal_response.status_code = 200
+
+    with patch("ocean_provider.utils.url.requests.head") as mock:
+        mock.side_effect = [redirect_response, normal_response]
+        assert (
+            get_redirect("https://some-url.com:3000/index")
+            == "https://some-url.com:3000/root-relative.html"
+        )
+
+    redirect_response = Mock(spec=Response)
+    redirect_response.is_redirect = True
+    redirect_response.status_code = 200
+    redirect_response.headers = {"Location": "relative.html"}
+
+    normal_response = Mock(spec=Response)
+    normal_response.is_redirect = False
+    normal_response.status_code = 200
+
+    with patch("ocean_provider.utils.url.requests.head") as mock:
+        mock.side_effect = [redirect_response, normal_response]
+        assert (
+            get_redirect("https://some-url.com:3000/index")
+            == "https://some-url.com:3000/index/relative.html"
+        )
+
+    redirect_response = Mock(spec=Response)
+    redirect_response.is_redirect = True
+    redirect_response.status_code = 200
+    redirect_response.headers = {"Location": "https://some-url.com:3000/index"}
+
+    with patch("ocean_provider.utils.url.requests.head") as mock:
+        mock.return_value = redirect_response
+        assert get_redirect("https://some-url.com:3000/index") is None
+        assert mock.call_count == 6
diff --git a/ocean_provider/utils/test/test_util.py b/ocean_provider/utils/test/test_util.py
new file mode 100644
index 00000000..7189251d
--- /dev/null
+++ b/ocean_provider/utils/test/test_util.py
@@ -0,0 +1,445 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+import copy
+import json
+import logging
+import mimetypes
+from copy import deepcopy
+from unittest.mock import Mock, patch
+
+import ipfshttpclient
+import pytest
+import requests
+from flask import Request
+from ocean_provider.file_types.file_types_factory import FilesTypeFactory
+from ocean_provider.utils.asset import Asset
+from ocean_provider.utils.encryption import do_encrypt
+from ocean_provider.utils.services import Service
+from ocean_provider.utils.util import (
+    get_service_files_list,
+    get_service_files_list_old_structure,
+    msg_hash,
+)
+from tests.ddo.ddo_sample1_v4 import json_dict as ddo_sample1_v4
+from tests.helpers.constants import ARWEAVE_TRANSACTION_ID
+from web3.main import Web3
+from werkzeug.utils import get_content_type
+
+test_logger = logging.getLogger(__name__)
+
+
+@pytest.mark.unit
+def test_msg_hash():
+    msg = "Hello World!"
+    hashed = msg_hash(msg)
+    expected = "7f83b1657ff1fc53b92dc18148a1d65dfc2d4b1fa3d677284addd200126d9069"
+    assert hashed == expected
+
+
+@pytest.mark.unit
+def test_build_download_response():
+    request = Mock()
+    request.range = None
+
+    class Dummy:
+        pass
+
+    mocked_response = Dummy()
+    mocked_response.content = b"asdsadf"
+    mocked_response.status_code = 200
+    mocked_response.headers = {}
+
+    filename = "<>.xml"
+    content_type = mimetypes.guess_type(filename)[0]
+    url_object = {"url": f"https://source-lllllll.cccc/{filename}", "type": "url"}
+    _, instance = FilesTypeFactory.validate_and_create(url_object)
+    with patch(
+        "ocean_provider.file_types.definitions.is_safe_url",
+        side_effect=[True],
+    ):
+        with patch(
+            "requests.get",
+            side_effect=[mocked_response],
+        ):
+            response = instance.build_download_response(request)
+
+    assert response.headers["content-type"] == content_type
+    assert (
+        response.headers.get_all("Content-Disposition")[0]
+        == f"attachment;filename={filename}"
+    )
+
+    filename = "<>"
+    url_object = {"url": f"https://source-lllllll.cccc/{filename}", "type": "url"}
+    _, instance = FilesTypeFactory.validate_and_create(url_object)
+    with patch(
+        "ocean_provider.file_types.definitions.is_safe_url",
+        side_effect=[True],
+    ):
+        with patch(
+            "requests.get",
+            side_effect=[mocked_response],
+        ):
+            response = instance.build_download_response(request)
+    assert response.headers["content-type"] == get_content_type(
+        response.default_mimetype, response.charset
+    )
+    assert (
+        response.headers.get_all("Content-Disposition")[0]
+        == f"attachment;filename={filename}"
+    )
+
+    filename = "<>"
+    url_object = {"url": f"https://source-lllllll.cccc/{filename}", "type": "url"}
+    _, instance = FilesTypeFactory.validate_and_create(url_object)
+    instance.checked_details = {"contentType": content_type}
+    with patch(
+        "ocean_provider.file_types.definitions.is_safe_url",
+        side_effect=[True],
+    ):
+        with patch(
+            "requests.get",
+            side_effect=[mocked_response],
+        ):
+            response = instance.build_download_response(request)
+    assert response.headers["content-type"] == content_type
+
+    matched_cd = (
+        f"attachment;filename={filename + mimetypes.guess_extension(content_type)}"
+    )
+    assert response.headers.get_all("Content-Disposition")[0] == matched_cd
+
+    mocked_response_with_attachment = deepcopy(mocked_response)
+    attachment_file_name = "test.xml"
+    mocked_response_with_attachment.headers = {
+        "content-disposition": f"attachment;filename={attachment_file_name}"
+    }
+
+    url_object = {"url": "https://source-lllllll.cccc/not-a-filename", "type": "url"}
+    _, instance = FilesTypeFactory.validate_and_create(url_object)
+    with patch(
+        "ocean_provider.file_types.definitions.is_safe_url",
+        side_effect=[True],
+    ):
+        with patch(
+            "requests.get",
+            side_effect=[mocked_response_with_attachment],
+        ):
+            response = instance.build_download_response(request)
+    assert (
+        response.headers["content-type"]
+        == mimetypes.guess_type(attachment_file_name)[0]
+    )  # noqa
+
+    matched_cd = f"attachment;filename={attachment_file_name}"
+    assert response.headers.get_all("Content-Disposition")[0] == matched_cd
+
+    mocked_response_with_content_type = deepcopy(mocked_response)
+    response_content_type = "text/csv"
+    mocked_response_with_content_type.headers = {"content-type": response_content_type}
+
+    filename = "filename.txt"
+    url_object = {
+        "url": f"https://source-lllllll.cccc/{filename}",
+        "type": "url",
+        "headers": {"APIKEY": "sample"},
+    }
+    _, instance = FilesTypeFactory.validate_and_create(url_object)
+    with patch(
+        "ocean_provider.file_types.definitions.is_safe_url",
+        side_effect=[True],
+    ):
+        with patch(
+            "requests.get",
+            side_effect=[mocked_response_with_content_type],
+        ):
+            response = instance.build_download_response(request)
+    assert response.headers["content-type"] == response_content_type
+    assert (
+        response.headers.get_all("Content-Disposition")[0]
+        == f"attachment;filename={filename}"
+    )
+
+
+@pytest.mark.unit
+def test_httpbin():
+    request = Mock(spec=Request)
+    request.range = None
+    request.headers = {}
+
+    try:
+        url_object = {
+            "url": "https://httpbin.org/get",
+            "type": "url",
+            "method": "GET",
+            "userdata": {"test_param": "OCEAN value"},
+        }
+        _, instance = FilesTypeFactory.validate_and_create(url_object)
+        response = instance.build_download_response(request)
+        assert response.json["args"] == {"test_param": "OCEAN value"}
+
+        url_object["url"] = "https://httpbin.org/headers"
+        url_object["headers"] = {"test_header": "OCEAN header", "Range": "DDO range"}
+        _, instance = FilesTypeFactory.validate_and_create(url_object)
+        response = instance.build_download_response(request)
+        # no request range, but DDO range exists
+        assert response.headers.get("Range") == "DDO range"
+
+        url_object["headers"] = {}
+        _, instance = FilesTypeFactory.validate_and_create(url_object)
+        response = instance.build_download_response(request)
+        # no request range and no DDO range
+        assert response.headers.get("Range") is None
+
+        _, instance = FilesTypeFactory.validate_and_create(url_object)
+        request.range = 200
+        request.headers = {"Range": "200"}
+        response = instance.build_download_response(request)
+        # request range and no DDO range
+        assert response.headers.get("Range") == "200"
+
+        url_object["headers"] = {"test_header": "OCEAN header", "Range": "DDO range"}
+        _, instance = FilesTypeFactory.validate_and_create(url_object)
+        request.range = 200
+        request.headers = {"Range": "200"}
+        response = instance.build_download_response(request)
+        # request range and DDO range, will favor DDO range
+        assert response.headers.get("Range") == "DDO range"
+
+        request.range = None
+        request.headers = {}
+        url_object = {
+            "url": "https://httpbin.org/post",
+            "type": "url",
+            "method": "POST",
+            "userdata": {"test_param": "OCEAN POST value"},
+        }
+        _, instance = FilesTypeFactory.validate_and_create(url_object)
+        response = instance.build_download_response(request)
+        assert response.json["json"]["test_param"] == "OCEAN POST value"
+    except requests.exceptions.ReadTimeout:
+        # skippable error due to httpbin downtime
+        logging.warning("test failed due to httpbin downtime")
+        return
+
+
+@pytest.mark.unit
+def test_get_service_files_list(provider_wallet):
+    ddo_sample1 = copy.deepcopy(ddo_sample1_v4)
+    ddo = Asset(ddo_sample1)
+    service = Mock(template=Service)
+    service.datatoken_address = "0x0000000000000000000000000000000000000000"
+    service.type = "access"
+
+    encrypted_files_str = json.dumps(
+        {
+            "nftAddress": "0x0000000000000000000000000000000000000000",
+            "datatokenAddress": "0x0000000000000000000000000000000000000000",
+            "files": ["test1", "test2"],
+        },
+        separators=(",", ":"),
+    )
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+    assert ["test1", "test2"] == get_service_files_list(service, provider_wallet, ddo)
+
+    # empty and raw
+    service.encrypted_files = ""
+    assert get_service_files_list(service, provider_wallet, ddo) is None
+
+    # empty and encrypted
+    encrypted_files_str = ""
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+    assert get_service_files_list(service, provider_wallet, ddo) is None
+
+    # not a dict
+    encrypted_files_str = json.dumps([], separators=(",", ":"))
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+
+    assert get_service_files_list(service, provider_wallet, ddo) is None
+
+    # files not a list
+    encrypted_files_str = json.dumps(
+        {
+            "nftAddress": "0x0000000000000000000000000000000000000000",
+            "datatokenAddress": "0x0000000000000000000000000000000000000000",
+            "files": {"some_dict": "test"},
+        },
+        separators=(",", ":"),
+    )
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+
+    assert get_service_files_list(service, provider_wallet, ddo) is None
+
+    # missing nftAddress
+    encrypted_files_str = json.dumps(
+        {
+            "datatokenAddress": "0x0000000000000000000000000000000000000000",
+            "files": {"some_dict": "test"},
+        },
+        separators=(",", ":"),
+    )
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+
+    assert get_service_files_list(service, provider_wallet, ddo) is None
+
+    # wrong nftAddress
+    encrypted_files_str = json.dumps(
+        {
+            "nftAddress": "0x0000000000000000000000000000000000000001",
+            "datatokenAddress": "0x0000000000000000000000000000000000000000",
+            "files": {"some_dict": "test"},
+        },
+        separators=(",", ":"),
+    )
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+
+    assert get_service_files_list(service, provider_wallet, ddo) is None
+
+
+@pytest.mark.unit
+def test_get_service_files_list_old_structure(provider_wallet):
+    service = Mock(template=Service)
+    encrypted_files_str = json.dumps(["test1", "test2"], separators=(",", ":"))
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+    assert ["test1", "test2"] == get_service_files_list_old_structure(
+        service, provider_wallet
+    )
+
+    # empty and raw
+    service.encrypted_files = ""
+    assert get_service_files_list_old_structure(service, provider_wallet) is None
+
+    # empty and encrypted
+    encrypted_files_str = ""
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+    assert get_service_files_list_old_structure(service, provider_wallet) is None
+
+    # not a list
+    encrypted_files_str = json.dumps({"test": "test"}, separators=(",", ":"))
+    service.encrypted_files = do_encrypt(
+        Web3.toHex(text=encrypted_files_str), provider_wallet
+    )
+
+    assert get_service_files_list_old_structure(service, provider_wallet) is None
+
+
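+# Editor's note: the helper below is NOT part of this patch. It sketches the
+# decrypt-then-parse flow the two tests above exercise: empty envelopes yield
+# None, a dict envelope must carry a "files" list (the real helper also
+# verifies nftAddress/datatokenAddress against the asset, omitted here), and
+# the old structure is a bare JSON list. The name is hypothetical.
+def _sketch_decode_files_list(encrypted_files, provider_wallet):
+    from ocean_provider.utils.encryption import do_decrypt
+
+    if not encrypted_files:
+        return None
+    decrypted = do_decrypt(encrypted_files, provider_wallet)
+    if not decrypted:
+        return None
+    try:
+        decoded = json.loads(decrypted)
+    except json.JSONDecodeError:
+        return None
+    if isinstance(decoded, dict):
+        # new structure: {"nftAddress": ..., "datatokenAddress": ..., "files": [...]}
+        files = decoded.get("files")
+        return files if isinstance(files, list) else None
+    # old structure: a bare list of file objects
+    return decoded if isinstance(decoded, list) else None
+
+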
+@pytest.mark.unit
+def test_validate_url_object():
+    result, message = FilesTypeFactory.validate_and_create({})
+    assert result is False
+    assert message == "cannot decrypt files for this service."
+
+    result, message = FilesTypeFactory.validate_and_create({"type": "invalid"})
+    assert result is False
+    assert message == "Unsupported type invalid"
+
+    result, message = FilesTypeFactory.validate_and_create(
+        {"type": "ipfs", "but_hash": "missing"}
+    )
+    assert result is False
+    assert message == "malformed service files, missing required keys."
+
+    result, message = FilesTypeFactory.validate_and_create(
+        {"type": "arweave", "but_transactionId": "missing"}
+    )
+    assert result is False
+    assert message == "malformed service files, missing transactionId."
+
+    result, message = FilesTypeFactory.validate_and_create(
+        {"type": "url", "url": "x", "headers": "not_a_dict"}
+    )
+    assert result is False
+    assert message == "malformed file object."
+
+    result, message = FilesTypeFactory.validate_and_create(
+        {"type": "url", "url": "x", "headers": '{"dict": "but_stringified"}'}
+    )
+    # we purposefully require a dictionary
+    assert result is False
+    assert message == "malformed file object."
+
+    result, message = FilesTypeFactory.validate_and_create(
+        {"type": "url", "url": "x", "headers": {"dict": "dict_key"}}
+    )
+    assert result is True
+
+    url_object = {
+        "url": "x",
+        "type": "url",
+        "method": "DELETE",
+    }
+    result, message = FilesTypeFactory.validate_and_create(url_object)
+    assert result is False
+    assert message == "Unsafe method delete."
+
+
+@pytest.mark.unit
+def test_build_download_response_ipfs():
+    client = ipfshttpclient.connect("/dns/172.15.0.16/tcp/5001/http")
+    cid = client.add("./tests/resources/ddo_sample_file.txt")["Hash"]
+    url_object = {"type": "ipfs", "hash": cid}
+
+    request = Mock()
+    request.range = None
+
+    _, instance = FilesTypeFactory.validate_and_create(url_object)
+    download_url = instance.get_download_url()
+    print(f"got ipfs download url: {download_url}")
+    assert download_url and download_url.endswith(f"ipfs/{cid}")
+
+    response = instance.build_download_response(request)
+    assert response.data, f"got no data {response.data}"
+
+    # Assert that Content-Disposition header doesn't leak CID
+    assert cid not in response.headers["Content-Disposition"]
+
+
+@pytest.mark.unit
+def test_build_download_response_arweave(monkeypatch):
+    """Test the special cases relevant only to Arweave"""
+    transaction_id = ARWEAVE_TRANSACTION_ID
+    url_object = {
+        "type": "arweave",
+        "transactionId": ARWEAVE_TRANSACTION_ID,
+    }
+
+    request = Mock()
+    request.range = None
+
+    _, instance = FilesTypeFactory.validate_and_create(url_object)
+    assert (
+        instance.get_download_url() == f"https://arweave.net/{ARWEAVE_TRANSACTION_ID}"
+    )
+
+    response = instance.build_download_response(request)
+    assert response.status == "200 OK"
+    assert response.data, f"got no data {response.data}"
+
+    # Assert that Content-Disposition header doesn't leak transaction ID
+    assert transaction_id not in response.headers["Content-Disposition"]
+
+    # Unset ARWEAVE_GATEWAY
+    monkeypatch.delenv("ARWEAVE_GATEWAY")
+    with pytest.raises(
+        Exception,
+        match="No ARWEAVE_GATEWAY defined, can not resolve arweave transaction id.",
+    ):
+        instance.get_download_url()
diff --git a/ocean_provider/validation/test/test_algo_validation.py b/ocean_provider/validation/test/test_algo_validation.py
new file mode 100644
index 00000000..e94f9555
--- /dev/null
+++ b/ocean_provider/validation/test/test_algo_validation.py
@@ -0,0 +1,1012 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+import copy
+from unittest.mock import Mock, patch
+
+import pytest
+from ocean_provider.utils.asset import Asset
+from ocean_provider.utils.services import Service, ServiceType
+from ocean_provider.validation.algo import WorkflowValidator
+from tests.ddo.ddo_sample1_compute import alg_ddo_dict, ddo_dict
+from tests.helpers.compute_helpers import get_future_valid_until
+from tests.test_helpers import get_first_service_by_type
+
+provider_fees_event = Mock()
+provider_fees_event.args.providerData = {"environment": "ocean-compute"}
+provider_fees_event.args.validUntil = get_future_valid_until()
+provider_fees_event.args.providerFeeAmount = 0
+
+this_is_a_gist = "https://gist.githubusercontent.com/calina-c/5e8c965962bc0240eab516cb7a180670/raw/6e6cd245c039a9aac0a488857c6927d39eaafe4d/sprintf-py-conversions"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_passes_algo_ddo(provider_wallet, consumer_address, web3):
+    """Tests happy flow of validator with algo ddo."""
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {
+            "documentId": alg_ddo.did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+        "environment": "ocean-compute",
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is True
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_passes_raw(provider_wallet, consumer_address, web3):
+    """Tests happy flow of validator with raw algo."""
+    ddo = Asset(ddo_dict)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {
+            "serviceId": sa.id,
+            "meta": {
+                "rawcode": "console.log('Hello world'!)",
+                "format": "docker-image",
+                "version": "0.1",
+                "container": {
+                    "entrypoint": "node $ALGO",
+                    "image": "oceanprotocol/algo_dockers",
+                    "tag": "python-branin",
+                    "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4",
+                },
+            },
+        },
+        "environment": "ocean-compute",
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is True
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fails_not_an_algo(provider_wallet, consumer_address, web3):
+    """Tests that validation fails when the asset sent as the algorithm is not an algorithm."""
+    _copy = copy.deepcopy(ddo_dict)
+    _copy["services"][0]["compute"]["publisherTrustedAlgorithms"] = []
+    ddo = Asset(_copy)
+    did = ddo.did
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "documentId": did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "algorithm"
+        assert validator.message == "not_algo"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fails_meta_issues(provider_wallet, consumer_address, web3):
+    """Tests failures caused by problems in algorithmMeta."""
+    ddo = Asset(ddo_dict)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+    # algorithmMeta is empty
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {"serviceId": sa.id, "meta": {}},
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "algorithm"
+        assert validator.message == "meta_oneof_url_rawcode_remote"
+
+    # algorithmMeta container is empty
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa.id,
+            "meta": {
+                "rawcode": "console.log('Hello world'!)",
+                "format": "docker-image",
+                "version": "0.1",
+                "container": {},
+            },
+        },
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "algorithm.container"
+        assert validator.message == "missing_entrypoint_image_checksum"
+
+    # algorithmMeta container is missing image
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa.id,
+            "meta": {
+                "rawcode": "console.log('Hello world'!)",
+                "format": "docker-image",
+                "version": "0.1",
+                "container": {"entrypoint": "node $ALGO", "tag": "10"},
+            },
+        },
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "algorithm.container"
+        assert validator.message == "missing_entrypoint_image_checksum"
+
+    # algorithmMeta container checksum does not start with sha256
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa.id,
+            "meta": {
+                "rawcode": "console.log('Hello world'!)",
+                "format": "docker-image",
+                "version": "0.1",
+                "container": {
+                    "entrypoint": "node $ALGO",
+                    "image": "oceanprotocol/algo_dockers",
+                    "tag": "python-branin",
+                    "checksum": "8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4",
+                },
+            },
+        },
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "algorithm.container"
+        assert validator.message == "checksum_prefix"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_additional_datasets(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {
+            "documentId": alg_ddo.did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+        "additionalDatasets": "",
+        "environment": "ocean-compute",
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        # basically the same test as test_passes_algo_ddo, additionalDatasets is empty
+        assert validator.validate() is True
+
+    # additional input is invalid
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "documentId": alg_ddo.did,
+            "transferTxId": "alg_tx_id",
+        },
+        "additionalDatasets": "i can not be decoded in json!",
+    }
+
+    validator = WorkflowValidator(consumer_address, data)
+    assert validator.validate() is False
+    assert validator.resource == "additional_input"
+    assert validator.message == "invalid"
+
+    did = ddo.did
+
+    # Missing did in additional input
+    data = {
+        "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "documentId": alg_ddo.did,
+            "transferTxId": "alg_tx_id",
+        },
+        "additionalDatasets": [{"transferTxId": "tx_id", "serviceId": sa.id}],
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "datasets[1].documentId"
+        assert validator.message == "missing"
+
+    # Did is not valid
+    data = {
+        "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "documentId": alg_ddo.did,
+            "transferTxId": "alg_tx_id",
+        },
+        "additionalDatasets": [
+            {
+                "documentId": "i am not a did",
+                "transferTxId": "tx_id",
+                "serviceId": sa.id,
+            }
+        ],
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "datasets[1].documentId"
+        assert validator.message == "did_not_found"
+
+    data = {
+        "dataset": {"documentId": did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "documentId": alg_ddo.did,
+            "transferTxId": "alg_tx_id",
+        },
+        "additionalDatasets": [
+            {
+                "documentId": did,
+                "transferTxId": "tx_id",
+                "serviceId": "some other service id",
+            }
+        ],
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "datasets[1].serviceId"
+        assert validator.message == "not_found"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_service_not_compute(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "documentId": alg_ddo.did,
+            "transferTxId": "alg_tx_id",
+        },
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    def other_service(*args, **kwargs):
+        return Service(
+            index=0,
+            service_id="smth_else",
+            service_type="something else",
+            datatoken_address="0xa",
+            service_endpoint="test",
+            encrypted_files="",
+            timeout=3600,
+        )
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        with patch(
+            "ocean_provider.utils.asset.Asset.get_service_by_id",
+            side_effect=other_service,
+        ):
+            validator = WorkflowValidator(consumer_address, data)
+            assert validator.validate() is False
+            assert validator.resource == "dataset.serviceId"
+            assert validator.message == "service_not_access_compute"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fails_trusted(provider_wallet, consumer_address, web3):
+    """Tests possible failures of the algo validation."""
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    # Additional input has other trusted algs
+    _copy = copy.deepcopy(ddo_dict)
+    _copy["id"] = "0xtrust"
+    _copy["services"][0]["compute"]["publisherTrustedAlgorithms"] = [
+        {"did": "0xother", "filesChecksum": "mock", "containerSectionChecksum": "mock"}
+    ]
+    trust_ddo = Asset(_copy)
+    trust_sa = get_first_service_by_type(trust_ddo, ServiceType.COMPUTE)
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo, trust_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+        if trust_ddo.did == args[1]:
+            return trust_ddo
+
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "documentId": alg_ddo.did,
+            "transferTxId": "alg_tx_id",
+        },
+        "additionalDatasets": [
+            {
+                "documentId": trust_ddo.did,
+                "transferTxId": "trust_tx_id",
+                "serviceId": trust_sa.id,
+            }
+        ],
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "datasets[1]"
+        assert validator.message == "not_trusted_algo"
+
+    # Additional input has other trusted publishers
+    _copy = copy.deepcopy(ddo_dict)
+    _copy["id"] = "0xtrust"
+    _copy["services"][0]["compute"]["publisherTrustedAlgorithmPublishers"] = ["0xabc"]
+    _copy["services"][0]["id"] = "compute_2"
+    trust_ddo = Asset(_copy)
+    trust_sa = get_first_service_by_type(trust_ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {
+            "documentId": ddo.did,
+            "transferTxId": "trust_tx_id",
+            "serviceId": sa.id,
+        },
+        "algorithm": {
+            "documentId": alg_ddo.did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+        "additionalDatasets": [
+            {
+                "documentId": trust_ddo.did,
+                "transferTxId": "trust_tx_id",
+                "serviceId": trust_sa.id,
+            }
+        ],
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "datasets[1]"
+        assert validator.message == "not_trusted_algo_publisher"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch("ocean_provider.validation.algo.get_service_files_list", return_value=None)
+def test_fails_no_asset_url(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {"serviceId": sa.id, "meta": {}},
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "dataset.serviceId"
+        assert validator.message == "compute_services_not_in_same_provider"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch("ocean_provider.validation.algo.validate_order", side_effect=Exception("mock"))
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fails_validate_order(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {"serviceId": sa.id, "meta": {}},
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "dataset.serviceId"
+        assert validator.message == "order_invalid"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fails_no_service_id(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": None, "transferTxId": "tx_id"},
+        "algorithm": {"serviceId": sa.id, "meta": {}},
+    }
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore", side_effect=[ddo]
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "dataset.serviceId"
+        assert validator.message == "missing"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+@patch(
+    "ocean_provider.serializers.StageAlgoSerializer.serialize",
+    new=Mock(return_value={}),
+)
+def test_fails_invalid_algorithm_dict(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {
+            "documentId": alg_ddo.did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "algorithm"
+        assert validator.message == "did_not_found"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fails_algorithm_in_use(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {
+            "documentId": alg_ddo.did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    def record_consume_request_side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[0]:
+            return ddo
+        if alg_ddo.did == args[0]:
+            raise Exception("I know Python!")
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        with patch(
+            "ocean_provider.validation.algo.record_consume_request",
+            side_effect=record_consume_request_side_effect,
+        ):
+            validator = WorkflowValidator(consumer_address, data)
+            assert validator.validate() is False
+            assert validator.resource == "algorithm"
+            assert validator.message == "in_use_or_not_on_chain"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fail_wrong_algo_type(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "documentId": alg_ddo.did,
+            "transferTxId": "alg_tx_id",
+        },
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    def other_service(*args, **kwargs):
+        return Service(
+            index=0,
+            service_id=data["algorithm"]["serviceId"],
+            service_type="access",
+            datatoken_address="0xa",
+            service_endpoint="test",
+            encrypted_files="",
+            timeout=3600,
+        )
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        with patch(
+            "ocean_provider.utils.asset.Asset.get_service_by_id",
+            side_effect=other_service,
+        ):
+            validator = WorkflowValidator(consumer_address, data)
+            assert validator.validate() is False
+            assert validator.resource == "dataset.serviceId"
+            assert validator.message == "main_service_compute"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fail_allow_raw_false(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+    ddo.services[0].compute_dict["allowRawAlgorithm"] = False
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "meta": {
+                "rawcode": "console.log('Hello world'!)",
+                "format": "docker-image",
+                "version": "0.1",
+                "container": {
+                    "entrypoint": "node $ALGO",
+                    "image": "oceanprotocol/algo_dockers",
+                    "tag": "python-branin",
+                    "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4",
+                },
+            },
+        },
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "dataset"
+        assert validator.message == "no_raw_algo_allowed"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+def test_success_multiple_services_types(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {
+            "serviceId": sa_compute.id,
+            "meta": {
+                "rawcode": "console.log('Hello world'!)",
+                "format": "docker-image",
+                "version": "0.1",
+                "container": {
+                    "entrypoint": "node $ALGO",
+                    "image": "oceanprotocol/algo_dockers",
+                    "tag": "python-branin",
+                    "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4",
+                },
+            },
+        },
+        "additionalDatasets": [
+            {"documentId": ddo.did, "transferTxId": "ddo.did", "serviceId": "access_1"}
+        ],
+        "environment": "ocean-compute",
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    def another_side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if args[0].type == "access":
+            return None
+        return [{"url": this_is_a_gist, "type": "url"}]
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        with patch(
+            "ocean_provider.validation.algo.get_service_files_list",
+            side_effect=another_side_effect,
+        ):
+            validator = WorkflowValidator(consumer_address, data)
+            assert validator.validate() is True
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+def test_fail_missing_algo_meta_documentId(provider_wallet, consumer_address, web3):
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "transferTxId": "tx_id", "serviceId": sa.id},
+        "algorithm": {"serviceId": None, "meta": None},
+        "additionalDatasets": [
+            {"documentId": ddo.did, "transferTxId": "ddo.did", "serviceId": "access_1"}
+        ],
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    def another_side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if args[0].type == "access":
+            return None
+        return [{"url": this_is_a_gist, "type": "url"}]
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        with patch(
+            "ocean_provider.validation.algo.get_service_files_list",
+            side_effect=another_side_effect,
+        ):
+            validator = WorkflowValidator(consumer_address, data)
+            assert validator.validate() is False
+            assert validator.resource == "algorithm"
+            assert validator.message == "missing_meta_documentId"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": this_is_a_gist, "type": "url"}],
+)
+def test_fee_amount_not_paid(provider_wallet, consumer_address, web3):
+    """Tests that validation fails when the paid provider fee amount is too low."""
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {
+            "documentId": alg_ddo.did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        with patch("ocean_provider.validation.algo.get_provider_fee_amount") as mock:
+            mock.return_value = 10**18
+            validator = WorkflowValidator(consumer_address, data)
+            assert validator.validate() is False
+            assert validator.resource == "order"
+            assert validator.message == "fees_not_paid"
+
+
+@pytest.mark.unit
+@patch("ocean_provider.validation.algo.check_asset_consumable", return_value=(True, ""))
+@patch(
+    "ocean_provider.validation.algo.validate_order",
+    return_value=(None, None, provider_fees_event, None),
+)
+@patch(
+    "ocean_provider.validation.algo.get_service_files_list",
+    return_value=[{"url": "http://some.broken.url", "type": "url"}],
+)
+def test_algo_ddo_file_broken(provider_wallet, consumer_address, web3):
+    """Tests case where algo checksum can not be computed."""
+    ddo = Asset(ddo_dict)
+    alg_ddo = Asset(alg_ddo_dict)
+    sa_compute = get_first_service_by_type(alg_ddo, ServiceType.ACCESS)
+    sa = get_first_service_by_type(ddo, ServiceType.COMPUTE)
+
+    data = {
+        "dataset": {"documentId": ddo.did, "serviceId": sa.id, "transferTxId": "tx_id"},
+        "algorithm": {
+            "documentId": alg_ddo.did,
+            "serviceId": sa_compute.id,
+            "transferTxId": "alg_tx_id",
+        },
+        "environment": "ocean-compute",
+    }
+
+    def side_effect(*args, **kwargs):
+        nonlocal ddo, alg_ddo
+        if ddo.did == args[1]:
+            return ddo
+        if alg_ddo.did == args[1]:
+            return alg_ddo
+
+    with patch(
+        "ocean_provider.validation.algo.get_asset_from_metadatastore",
+        side_effect=side_effect,
+    ):
+        validator = WorkflowValidator(consumer_address, data)
+        assert validator.validate() is False
+        assert validator.resource == "algorithm"
+        assert validator.message == "file_unavailable"
diff --git a/tests/test_RBAC.py b/tests/test_RBAC.py
new file mode 100644
index 00000000..791e1e26
--- /dev/null
+++ b/tests/test_RBAC.py
@@ -0,0 +1,251 @@
+#
+# Copyright 2023 Ocean Protocol Foundation
+# SPDX-License-Identifier: Apache-2.0
+#
+import copy
+import json
+from datetime import datetime
+
+import pytest
+from ocean_provider.constants import BaseURLs
+from ocean_provider.exceptions import RequestNotFound
+from ocean_provider.utils.accounts import sign_message
+from ocean_provider.utils.asset import Asset
+from ocean_provider.utils.services import Service, ServiceType
+from ocean_provider.validation.provider_requests import RBACValidator
+from tests.ddo.ddo_sample1_v4 import json_dict as ddo_sample1_v4
+from tests.ddo.ddo_sample_algorithm_v4 import algorithm_ddo_sample
+from tests.helpers.compute_helpers import get_compute_signature
+from tests.helpers.ddo_dict_builders import get_compute_service
+from tests.test_helpers import get_first_service_by_type
+
+
+@pytest.mark.unit
+def test_invalid_request_name():
+    req = dict()
+    with pytest.raises(RequestNotFound) as err:
+        RBACValidator(request_name="MyRequest", request=req)
+    assert err.value.args[0] == "Request name is not valid!"
+
+
+encrypt_endpoint = BaseURLs.SERVICES_URL + "/encrypt"
+
+
+@pytest.mark.unit
+def test_encrypt_request_payload(consumer_wallet, publisher_wallet, monkeypatch):
+    monkeypatch.setenv("PRIVATE_PROVIDER", "1")
+    document = {
+        "url": "http://localhost:8030" + encrypt_endpoint,
+        "index": 0,
+        "checksum": "foo_checksum",
+        "contentLength": "4535431",
+        "contentType": "text/csv",
+        "encoding": "UTF-8",
+        "compression": "zip",
+    }
+    req = {
+        "data": json.dumps(document),
+        "publisherAddress": publisher_wallet.address,
+    }
+    validator = RBACValidator(request_name="EncryptRequest", request=req)
+    payload = validator.build_payload()
+
+    assert validator.request == req
+    assert payload["eventType"] == "encryptUrl"
+    assert payload["providerAccess"] == "private"
+    assert payload["component"] == "provider"
+    assert payload["credentials"] == {
+        "type": "address",
+        "value": publisher_wallet.address,
+    }
+
+
+@pytest.mark.unit
+def test_wrong_encrypt_request_payload(consumer_wallet, publisher_wallet, monkeypatch):
+    monkeypatch.setenv("PRIVATE_PROVIDER", "1")
+    req = {
+        "publisherAddress": publisher_wallet.address,
+    }
+    validator = RBACValidator(request_name="EncryptRequest", request=req)
+    with pytest.raises(Exception) as err:
+        validator.build_payload()
+    assert err.value.args[0] == "Data to encrypt is empty."
+ + +@pytest.mark.unit +def test_initialize_request_payload( + client, publisher_wallet, consumer_wallet, provider_address, web3 +): + asset = Asset(ddo_sample1_v4) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + + req = { + "documentId": asset.did, + "serviceId": service.id, + "datatoken": service.datatoken_address, + "consumerAddress": consumer_wallet.address, + } + + validator = RBACValidator(request_name="InitializeRequest", request=req) + payload = validator.build_payload() + assert validator.request == req + assert payload["eventType"] == "initialize" + assert payload["providerAccess"] == "public" + assert payload["component"] == "provider" + assert payload["credentials"] == { + "type": "address", + "value": consumer_wallet.address, + } + assert payload["dids"][0]["did"] == asset.did + assert payload["dids"][0]["serviceId"] == service.id + + +@pytest.mark.unit +def test_access_request_payload( + client, publisher_wallet, consumer_wallet, provider_address, web3 +): + asset = Asset(ddo_sample1_v4) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + + req = { + "documentId": asset.did, + "serviceId": service.id, + "datatoken": service.datatoken_address, + "consumerAddress": consumer_wallet.address, + "transferTxId": "0xsometx", + "fileIndex": 0, + } + + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + req["signature"] = sign_message(_msg, consumer_wallet) + req["nonce"] = nonce + + validator = RBACValidator(request_name="DownloadRequest", request=req) + payload = validator.build_payload() + assert validator.request == req + assert payload["eventType"] == "access" + assert payload["providerAccess"] == "public" + assert payload["component"] == "provider" + assert payload["credentials"] == { + "type": "address", + "value": consumer_wallet.address, + } + assert payload["dids"][0]["did"] == asset.did + assert payload["dids"][0]["serviceId"] == service.id + + +@pytest.mark.unit +def test_compute_payload_without_additional_inputs( + client, publisher_wallet, consumer_wallet, provider_address +): + ddo_sample1 = copy.deepcopy(ddo_sample1_v4) + ddo = Asset(ddo_sample1) + ddo.services.append( + Service.from_json(1, get_compute_service(None, None, "0x0", "0x0")) + ) + + alg_ddo = Asset(algorithm_ddo_sample) + sa = get_first_service_by_type(alg_ddo, ServiceType.COMPUTE) + sa_compute = get_first_service_by_type(ddo, ServiceType.COMPUTE) + + nonce, signature = get_compute_signature(client, consumer_wallet, ddo.did) + req = { + "dataset": { + "documentId": ddo.did, + "serviceId": sa.id, + "transferTxId": "0xsometx", + }, + "algorithm": { + "serviceId": sa_compute.id, + "documentId": alg_ddo.did, + "transferTxId": "0xsomeothertx", + }, + "signature": signature, + "nonce": nonce, + "consumerAddress": consumer_wallet.address, + } + + validator = RBACValidator(request_name="ComputeStartRequest", request=req) + payload = validator.build_payload() + assert validator.request == req + assert payload["eventType"] == "compute" + assert payload["providerAccess"] == "public" + assert payload["component"] == "provider" + assert payload["credentials"] == { + "type": "address", + "value": consumer_wallet.address, + } + assert payload["dids"][0]["did"] == ddo.did + assert payload["dids"][0]["serviceId"] == sa.id + assert payload["algos"][0]["did"] == alg_ddo.did + assert payload["algos"][0]["serviceId"] == sa_compute.id + + +@pytest.mark.unit +def test_compute_request_payload( + client, publisher_wallet, consumer_wallet, provider_address +): + ddo_sample1 = 
copy.deepcopy(ddo_sample1_v4) + ddo = Asset(ddo_sample1) + ddo.services.append( + Service.from_json( + 1, + get_compute_service( + None, + None, + "0x0000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000", + ), + ) + ) + + alg_ddo = Asset(algorithm_ddo_sample) + sa = get_first_service_by_type(alg_ddo, ServiceType.COMPUTE) + sa_compute = get_first_service_by_type(ddo, ServiceType.COMPUTE) + + ddo_sample2 = copy.deepcopy(ddo_sample1_v4) + ddo_sample2["did"] = "0xsomeotherdid" + ddo2 = Asset(ddo_sample2) + sa2 = get_first_service_by_type(ddo2, ServiceType.ACCESS) + + nonce, signature = get_compute_signature(client, consumer_wallet, ddo.did) + + req = { + "dataset": { + "documentId": ddo.did, + "serviceId": sa.id, + "transferTxId": "0xsometx", + }, + "algorithm": { + "documentId": alg_ddo.did, + "transferTxId": "0xsomeothertx", + "serviceId": sa_compute.id, + }, + "signature": signature, + "nonce": nonce, + "consumerAddress": consumer_wallet.address, + "additionalDatasets": [ + { + "documentId": ddo2.did, + "transferTxId": "0xsomeevenothertx", + "serviceId": sa2.id, + } + ], + } + validator = RBACValidator(request_name="ComputeRequest", request=req) + payload = validator.build_payload() + assert validator.request == req + assert payload["eventType"] == "compute" + assert payload["providerAccess"] == "public" + assert payload["component"] == "provider" + assert payload["credentials"] == { + "type": "address", + "value": consumer_wallet.address, + } + assert payload["dids"][0]["did"] == ddo.did + assert payload["dids"][0]["serviceId"] == sa.id + assert payload["algos"][0]["did"] == alg_ddo.did + assert payload["algos"][0]["serviceId"] == sa_compute.id + assert payload["additionalDids"][0]["did"] == ddo2.did + assert payload["additionalDids"][0]["serviceId"] == sa2.id diff --git a/tests/test_graphql.py b/tests/test_graphql.py new file mode 100644 index 00000000..89556af9 --- /dev/null +++ b/tests/test_graphql.py @@ -0,0 +1,143 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import json +from datetime import datetime + +import pytest +from ocean_provider.constants import BaseURLs +from ocean_provider.utils.accounts import sign_message +from ocean_provider.utils.provider_fees import get_provider_fees +from ocean_provider.utils.services import ServiceType +from tests.test_helpers import ( + get_first_service_by_type, + get_registered_asset, + mint_100_datatokens, + start_order, +) + + +@pytest.mark.integration +def test_download_graphql_asset(client, publisher_wallet, consumer_wallet, web3): + unencrypted_files_list = [ + { + "type": "graphql", + "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", + "query": """ + query{ + nfts(orderBy: createdTimestamp,orderDirection:desc){ + id + symbol + createdTimestamp + } + } + """, + } + ] + asset = get_registered_asset( + publisher_wallet, unencrypted_files_list=unencrypted_files_list + ) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, _ = start_order( + web3, + service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + payload = { + "documentId": asset.did, + "serviceId": service.id, + "consumerAddress": consumer_wallet.address, + "transferTxId": tx_id, + "fileIndex": 0, + } + + download_endpoint = BaseURLs.SERVICES_URL + 
"/download" + + # Consume using url index and signature (with nonce) + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + payload["signature"] = sign_message(_msg, consumer_wallet) + payload["nonce"] = nonce + response = client.get( + service.service_endpoint + download_endpoint, query_string=payload + ) + assert response.status_code == 200, f"{response.data}" + + +@pytest.mark.integration +def test_download_graphql_asset_with_userdata( + client, publisher_wallet, consumer_wallet, web3 +): + unencrypted_files_list = [ + { + "type": "graphql", + "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", + "query": """ + query nfts($nftAddress: String){ + nfts(where: {id:$nftAddress},orderBy: createdTimestamp,orderDirection:desc){ + id + symbol + createdTimestamp + } + } + """, + } + ] + asset = get_registered_asset( + publisher_wallet, + unencrypted_files_list=unencrypted_files_list, + custom_userdata=[ + { + "name": "nftAddress", + "type": "text", + "label": "nftAddress", + "required": True, + "description": "Nft to search for", + } + ], + ) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, _ = start_order( + web3, + service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + payload = { + "documentId": asset.did, + "serviceId": service.id, + "consumerAddress": consumer_wallet.address, + "transferTxId": tx_id, + "fileIndex": 0, + "userdata": json.dumps({"nftAddress": asset.nftAddress.lower()}), + } + + download_endpoint = BaseURLs.SERVICES_URL + "/download" + # Consume using url index and signature (with nonce) + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + payload["signature"] = sign_message(_msg, consumer_wallet) + payload["nonce"] = nonce + response = client.get( + service.service_endpoint + download_endpoint, query_string=payload + ) + assert response.status_code == 200, f"{response.data}" + reply = json.loads(response.data) + assert ( + len(reply["data"]["nfts"]) == 1 + ) # make sure our parametrized query works, otherwise we will get a lot of nfts + assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower() diff --git a/tests/test_proof.py b/tests/test_proof.py new file mode 100644 index 00000000..3e29c37d --- /dev/null +++ b/tests/test_proof.py @@ -0,0 +1,83 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import json +from datetime import datetime +from unittest.mock import Mock, patch + +import pytest +from ocean_provider.utils.accounts import sign_message +from ocean_provider.utils.proof import send_proof +from ocean_provider.utils.provider_fees import get_provider_fees +from ocean_provider.utils.services import ServiceType +from requests.models import Response +from tests.test_helpers import ( + get_first_service_by_type, + get_registered_asset, + mint_100_datatokens, + start_order, +) + + +@pytest.mark.unit +def test_no_proof_setup(client): + assert send_proof(None, None, None, None, None, None, None) is None + + +@pytest.mark.unit +def test_http_proof(client, monkeypatch): + monkeypatch.setenv("USE_HTTP_PROOF", "http://test.com") + provider_data = json.dumps({"test_data": "test_value"}) + + with patch("requests.post") as mock: + response = Mock(spec=Response) + response.json.return_value = {"a valid response": ""} + 
response.status_code = 200 + mock.return_value = response + + assert send_proof(8996, b"1", provider_data, None, None, None, None) is True + + mock.assert_called_once() + + with patch("requests.post") as mock: + mock.side_effect = Exception("Boom!") + + assert send_proof(8996, b"1", provider_data, None, None, None, None) is None + + mock.assert_called_once() + + +@pytest.mark.integration +def test_chain_proof(client, monkeypatch, web3, publisher_wallet, consumer_wallet): + monkeypatch.setenv("USE_CHAIN_PROOF", "1") + provider_data = json.dumps({"test_data": "test_value"}) + + asset = get_registered_asset(publisher_wallet) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, receipt = start_order( + web3, + service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + nonce = str(datetime.utcnow().timestamp()) + + consumer_data = _msg = f"{asset.did}{nonce}" + signature = sign_message(_msg, consumer_wallet) + + assert send_proof( + 8996, + receipt.transactionHash, + provider_data, + consumer_data, + signature, + consumer_wallet.address, + service.datatoken_address, + ) diff --git a/tests/test_routes.py b/tests/test_routes.py new file mode 100644 index 00000000..5bc9891a --- /dev/null +++ b/tests/test_routes.py @@ -0,0 +1,140 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import json +from datetime import datetime + +import pytest +from ocean_provider.constants import BaseURLs +from ocean_provider.run import get_services_endpoints +from ocean_provider.user_nonce import get_nonce, update_nonce +from ocean_provider.utils.accounts import sign_message +from tests.test_helpers import get_registered_asset + + +@pytest.mark.unit +def test_expose_endpoints(client): + get_response = client.get("/") + result = get_response.get_json() + services_endpoints = get_services_endpoints() + assert "serviceEndpoints" in result + assert "software" in result + assert "version" in result + assert "chainIds" in result + assert "providerAddresses" in result + assert get_response.status == "200 OK" + assert len(result["serviceEndpoints"]) == len(services_endpoints) + + +@pytest.mark.unit +def test_spec(client): + response = client.get("/spec") + assert response.status == "200 OK" + + +@pytest.mark.unit +def test_root(client): + response = client.get("/") + assert response.status == "200 OK" + + +@pytest.mark.unit +def test_invalid_endpoint(client, caplog): + response = client.get("invalid/endpoint", query_string={"hello": "world"}) + assert response.status == "404 NOT FOUND" + # TODO: Capture and verify INFO log from log_incoming_request using caplog + + +@pytest.mark.unit +def test_empty_payload_encryption(client): + encrypt_endpoint = BaseURLs.SERVICES_URL + "/encrypt" + publish = client.post(encrypt_endpoint, data=None, content_type="application/json") + assert publish.status_code == 400 + + +@pytest.mark.integration +def test_encrypt_endpoint(client, provider_wallet, publisher_wallet): + asset = get_registered_asset(publisher_wallet) + files_list_str = '["https://raw.githubusercontent.com/tbertinmahieux/MSongsDB/master/Tasks_Demos/CoverSongs/shs_dataset_test.txt"]' + + nonce = datetime.utcnow().timestamp() + msg = f"{asset.did}{nonce}" + signature = sign_message(msg, provider_wallet) + + payload = { + "documentId": asset.did, + "signature": 
signature, + "document": files_list_str, + "publisherAddress": provider_wallet.address, + } + encrypt_endpoint = BaseURLs.SERVICES_URL + "/encrypt?chainId=8996" + response = client.post( + encrypt_endpoint, json=payload, content_type="application/octet-stream" + ) + assert response.content_type == "text/plain" + assert response.data + assert response.status_code == 201 + + +@pytest.mark.unit +def test_get_nonce(client, publisher_wallet): + address = publisher_wallet.address + # Ensure address exists in database + update_nonce(address, datetime.utcnow().timestamp()) + + endpoint = BaseURLs.SERVICES_URL + "/nonce" + response = client.get( + endpoint + "?" + f"&userAddress={address}", content_type="application/json" + ) + assert ( + response.status_code == 200 and response.data + ), f"get nonce endpoint failed: response status {response.status}, data {response.data}" + + value = response.json if response.json else json.loads(response.data) + assert value["nonce"] == get_nonce(address) + + +@pytest.mark.unit +def test_validate_container(client): + endpoint = BaseURLs.SERVICES_URL + "/validateContainer" + + valid_payload = { + "entrypoint": "node $ALGO", + "image": "oceanprotocol/algo_dockers", + "tag": "python-branin", + "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", + } + + response = client.post(endpoint, json=valid_payload) + assert response.status_code == 200 + + invalid_payload = { + "entrypoint": "node $ALGO", + "checksum": "sha256:8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", + } + + response = client.post(endpoint, json=invalid_payload) + assert response.status_code == 400 + assert response.json["error"] == "missing_entrypoint_image_checksum" + + another_valid_payload = { + "entrypoint": "node $ALGO", + "image": "node", # missing library prefix + "tag": "latest", + "checksum": "sha256:5c918be3339c8460d13a38e2fc7c027af1cab382b36561f90d3c03342fa866a4", + } + response = client.post(endpoint, json=another_valid_payload) + assert response.status_code == 200 + + invalid_payload = { + "entrypoint": "node $ALGO", + "image": "doesntexist", + "tag": "blabla", + # doesn't start with sha256: + "checksum": "8221d20c1c16491d7d56b9657ea09082c0ee4a8ab1a6621fa720da58b09580e4", + } + + response = client.post(endpoint, json=invalid_payload) + assert response.status_code == 400 + assert response.json["error"] == "checksum_prefix" diff --git a/tests/test_smartcontract.py b/tests/test_smartcontract.py new file mode 100644 index 00000000..d25b7f96 --- /dev/null +++ b/tests/test_smartcontract.py @@ -0,0 +1,205 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import json +import os +from datetime import datetime + +import pytest +from ocean_provider.constants import BaseURLs +from ocean_provider.utils.accounts import sign_message +from ocean_provider.utils.address import get_contract_address +from ocean_provider.utils.provider_fees import get_provider_fees +from ocean_provider.utils.services import ServiceType +from tests.test_helpers import ( + get_first_service_by_type, + get_registered_asset, + mint_100_datatokens, + start_order, +) + + +@pytest.mark.integration +def test_download_smartcontract_asset(client, publisher_wallet, consumer_wallet, web3): + # publish asset, that calls Router's swapOceanFee function (does not need params) + router_address = get_contract_address(os.getenv("ADDRESS_FILE"), "Router", 8996) + abi = { + "inputs": [], + "name": "swapOceanFee", + "outputs": [{"internalType": 
"uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + } + unencrypted_files_list = [ + { + "type": "smartcontract", + "address": router_address, + "abi": abi, + "chainId": 8996, + } + ] + asset = get_registered_asset( + publisher_wallet, unencrypted_files_list=unencrypted_files_list + ) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, _ = start_order( + web3, + service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + payload = { + "documentId": asset.did, + "serviceId": service.id, + "consumerAddress": consumer_wallet.address, + "transferTxId": tx_id, + "fileIndex": 0, + } + + download_endpoint = BaseURLs.SERVICES_URL + "/download" + + # Consume using url index and signature (with nonce) + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + payload["signature"] = sign_message(_msg, consumer_wallet) + payload["nonce"] = nonce + response = client.get( + service.service_endpoint + download_endpoint, query_string=payload + ) + assert response.status_code == 200, f"{response.data}" + + +@pytest.mark.integration +def test_download_smartcontract_asset_with_userdata( + client, publisher_wallet, consumer_wallet, web3 +): + # publish asset, that calls Router's getOPCFee for a provided baseToken userdata + router_address = get_contract_address(os.getenv("ADDRESS_FILE"), "Router", 8996) + abi = { + "inputs": [{"internalType": "address", "name": "baseToken", "type": "address"}], + "name": "getOPCFee", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + } + unencrypted_files_list = [ + { + "type": "smartcontract", + "address": router_address, + "abi": abi, + "chainId": 8996, + } + ] + asset = get_registered_asset( + publisher_wallet, + unencrypted_files_list=unencrypted_files_list, + custom_userdata=[ + { + "name": "baseToken", + "type": "text", + "label": "baseToken", + "required": True, + "description": "baseToken to check for fee", + } + ], + ) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, _ = start_order( + web3, + service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + payload = { + "documentId": asset.did, + "serviceId": service.id, + "consumerAddress": consumer_wallet.address, + "transferTxId": tx_id, + "fileIndex": 0, + "userdata": json.dumps({"baseToken": asset.nftAddress.lower()}), + } + + download_endpoint = BaseURLs.SERVICES_URL + "/download" + # Consume using url index and signature (with nonce) + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + payload["signature"] = sign_message(_msg, consumer_wallet) + payload["nonce"] = nonce + response = client.get( + service.service_endpoint + download_endpoint, query_string=payload + ) + assert response.status_code == 200, f"{response.data}" + + +@pytest.mark.integration +def test_download_smartcontract_asset_with_pure_function( + client, publisher_wallet, consumer_wallet, web3 +): + # publish dummy asset, to get a datatoken deployed + dummy_asset = get_registered_asset(publisher_wallet) + dummy_service = 
get_first_service_by_type(dummy_asset, ServiceType.ACCESS) + # create abi for getId + abi = { + "inputs": [], + "name": "getId", + "outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}], + "stateMutability": "pure", + "type": "function", + } + + unencrypted_files_list = [ + { + "type": "smartcontract", + "address": dummy_service.datatoken_address, + "abi": abi, + "chainId": 8996, + } + ] + asset = get_registered_asset( + publisher_wallet, unencrypted_files_list=unencrypted_files_list + ) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, _ = start_order( + web3, + service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + payload = { + "documentId": asset.did, + "serviceId": service.id, + "consumerAddress": consumer_wallet.address, + "transferTxId": tx_id, + "fileIndex": 0, + } + + download_endpoint = BaseURLs.SERVICES_URL + "/download" + + # Consume using url index and signature (with nonce) + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + payload["signature"] = sign_message(_msg, consumer_wallet) + payload["nonce"] = nonce + response = client.get( + service.service_endpoint + download_endpoint, query_string=payload + ) + assert response.status_code == 200, f"{response.data}" From 242639a60f0dc708487bffc81670a0d909b2d0c7 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 4 Apr 2023 14:09:14 +0300 Subject: [PATCH 57/83] Test only download. --- .github/workflows/pytest.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 886c0ba8..dc5c1bed 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,9 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - coverage run --source ocean_provider -m pytest - coverage report - coverage xml + pytest tests/test_download - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From a409d23f5bff3a116a99f86871444e9e91597f05 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 4 Apr 2023 14:10:56 +0300 Subject: [PATCH 58/83] Fixed workflow. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dc5c1bed..e8552c96 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,7 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_download + pytest tests/test_download.py - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From d4ef2a7f1d4403b5c96aa6c1eb401cfa463968a9 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 4 Apr 2023 14:57:47 +0300 Subject: [PATCH 59/83] Test only encryption. 
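
Still bisecting the CI hang: the previous two patches ran only the download tests, this one runs only tests/test_encryption.py. A rough local equivalent of that walk, sketched as a hypothetical helper that is not part of this patch set, times each module separately so a stalling one stands out:

    # Hypothetical bisection helper (not in this repo): run each test module
    # separately and report whether it finishes, fails, or times out.
    import subprocess
    import time

    MODULES = [
        "tests/test_download.py",
        "tests/test_encryption.py",
        "tests/test_fileinfo.py",
    ]

    for module in MODULES:
        start = time.time()
        try:
            result = subprocess.run(["pytest", module, "-x"], timeout=600)
            status = f"exit code {result.returncode}"
        except subprocess.TimeoutExpired:
            status = "timed out"  # candidate for the module stalling CI
        print(f"{module}: {status} after {time.time() - start:.1f}s")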
--- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index e8552c96..a8993eba 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,7 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_download.py + pytest tests/test_encryption.py - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From ba6c343809cd049cc6bec57b5fba72218e4b315e Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 4 Apr 2023 15:06:45 +0300 Subject: [PATCH 60/83] Test only fileinfo. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index a8993eba..dfe954b6 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,7 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_encryption.py + pytest tests/test_fileinfo.py - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From 1eb24705c39173b5243417c5f8ded27713193cc2 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 4 Apr 2023 16:48:41 +0300 Subject: [PATCH 61/83] Test only graphql tests. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index dfe954b6..b0fbb242 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,7 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_fileinfo.py + pytest tests/test_graphql.py - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From 8213018f33f9bcdd24ad8438452f4d7258fd88c7 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 4 Apr 2023 17:58:38 +0300 Subject: [PATCH 62/83] Deleted graphql tests. UpTest only initialize. 
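
The graphql tests hit the subgraph at http://172.15.0.15:8000, which the workflow does not start at this point (barge runs without --with-thegraph), so they cannot pass; they are deleted here rather than skipped. A module-level guard would be a less destructive alternative; a minimal sketch, assuming the same host and port, and not what this patch actually does:

    # Sketch: skip the whole module when the subgraph endpoint is unreachable,
    # instead of deleting the tests (illustrative only).
    import socket

    import pytest

    SUBGRAPH_HOST, SUBGRAPH_PORT = "172.15.0.15", 8000


    def _subgraph_reachable() -> bool:
        try:
            with socket.create_connection((SUBGRAPH_HOST, SUBGRAPH_PORT), timeout=5):
                return True
        except OSError:
            return False


    pytestmark = pytest.mark.skipif(
        not _subgraph_reachable(), reason="ocean-subgraph not deployed in barge"
    )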
--- .github/workflows/pytest.yml | 2 +- tests/test_graphql.py | 143 ----------------------------------- 2 files changed, 1 insertion(+), 144 deletions(-) delete mode 100644 tests/test_graphql.py diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b0fbb242..6387695b 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,7 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_graphql.py + pytest tests/test_initialize.py - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} diff --git a/tests/test_graphql.py b/tests/test_graphql.py deleted file mode 100644 index 89556af9..00000000 --- a/tests/test_graphql.py +++ /dev/null @@ -1,143 +0,0 @@ -# -# Copyright 2023 Ocean Protocol Foundation -# SPDX-License-Identifier: Apache-2.0 -# -import json -from datetime import datetime - -import pytest -from ocean_provider.constants import BaseURLs -from ocean_provider.utils.accounts import sign_message -from ocean_provider.utils.provider_fees import get_provider_fees -from ocean_provider.utils.services import ServiceType -from tests.test_helpers import ( - get_first_service_by_type, - get_registered_asset, - mint_100_datatokens, - start_order, -) - - -@pytest.mark.integration -def test_download_graphql_asset(client, publisher_wallet, consumer_wallet, web3): - unencrypted_files_list = [ - { - "type": "graphql", - "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", - "query": """ - query{ - nfts(orderBy: createdTimestamp,orderDirection:desc){ - id - symbol - createdTimestamp - } - } - """, - } - ] - asset = get_registered_asset( - publisher_wallet, unencrypted_files_list=unencrypted_files_list - ) - service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, _ = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - payload = { - "documentId": asset.did, - "serviceId": service.id, - "consumerAddress": consumer_wallet.address, - "transferTxId": tx_id, - "fileIndex": 0, - } - - download_endpoint = BaseURLs.SERVICES_URL + "/download" - - # Consume using url index and signature (with nonce) - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - payload["signature"] = sign_message(_msg, consumer_wallet) - payload["nonce"] = nonce - response = client.get( - service.service_endpoint + download_endpoint, query_string=payload - ) - assert response.status_code == 200, f"{response.data}" - - -@pytest.mark.integration -def test_download_graphql_asset_with_userdata( - client, publisher_wallet, consumer_wallet, web3 -): - unencrypted_files_list = [ - { - "type": "graphql", - "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", - "query": """ - query nfts($nftAddress: String){ - nfts(where: {id:$nftAddress},orderBy: createdTimestamp,orderDirection:desc){ - id - symbol - createdTimestamp - } - } - """, - } - ] - asset = get_registered_asset( - publisher_wallet, - unencrypted_files_list=unencrypted_files_list, - custom_userdata=[ - { - "name": "nftAddress", - "type": "text", - "label": "nftAddress", - "required": True, - "description": "Nft to search for", - } - ], - ) - service = get_first_service_by_type(asset, 
ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) - tx_id, _ = start_order( - web3, - service.datatoken_address, - consumer_wallet.address, - service.index, - get_provider_fees(asset, service, consumer_wallet.address, 0), - consumer_wallet, - ) - - payload = { - "documentId": asset.did, - "serviceId": service.id, - "consumerAddress": consumer_wallet.address, - "transferTxId": tx_id, - "fileIndex": 0, - "userdata": json.dumps({"nftAddress": asset.nftAddress.lower()}), - } - - download_endpoint = BaseURLs.SERVICES_URL + "/download" - # Consume using url index and signature (with nonce) - nonce = str(datetime.utcnow().timestamp()) - _msg = f"{asset.did}{nonce}" - payload["signature"] = sign_message(_msg, consumer_wallet) - payload["nonce"] = nonce - response = client.get( - service.service_endpoint + download_endpoint, query_string=payload - ) - assert response.status_code == 200, f"{response.data}" - reply = json.loads(response.data) - assert ( - len(reply["data"]["nfts"]) == 1 - ) # make sure our parametrized query works, otherwise we will get a lot of nfts - assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower() From 7c1648465b00dcb605f43a00a1bb52693ea4b33c Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Wed, 5 Apr 2023 11:53:07 +0300 Subject: [PATCH 63/83] Added socket test. Added timestamps to each test. --- tests/test_initialize.py | 72 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/tests/test_initialize.py b/tests/test_initialize.py index 6cd037d2..df97df9f 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -5,6 +5,7 @@ import json import logging import time +from datetime import datetime from unittest.mock import patch import pytest @@ -35,6 +36,8 @@ @pytest.mark.integration def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): + test1_start_timestamp = datetime.now() + logger.info(f"test1_start_timestamp: {test1_start_timestamp}") asset = get_dataset_with_invalid_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -47,10 +50,15 @@ def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): ) assert "error" in response.json assert "Asset URL not found, not available or invalid." 
in response.json["error"] + test1_end_timestamp = datetime.now() + logger.info(f"test1_end_timestamp: {test1_end_timestamp}") + logger.info(f"test1 duration: {test1_end_timestamp - test1_start_timestamp}") @pytest.mark.integration def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3): + test2_start_timestamp = datetime.now() + logger.info(f"test2_start_timestamp: {test2_start_timestamp}") asset = get_dataset_with_ipfs_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) mint_100_datatokens( @@ -61,10 +69,15 @@ def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3) ) assert datatoken == service.datatoken_address + test2_end_timestamp = datetime.now() + logger.info(f"test2_end_timestamp: {test2_end_timestamp}") + logger.info(f"test2 duration: {test2_end_timestamp - test2_start_timestamp}") @pytest.mark.integration def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, web3): + test3_start_timestamp = datetime.now() + logger.info(f"test3_start_timestamp: {test3_start_timestamp}") asset, real_asset = get_dataset_ddo_disabled(client, publisher_wallet) assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -78,10 +91,15 @@ def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, ) assert "error" in response.json assert response.json["error"] == "Asset malformed or disabled." + test3_end_timestamp = datetime.now() + logger.info(f"test3_end_timestamp: {test3_end_timestamp}") + logger.info(f"test3 duration: {test3_end_timestamp - test3_start_timestamp}") @pytest.mark.integration def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, web3): + test4_start_timestamp = datetime.now() + logger.info(f"test4_start_timestamp: {test4_start_timestamp}") asset, real_asset = get_dataset_ddo_unlisted(client, publisher_wallet) assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -95,12 +113,17 @@ def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, ) assert datatoken == service.datatoken_address + test4_end_timestamp = datetime.now() + logger.info(f"test4_end_timestamp: {test4_end_timestamp}") + logger.info(f"test4 duration: {test4_end_timestamp - test4_start_timestamp}") @pytest.mark.integration def test_initialize_on_asset_with_custom_credentials( client, publisher_wallet, consumer_wallet, web3 ): + test5_start_timestamp = datetime.now() + logger.info(f"test5_start_timestamp: {test5_start_timestamp}") asset = get_dataset_ddo_with_denied_consumer( client, publisher_wallet, consumer_wallet.address ) @@ -119,10 +142,15 @@ def test_initialize_on_asset_with_custom_credentials( response.json["error"] == f"Error: Access to asset {asset.did} was denied with code: ConsumableCodes.CREDENTIAL_IN_DENY_LIST." 
) + test5_end_timestamp = datetime.now() + logger.info(f"test5_end_timestamp: {test5_end_timestamp}") + logger.info(f"test5 duration: {test5_end_timestamp - test5_start_timestamp}") @pytest.mark.integration def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): + test6_start_timestamp = datetime.now() + logger.info(f"test6_start_timestamp: {test6_start_timestamp}") asset = get_dataset_ddo_with_multiple_files(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -166,12 +194,17 @@ def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): assert response.json["datatoken"] == service.datatoken_address assert "validOrder" not in response.json + test6_end_timestamp = datetime.now() + logger.info(f"test6_end_timestamp: {test6_end_timestamp}") + logger.info(f"test6 duration: {test6_end_timestamp - test6_start_timestamp}") @pytest.mark.integration def test_can_not_initialize_compute_service_with_simple_initialize( client, publisher_wallet, consumer_wallet, web3 ): + test7_start_timestamp = datetime.now() + logger.info(f"test7_start_timestamp: {test7_start_timestamp}") asset_w_compute_service = get_registered_asset( publisher_wallet, custom_services="vanilla_compute", custom_services_args=[] ) @@ -188,6 +221,9 @@ def test_can_not_initialize_compute_service_with_simple_initialize( response.json["error"] == "Use the initializeCompute endpoint to initialize compute jobs." ) + test7_end_timestamp = datetime.now() + logger.info(f"test7_end_timestamp: {test7_end_timestamp}") + logger.info(f"test7 duration: {test7_end_timestamp - test7_start_timestamp}") @pytest.mark.integration @@ -198,6 +234,8 @@ def test_initialize_compute_works( Assert response contains `datatoken` and `providerFee` and does not contain `validOrder` for both dataset and algorithm. 
""" + test8_start_timestamp = datetime.now() + logger.info(f"test8_start_timestamp: {test8_start_timestamp}") ddo, alg_ddo = build_and_send_ddo_with_compute_service( client, publisher_wallet, @@ -241,6 +279,9 @@ def test_initialize_compute_works( assert "datatoken" in response.json["algorithm"] assert "providerFee" in response.json["algorithm"] assert "validOrder" not in response.json["algorithm"] + test8_end_timestamp = datetime.now() + logger.info(f"test8_end_timestamp: {test8_end_timestamp}") + logger.info(f"test8 duration: {test8_end_timestamp - test8_start_timestamp}") @pytest.mark.integration @@ -266,6 +307,8 @@ def test_initialize_compute_order_reused( Case 4: wrong tx id for dataset order """ + test9_start_timestamp = datetime.now() + logger.info(f"test9_start_timestamp: {test9_start_timestamp}") # Order asset, valid for 30 seconds valid_until = get_future_valid_until(short=True) ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -369,12 +412,17 @@ def test_initialize_compute_order_reused( assert response.status_code == 200 assert "datatoken" in response.json["datasets"][0].keys() assert "providerFee" in response.json["datasets"][0].keys() + test9_end_timestamp = datetime.now() + logger.info(f"test9_end_timestamp: {test9_end_timestamp}") + logger.info(f"test9 duration: {test9_end_timestamp - test9_start_timestamp}") @pytest.mark.integration def test_initialize_compute_paid_env( client, publisher_wallet, consumer_wallet, paid_c2d_env ): + test10_start_timestamp = datetime.now() + logger.info(f"test10_start_timestamp: {test10_start_timestamp}") ddo, alg_ddo = build_and_send_ddo_with_compute_service( client, publisher_wallet, @@ -414,3 +462,27 @@ def test_initialize_compute_paid_env( assert int( response.json["datasets"][0]["providerFee"]["providerFeeAmount"] ) >= to_wei(7) + test10_end_timestamp = datetime.now() + logger.info(f"test10_end_timestamp: {test10_end_timestamp}") + logger.info(f"test10 duration: {test10_end_timestamp - test10_start_timestamp}") + + +def test_socket(): + test11_start_timestamp = datetime.now() + logger.info(f"test11_start_timestamp: {test11_start_timestamp}") + import socket + + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + s.connect(("172.15.0.13", 31000)) + logger.info(f"socket connected successfully to op serv") + s.send(b"GET / HTTP/1.1\r\nHost:172.15.0.13\r\n\r\n") + logger.info(f"socket send GET request successfully to op serv") + response = s.recv(4096) + s.close() + logger.info(f"socket closed successfully") + assert response + logger.info(f"response from socket op serv: {response.decode()}") + test11_end_timestamp = datetime.now() + logger.info(f"test10_end_timestamp: {test11_end_timestamp}") + logger.info(f"test10 duration: {test11_end_timestamp - test11_start_timestamp}") From 43776d6f47adc7b22dd4b2112c15ee5324cd5b82 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Wed, 5 Apr 2023 13:58:11 +0300 Subject: [PATCH 64/83] Decresed number of retries. 
--- ocean_provider/requests_session.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocean_provider/requests_session.py b/ocean_provider/requests_session.py index 861f3ad8..5b9e65d5 100644 --- a/ocean_provider/requests_session.py +++ b/ocean_provider/requests_session.py @@ -13,7 +13,7 @@ def get_requests_session() -> Session: :return: requests session """ session = Session() - retries = Retry(total=7, backoff_factor=1, status_forcelist=[502, 503, 504]) + retries = Retry(total=3, backoff_factor=1, status_forcelist=[502, 503, 504]) session.mount( "http://", HTTPAdapter( From bf9f134b86461fd274382263a0cebe16fcb99d26 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Fri, 7 Apr 2023 17:54:18 +0300 Subject: [PATCH 65/83] Added back graphql. Modified workflow to skip subgraph deployment. --- .github/workflows/pytest.yml | 2 +- ocean_provider/requests_session.py | 2 +- tests/test_graphql.py | 143 +++++++++++++++++++++++++++++ 3 files changed, 145 insertions(+), 2 deletions(-) create mode 100644 tests/test_graphql.py diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 6387695b..64c1a80b 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -33,7 +33,7 @@ jobs: - name: Run Barge working-directory: ${{ github.workspace }}/barge run: | - bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d > start_ocean.log & + bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d --with-thegraph --skip-subgraph-deploy > start_ocean.log & - name: Wait for contracts deployment and C2D cluster to be ready working-directory: ${{ github.workspace }}/barge run: | diff --git a/ocean_provider/requests_session.py b/ocean_provider/requests_session.py index 5b9e65d5..0e07ab44 100644 --- a/ocean_provider/requests_session.py +++ b/ocean_provider/requests_session.py @@ -13,7 +13,7 @@ def get_requests_session() -> Session: :return: requests session """ session = Session() - retries = Retry(total=3, backoff_factor=1, status_forcelist=[502, 503, 504]) + retries = Retry(total=5, backoff_factor=1, status_forcelist=[502, 503, 504]) session.mount( "http://", HTTPAdapter( diff --git a/tests/test_graphql.py b/tests/test_graphql.py new file mode 100644 index 00000000..63ce8a08 --- /dev/null +++ b/tests/test_graphql.py @@ -0,0 +1,143 @@ +# +# Copyright 2023 Ocean Protocol Foundation +# SPDX-License-Identifier: Apache-2.0 +# +import json +from datetime import datetime + +import pytest +from ocean_provider.constants import BaseURLs +from ocean_provider.utils.accounts import sign_message +from ocean_provider.utils.provider_fees import get_provider_fees +from ocean_provider.utils.services import ServiceType +from tests.test_helpers import ( + get_first_service_by_type, + get_registered_asset, + mint_100_datatokens, + start_order, +) + + +@pytest.mark.integration +def test_download_graphql_asset(client, publisher_wallet, consumer_wallet, web3): + unencrypted_files_list = [ + { + "type": "graphql", + "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", + "query": """ + query{ + nfts(orderBy: createdTimestamp,orderDirection:desc){ + id + symbol + createdTimestamp + } + } + """, + } + ] + asset = get_registered_asset( + publisher_wallet, unencrypted_files_list=unencrypted_files_list + ) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, _ = start_order( + web3, + 
service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + payload = { + "documentId": asset.did, + "serviceId": service.id, + "consumerAddress": consumer_wallet.address, + "transferTxId": tx_id, + "fileIndex": 0, + } + + download_endpoint = BaseURLs.SERVICES_URL + "/download" + + # Consume using url index and signature (with nonce) + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + payload["signature"] = sign_message(_msg, consumer_wallet) + payload["nonce"] = nonce + response = client.get( + service.service_endpoint + download_endpoint, query_string=payload + ) + assert response.status_code == 200, f"{response.data}" + + +@pytest.mark.integration +def test_download_graphql_asset_with_userdata( + client, publisher_wallet, consumer_wallet, web3 +): + unencrypted_files_list = [ + { + "type": "graphql", + "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", + "query": """ + query nfts($nftAddress: String){ + nfts(where: {id:$nftAddress},orderBy: createdTimestamp,orderDirection:desc){ + id + symbol + createdTimestamp + } + } + """, + } + ] + asset = get_registered_asset( + publisher_wallet, + unencrypted_files_list=unencrypted_files_list, + custom_userdata=[ + { + "name": "nftAddress", + "type": "text", + "label": "nftAddress", + "required": True, + "description": "Nft to search for", + } + ], + ) + service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) + tx_id, _ = start_order( + web3, + service.datatoken_address, + consumer_wallet.address, + service.index, + get_provider_fees(asset, service, consumer_wallet.address, 0), + consumer_wallet, + ) + + payload = { + "documentId": asset.did, + "serviceId": service.id, + "consumerAddress": consumer_wallet.address, + "transferTxId": tx_id, + "fileIndex": 0, + "userdata": json.dumps({"nftAddress": asset.nftAddress.lower()}), + } + + download_endpoint = BaseURLs.SERVICES_URL + "/download" + # Consume using url index and signature (with nonce) + nonce = str(datetime.utcnow().timestamp()) + _msg = f"{asset.did}{nonce}" + payload["signature"] = sign_message(_msg, consumer_wallet) + payload["nonce"] = nonce + response = client.get( + service.service_endpoint + download_endpoint, query_string=payload + ) + assert response.status_code == 200, f"{response.data}" + reply = json.loads(response.data) + assert ( + len(reply["data"]["nfts"]) == 1 + ) # make sure our parametrized query works, otherwise we will get a lot of nfts + assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower() \ No newline at end of file From 6627d7e85806a77938a60fdb006c4ea795ae7131 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Fri, 7 Apr 2023 17:56:03 +0300 Subject: [PATCH 66/83] black. 
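
Formatting-only follow-up: the re-added tests/test_graphql.py was missing its trailing newline (the "\ No newline at end of file" marker in the diff below), and black's only change is to restore it. A quick local check, sketched with black's in-process API (black.format_str and black.Mode exist in current releases, but treat the exact call as illustrative):

    # Sketch: confirm a snippet is black-clean, including the final newline.
    import black

    src = 'assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower()'
    formatted = black.format_str(src, mode=black.Mode())
    assert formatted.endswith("\n")  # black ends every file with one newline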
--- tests/test_graphql.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_graphql.py b/tests/test_graphql.py index 63ce8a08..89556af9 100644 --- a/tests/test_graphql.py +++ b/tests/test_graphql.py @@ -140,4 +140,4 @@ def test_download_graphql_asset_with_userdata( assert ( len(reply["data"]["nfts"]) == 1 ) # make sure our parametrized query works, otherwise we will get a lot of nfts - assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower() \ No newline at end of file + assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower() From 17f0a1114ab779762c03ef5ff54194f9cca2714d Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Mon, 10 Apr 2023 13:18:26 +0300 Subject: [PATCH 67/83] Commented mint_fake_ocean. --- tests/test_initialize.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/tests/test_initialize.py b/tests/test_initialize.py index df97df9f..ae54b72f 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -61,9 +61,9 @@ def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3) logger.info(f"test2_start_timestamp: {test2_start_timestamp}") asset = get_dataset_with_ipfs_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) + # mint_100_datatokens( + # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + # ) datatoken, nonce, computeAddress, providerFees = initialize_service( client, asset.did, service, consumer_wallet ) @@ -82,9 +82,9 @@ def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) + # mint_100_datatokens( + # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + # ) response = initialize_service( client, asset.did, service, consumer_wallet, raw_response=True @@ -104,9 +104,9 @@ def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) + # mint_100_datatokens( + # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + # ) datatoken, nonce, computeAddress, providerFees = initialize_service( client, asset.did, service, consumer_wallet @@ -130,9 +130,9 @@ def test_initialize_on_asset_with_custom_credentials( service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) + # mint_100_datatokens( + # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + # ) response = initialize_service( client, asset.did, service, consumer_wallet, raw_response=True @@ -155,9 +155,9 @@ def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): service = get_first_service_by_type(asset, ServiceType.ACCESS) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) + # mint_100_datatokens( + # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + # ) tx_id, _ = start_order( web3, @@ -209,9 +209,9 @@ def 
test_can_not_initialize_compute_service_with_simple_initialize( publisher_wallet, custom_services="vanilla_compute", custom_services_args=[] ) service = get_first_service_by_type(asset_w_compute_service, ServiceType.COMPUTE) - mint_100_datatokens( - web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - ) + # mint_100_datatokens( + # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + # ) response = initialize_service( client, asset_w_compute_service.did, service, consumer_wallet, raw_response=True From 41bdeed882a7695e2f627ddc6a19dfde6a7691c6 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 11:59:21 +0300 Subject: [PATCH 68/83] replace with print. --- tests/test_initialize.py | 74 ++++++++++++++++++++-------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/tests/test_initialize.py b/tests/test_initialize.py index ae54b72f..679f14fe 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -37,7 +37,7 @@ @pytest.mark.integration def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): test1_start_timestamp = datetime.now() - logger.info(f"test1_start_timestamp: {test1_start_timestamp}") + print(f"test1_start_timestamp: {test1_start_timestamp}") asset = get_dataset_with_invalid_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -51,14 +51,14 @@ def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): assert "error" in response.json assert "Asset URL not found, not available or invalid." in response.json["error"] test1_end_timestamp = datetime.now() - logger.info(f"test1_end_timestamp: {test1_end_timestamp}") - logger.info(f"test1 duration: {test1_end_timestamp - test1_start_timestamp}") + print(f"test1_end_timestamp: {test1_end_timestamp}") + print(f"test1 duration: {test1_end_timestamp - test1_start_timestamp}") @pytest.mark.integration def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3): test2_start_timestamp = datetime.now() - logger.info(f"test2_start_timestamp: {test2_start_timestamp}") + print(f"test2_start_timestamp: {test2_start_timestamp}") asset = get_dataset_with_ipfs_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) # mint_100_datatokens( @@ -70,14 +70,14 @@ def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3) assert datatoken == service.datatoken_address test2_end_timestamp = datetime.now() - logger.info(f"test2_end_timestamp: {test2_end_timestamp}") - logger.info(f"test2 duration: {test2_end_timestamp - test2_start_timestamp}") + print(f"test2_end_timestamp: {test2_end_timestamp}") + print(f"test2 duration: {test2_end_timestamp - test2_start_timestamp}") @pytest.mark.integration def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, web3): test3_start_timestamp = datetime.now() - logger.info(f"test3_start_timestamp: {test3_start_timestamp}") + print(f"test3_start_timestamp: {test3_start_timestamp}") asset, real_asset = get_dataset_ddo_disabled(client, publisher_wallet) assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -92,14 +92,14 @@ def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, assert "error" in response.json assert response.json["error"] == "Asset malformed or disabled." 
test3_end_timestamp = datetime.now() - logger.info(f"test3_end_timestamp: {test3_end_timestamp}") - logger.info(f"test3 duration: {test3_end_timestamp - test3_start_timestamp}") + print(f"test3_end_timestamp: {test3_end_timestamp}") + print(f"test3 duration: {test3_end_timestamp - test3_start_timestamp}") @pytest.mark.integration def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, web3): test4_start_timestamp = datetime.now() - logger.info(f"test4_start_timestamp: {test4_start_timestamp}") + print(f"test4_start_timestamp: {test4_start_timestamp}") asset, real_asset = get_dataset_ddo_unlisted(client, publisher_wallet) assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -114,8 +114,8 @@ def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, assert datatoken == service.datatoken_address test4_end_timestamp = datetime.now() - logger.info(f"test4_end_timestamp: {test4_end_timestamp}") - logger.info(f"test4 duration: {test4_end_timestamp - test4_start_timestamp}") + print(f"test4_end_timestamp: {test4_end_timestamp}") + print(f"test4 duration: {test4_end_timestamp - test4_start_timestamp}") @pytest.mark.integration @@ -123,7 +123,7 @@ def test_initialize_on_asset_with_custom_credentials( client, publisher_wallet, consumer_wallet, web3 ): test5_start_timestamp = datetime.now() - logger.info(f"test5_start_timestamp: {test5_start_timestamp}") + print(f"test5_start_timestamp: {test5_start_timestamp}") asset = get_dataset_ddo_with_denied_consumer( client, publisher_wallet, consumer_wallet.address ) @@ -143,14 +143,14 @@ def test_initialize_on_asset_with_custom_credentials( == f"Error: Access to asset {asset.did} was denied with code: ConsumableCodes.CREDENTIAL_IN_DENY_LIST." 
) test5_end_timestamp = datetime.now() - logger.info(f"test5_end_timestamp: {test5_end_timestamp}") - logger.info(f"test5 duration: {test5_end_timestamp - test5_start_timestamp}") + print(f"test5_end_timestamp: {test5_end_timestamp}") + print(f"test5 duration: {test5_end_timestamp - test5_start_timestamp}") @pytest.mark.integration def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): test6_start_timestamp = datetime.now() - logger.info(f"test6_start_timestamp: {test6_start_timestamp}") + print(f"test6_start_timestamp: {test6_start_timestamp}") asset = get_dataset_ddo_with_multiple_files(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -195,8 +195,8 @@ def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): assert response.json["datatoken"] == service.datatoken_address assert "validOrder" not in response.json test6_end_timestamp = datetime.now() - logger.info(f"test6_end_timestamp: {test6_end_timestamp}") - logger.info(f"test6 duration: {test6_end_timestamp - test6_start_timestamp}") + print(f"test6_end_timestamp: {test6_end_timestamp}") + print(f"test6 duration: {test6_end_timestamp - test6_start_timestamp}") @pytest.mark.integration @@ -204,7 +204,7 @@ def test_can_not_initialize_compute_service_with_simple_initialize( client, publisher_wallet, consumer_wallet, web3 ): test7_start_timestamp = datetime.now() - logger.info(f"test7_start_timestamp: {test7_start_timestamp}") + print(f"test7_start_timestamp: {test7_start_timestamp}") asset_w_compute_service = get_registered_asset( publisher_wallet, custom_services="vanilla_compute", custom_services_args=[] ) @@ -222,8 +222,8 @@ def test_can_not_initialize_compute_service_with_simple_initialize( == "Use the initializeCompute endpoint to initialize compute jobs." ) test7_end_timestamp = datetime.now() - logger.info(f"test7_end_timestamp: {test7_end_timestamp}") - logger.info(f"test7 duration: {test7_end_timestamp - test7_start_timestamp}") + print(f"test7_end_timestamp: {test7_end_timestamp}") + print(f"test7 duration: {test7_end_timestamp - test7_start_timestamp}") @pytest.mark.integration @@ -235,7 +235,7 @@ def test_initialize_compute_works( `validOrder` for both dataset and algorithm. 
""" test8_start_timestamp = datetime.now() - logger.info(f"test8_start_timestamp: {test8_start_timestamp}") + print(f"test8_start_timestamp: {test8_start_timestamp}") ddo, alg_ddo = build_and_send_ddo_with_compute_service( client, publisher_wallet, @@ -280,8 +280,8 @@ def test_initialize_compute_works( assert "providerFee" in response.json["algorithm"] assert "validOrder" not in response.json["algorithm"] test8_end_timestamp = datetime.now() - logger.info(f"test8_end_timestamp: {test8_end_timestamp}") - logger.info(f"test8 duration: {test8_end_timestamp - test8_start_timestamp}") + print(f"test8_end_timestamp: {test8_end_timestamp}") + print(f"test8 duration: {test8_end_timestamp - test8_start_timestamp}") @pytest.mark.integration @@ -308,7 +308,7 @@ def test_initialize_compute_order_reused( wrong tx id for dataset order """ test9_start_timestamp = datetime.now() - logger.info(f"test9_start_timestamp: {test9_start_timestamp}") + print(f"test9_start_timestamp: {test9_start_timestamp}") # Order asset, valid for 30 seconds valid_until = get_future_valid_until(short=True) ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -413,8 +413,8 @@ def test_initialize_compute_order_reused( assert "datatoken" in response.json["datasets"][0].keys() assert "providerFee" in response.json["datasets"][0].keys() test9_end_timestamp = datetime.now() - logger.info(f"test9_end_timestamp: {test9_end_timestamp}") - logger.info(f"test9 duration: {test9_end_timestamp - test9_start_timestamp}") + print(f"test9_end_timestamp: {test9_end_timestamp}") + print(f"test9 duration: {test9_end_timestamp - test9_start_timestamp}") @pytest.mark.integration @@ -422,7 +422,7 @@ def test_initialize_compute_paid_env( client, publisher_wallet, consumer_wallet, paid_c2d_env ): test10_start_timestamp = datetime.now() - logger.info(f"test10_start_timestamp: {test10_start_timestamp}") + print(f"test10_start_timestamp: {test10_start_timestamp}") ddo, alg_ddo = build_and_send_ddo_with_compute_service( client, publisher_wallet, @@ -463,26 +463,26 @@ def test_initialize_compute_paid_env( response.json["datasets"][0]["providerFee"]["providerFeeAmount"] ) >= to_wei(7) test10_end_timestamp = datetime.now() - logger.info(f"test10_end_timestamp: {test10_end_timestamp}") - logger.info(f"test10 duration: {test10_end_timestamp - test10_start_timestamp}") + print(f"test10_end_timestamp: {test10_end_timestamp}") + print(f"test10 duration: {test10_end_timestamp - test10_start_timestamp}") def test_socket(): test11_start_timestamp = datetime.now() - logger.info(f"test11_start_timestamp: {test11_start_timestamp}") + print(f"test11_start_timestamp: {test11_start_timestamp}") import socket s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect(("172.15.0.13", 31000)) - logger.info(f"socket connected successfully to op serv") + print(f"socket connected successfully to op serv") s.send(b"GET / HTTP/1.1\r\nHost:172.15.0.13\r\n\r\n") - logger.info(f"socket send GET request successfully to op serv") + print(f"socket send GET request successfully to op serv") response = s.recv(4096) s.close() - logger.info(f"socket closed successfully") + print(f"socket closed successfully") assert response - logger.info(f"response from socket op serv: {response.decode()}") + print(f"response from socket op serv: {response.decode()}") test11_end_timestamp = datetime.now() - logger.info(f"test10_end_timestamp: {test11_end_timestamp}") - logger.info(f"test10 duration: {test11_end_timestamp - test11_start_timestamp}") + 
print(f"test10_end_timestamp: {test11_end_timestamp}") + print(f"test10 duration: {test11_end_timestamp - test11_start_timestamp}") From dda2469aa15c3344fb399c8c452528cc1fcdb8f8 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 12:01:03 +0300 Subject: [PATCH 69/83] Test only one test. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 64c1a80b..985c3ab3 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,7 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_initialize.py + pytest tests/test_initialize.py::test_initialize_on_bad_url - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From bd10575ac6b7dac96b8e1de262fb45ca3969c39a Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 12:28:06 +0300 Subject: [PATCH 70/83] Test the ipfs url test. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 985c3ab3..9c78791e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -54,7 +54,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_initialize.py::test_initialize_on_bad_url + pytest tests/test_initialize.py::test_initialize_on_ipfs_url - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 if: ${{ failure() }} From 8dc44db57a19327df6ce1b5d24f49f9faa89b18f Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 13:48:31 +0300 Subject: [PATCH 71/83] Added logs for ipfs. --- .github/workflows/pytest.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 9c78791e..38ec76b7 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -47,6 +47,7 @@ jobs: nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 + nc -zv 172.15.0.16 5001 - name: Install dependencies working-directory: ${{ github.workspace }} run: | @@ -56,7 +57,7 @@ jobs: run: | pytest tests/test_initialize.py::test_initialize_on_ipfs_url - name: docker logs - run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 + run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 && docker logs ocean_ipfs_1 if: ${{ failure() }} - name: Publish code coverage uses: paambaati/codeclimate-action@v2.7.5 From aa1a6e390670ecf24db70be205950a7368f20b80 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 14:27:23 +0300 Subject: [PATCH 72/83] Modified ipfs test. 
--- .github/workflows/pytest.yml | 2 +- tests/test_initialize.py | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 38ec76b7..b502343e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -47,7 +47,7 @@ jobs: nc -zv 172.15.0.13 31000 cat $HOME/.ocean/ocean-contracts/artifacts/address.json curl http://172.15.0.13:31000 - nc -zv 172.15.0.16 5001 + nc -zv 172.15.0.16 8080 - name: Install dependencies working-directory: ${{ github.workspace }} run: | diff --git a/tests/test_initialize.py b/tests/test_initialize.py index 679f14fe..14ceea57 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -8,6 +8,7 @@ from datetime import datetime from unittest.mock import patch +import ipfshttpclient import pytest from ocean_provider.constants import BaseURLs from ocean_provider.utils.currency import to_wei @@ -59,7 +60,14 @@ def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3): test2_start_timestamp = datetime.now() print(f"test2_start_timestamp: {test2_start_timestamp}") - asset = get_dataset_with_ipfs_url_ddo(client, publisher_wallet) + client = ipfshttpclient.connect("/dns/172.15.0.16/tcp/5001/http") + cid = client.add("./resources/ddo_sample_file.txt")["Hash"] + url_object = {"type": "ipfs", "hash": cid} + asset = get_registered_asset( + publisher_wallet, + unencrypted_files_list=[url_object], + ) + # get_dataset_with_ipfs_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) # mint_100_datatokens( # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet From f3f53fcad2676bac91b029ebd726a8063e601c0e Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 15:21:49 +0300 Subject: [PATCH 73/83] Updated ipfs test. 
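Note: besides re-enabling the whole test module, this patch fixes the sample
file path (tests/resources/) and drops the commented-out mint calls together
with the now-unused get_dataset_with_ipfs_url_ddo import. The workflow's
`nc -zv` checks can also be reproduced from Python when debugging locally; a
rough sketch along the lines of the temporary test_socket helper (hosts and
ports taken from the workflow, the helper itself is illustrative):

    import socket

    def port_is_open(host: str, port: int, timeout: float = 5.0) -> bool:
        # create_connection performs a full TCP handshake and raises
        # OSError when the service is unreachable.
        try:
            with socket.create_connection((host, port), timeout=timeout):
                return True
        except OSError:
            return False

    assert port_is_open("172.15.0.13", 31000)  # operator service
    assert port_is_open("172.15.0.16", 8080)   # IPFS gateway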
--- .github/workflows/pytest.yml | 2 +- tests/test_initialize.py | 34 ++-------------------------------- 2 files changed, 3 insertions(+), 33 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b502343e..e64d19b8 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -55,7 +55,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_initialize.py::test_initialize_on_ipfs_url + pytest tests/test_initialize.py - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 && docker logs ocean_ipfs_1 if: ${{ failure() }} diff --git a/tests/test_initialize.py b/tests/test_initialize.py index 14ceea57..c8b924a8 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -24,7 +24,6 @@ get_dataset_ddo_with_denied_consumer, get_dataset_ddo_with_multiple_files, get_dataset_with_invalid_url_ddo, - get_dataset_with_ipfs_url_ddo, get_first_service_by_type, get_registered_asset, initialize_service, @@ -58,29 +57,19 @@ def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): @pytest.mark.integration def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3): - test2_start_timestamp = datetime.now() - print(f"test2_start_timestamp: {test2_start_timestamp}") - client = ipfshttpclient.connect("/dns/172.15.0.16/tcp/5001/http") - cid = client.add("./resources/ddo_sample_file.txt")["Hash"] + ipfs_client = ipfshttpclient.connect("/dns/172.15.0.16/tcp/5001/http") + cid = ipfs_client.add("./tests/resources/ddo_sample_file.txt")["Hash"] url_object = {"type": "ipfs", "hash": cid} asset = get_registered_asset( publisher_wallet, unencrypted_files_list=[url_object], ) - # get_dataset_with_ipfs_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) - # mint_100_datatokens( - # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - # ) datatoken, nonce, computeAddress, providerFees = initialize_service( client, asset.did, service, consumer_wallet ) assert datatoken == service.datatoken_address - test2_end_timestamp = datetime.now() - print(f"test2_end_timestamp: {test2_end_timestamp}") - print(f"test2 duration: {test2_end_timestamp - test2_start_timestamp}") - @pytest.mark.integration def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, web3): @@ -90,10 +79,6 @@ def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) - # mint_100_datatokens( - # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - # ) - response = initialize_service( client, asset.did, service, consumer_wallet, raw_response=True ) @@ -112,10 +97,6 @@ def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) - # mint_100_datatokens( - # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - # ) - datatoken, nonce, computeAddress, providerFees = initialize_service( client, asset.did, service, consumer_wallet ) @@ -138,10 +119,6 @@ def test_initialize_on_asset_with_custom_credentials( service = get_first_service_by_type(asset, ServiceType.ACCESS) - # mint_100_datatokens( - # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - # ) - response = initialize_service( client, 
asset.did, service, consumer_wallet, raw_response=True ) @@ -163,10 +140,6 @@ def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): service = get_first_service_by_type(asset, ServiceType.ACCESS) - # mint_100_datatokens( - # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - # ) - tx_id, _ = start_order( web3, service.datatoken_address, @@ -217,9 +190,6 @@ def test_can_not_initialize_compute_service_with_simple_initialize( publisher_wallet, custom_services="vanilla_compute", custom_services_args=[] ) service = get_first_service_by_type(asset_w_compute_service, ServiceType.COMPUTE) - # mint_100_datatokens( - # web3, service.datatoken_address, consumer_wallet.address, publisher_wallet - # ) response = initialize_service( client, asset_w_compute_service.did, service, consumer_wallet, raw_response=True From 9efdea9cf6db6a853634373d918d3908d5d05f57 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 15:23:01 +0300 Subject: [PATCH 74/83] black. --- tests/test_initialize.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_initialize.py b/tests/test_initialize.py index c8b924a8..afd4a4fb 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -71,6 +71,7 @@ def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3) assert datatoken == service.datatoken_address + @pytest.mark.integration def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, web3): test3_start_timestamp = datetime.now() From 6eefaeca62a8667bccb1baa01f4d4b41984139f7 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 15:39:42 +0300 Subject: [PATCH 75/83] Removed prints. Mint datatokens. Removed helper function. --- tests/test_helpers.py | 9 ------ tests/test_initialize.py | 69 ++-------------------------------------- 2 files changed, 3 insertions(+), 75 deletions(-) diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 4b65c141..5ec84e9e 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -366,15 +366,6 @@ def get_dataset_with_invalid_url_ddo(client, wallet): ) -def get_dataset_with_ipfs_url_ddo(client, wallet): - return get_registered_asset( - wallet, - unencrypted_files_list=[ - {"type": "ipfs", "hash": "QmXtkGkWCG47tVpiBr8f5FdHuCMPq8h2jhck4jgjSXKiWZ"} - ], - ) - - def get_resource_path(dir_name, file_name): base = os.path.realpath(__file__).split(os.path.sep)[1:-1] if dir_name: diff --git a/tests/test_initialize.py b/tests/test_initialize.py index afd4a4fb..2f0b56fb 100644 --- a/tests/test_initialize.py +++ b/tests/test_initialize.py @@ -36,8 +36,6 @@ @pytest.mark.integration def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): - test1_start_timestamp = datetime.now() - print(f"test1_start_timestamp: {test1_start_timestamp}") asset = get_dataset_with_invalid_url_ddo(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -50,9 +48,6 @@ def test_initialize_on_bad_url(client, publisher_wallet, consumer_wallet, web3): ) assert "error" in response.json assert "Asset URL not found, not available or invalid." 
in response.json["error"] - test1_end_timestamp = datetime.now() - print(f"test1_end_timestamp: {test1_end_timestamp}") - print(f"test1 duration: {test1_end_timestamp - test1_start_timestamp}") @pytest.mark.integration @@ -74,8 +69,6 @@ def test_initialize_on_ipfs_url(client, publisher_wallet, consumer_wallet, web3) @pytest.mark.integration def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, web3): - test3_start_timestamp = datetime.now() - print(f"test3_start_timestamp: {test3_start_timestamp}") asset, real_asset = get_dataset_ddo_disabled(client, publisher_wallet) assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -85,15 +78,10 @@ def test_initialize_on_disabled_asset(client, publisher_wallet, consumer_wallet, ) assert "error" in response.json assert response.json["error"] == "Asset malformed or disabled." - test3_end_timestamp = datetime.now() - print(f"test3_end_timestamp: {test3_end_timestamp}") - print(f"test3 duration: {test3_end_timestamp - test3_start_timestamp}") @pytest.mark.integration def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, web3): - test4_start_timestamp = datetime.now() - print(f"test4_start_timestamp: {test4_start_timestamp}") asset, real_asset = get_dataset_ddo_unlisted(client, publisher_wallet) assert real_asset service = get_first_service_by_type(asset, ServiceType.ACCESS) @@ -103,17 +91,12 @@ def test_initialize_on_unlisted_asset(client, publisher_wallet, consumer_wallet, ) assert datatoken == service.datatoken_address - test4_end_timestamp = datetime.now() - print(f"test4_end_timestamp: {test4_end_timestamp}") - print(f"test4 duration: {test4_end_timestamp - test4_start_timestamp}") @pytest.mark.integration def test_initialize_on_asset_with_custom_credentials( client, publisher_wallet, consumer_wallet, web3 ): - test5_start_timestamp = datetime.now() - print(f"test5_start_timestamp: {test5_start_timestamp}") asset = get_dataset_ddo_with_denied_consumer( client, publisher_wallet, consumer_wallet.address ) @@ -128,18 +111,16 @@ def test_initialize_on_asset_with_custom_credentials( response.json["error"] == f"Error: Access to asset {asset.did} was denied with code: ConsumableCodes.CREDENTIAL_IN_DENY_LIST." 
) - test5_end_timestamp = datetime.now() - print(f"test5_end_timestamp: {test5_end_timestamp}") - print(f"test5 duration: {test5_end_timestamp - test5_start_timestamp}") @pytest.mark.integration def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): - test6_start_timestamp = datetime.now() - print(f"test6_start_timestamp: {test6_start_timestamp}") asset = get_dataset_ddo_with_multiple_files(client, publisher_wallet) service = get_first_service_by_type(asset, ServiceType.ACCESS) + mint_100_datatokens( + web3, service.datatoken_address, consumer_wallet.address, publisher_wallet + ) tx_id, _ = start_order( web3, @@ -176,17 +157,12 @@ def test_initialize_reuse(client, publisher_wallet, consumer_wallet, web3): assert response.json["datatoken"] == service.datatoken_address assert "validOrder" not in response.json - test6_end_timestamp = datetime.now() - print(f"test6_end_timestamp: {test6_end_timestamp}") - print(f"test6 duration: {test6_end_timestamp - test6_start_timestamp}") @pytest.mark.integration def test_can_not_initialize_compute_service_with_simple_initialize( client, publisher_wallet, consumer_wallet, web3 ): - test7_start_timestamp = datetime.now() - print(f"test7_start_timestamp: {test7_start_timestamp}") asset_w_compute_service = get_registered_asset( publisher_wallet, custom_services="vanilla_compute", custom_services_args=[] ) @@ -200,9 +176,6 @@ def test_can_not_initialize_compute_service_with_simple_initialize( response.json["error"] == "Use the initializeCompute endpoint to initialize compute jobs." ) - test7_end_timestamp = datetime.now() - print(f"test7_end_timestamp: {test7_end_timestamp}") - print(f"test7 duration: {test7_end_timestamp - test7_start_timestamp}") @pytest.mark.integration @@ -213,8 +186,6 @@ def test_initialize_compute_works( Assert response contains `datatoken` and `providerFee` and does not contain `validOrder` for both dataset and algorithm. 
""" - test8_start_timestamp = datetime.now() - print(f"test8_start_timestamp: {test8_start_timestamp}") ddo, alg_ddo = build_and_send_ddo_with_compute_service( client, publisher_wallet, @@ -258,9 +229,6 @@ def test_initialize_compute_works( assert "datatoken" in response.json["algorithm"] assert "providerFee" in response.json["algorithm"] assert "validOrder" not in response.json["algorithm"] - test8_end_timestamp = datetime.now() - print(f"test8_end_timestamp: {test8_end_timestamp}") - print(f"test8 duration: {test8_end_timestamp - test8_start_timestamp}") @pytest.mark.integration @@ -286,8 +254,6 @@ def test_initialize_compute_order_reused( Case 4: wrong tx id for dataset order """ - test9_start_timestamp = datetime.now() - print(f"test9_start_timestamp: {test9_start_timestamp}") # Order asset, valid for 30 seconds valid_until = get_future_valid_until(short=True) ddo, tx_id, alg_ddo, alg_tx_id = build_and_send_ddo_with_compute_service( @@ -391,17 +357,12 @@ def test_initialize_compute_order_reused( assert response.status_code == 200 assert "datatoken" in response.json["datasets"][0].keys() assert "providerFee" in response.json["datasets"][0].keys() - test9_end_timestamp = datetime.now() - print(f"test9_end_timestamp: {test9_end_timestamp}") - print(f"test9 duration: {test9_end_timestamp - test9_start_timestamp}") @pytest.mark.integration def test_initialize_compute_paid_env( client, publisher_wallet, consumer_wallet, paid_c2d_env ): - test10_start_timestamp = datetime.now() - print(f"test10_start_timestamp: {test10_start_timestamp}") ddo, alg_ddo = build_and_send_ddo_with_compute_service( client, publisher_wallet, @@ -441,27 +402,3 @@ def test_initialize_compute_paid_env( assert int( response.json["datasets"][0]["providerFee"]["providerFeeAmount"] ) >= to_wei(7) - test10_end_timestamp = datetime.now() - print(f"test10_end_timestamp: {test10_end_timestamp}") - print(f"test10 duration: {test10_end_timestamp - test10_start_timestamp}") - - -def test_socket(): - test11_start_timestamp = datetime.now() - print(f"test11_start_timestamp: {test11_start_timestamp}") - import socket - - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - - s.connect(("172.15.0.13", 31000)) - print(f"socket connected successfully to op serv") - s.send(b"GET / HTTP/1.1\r\nHost:172.15.0.13\r\n\r\n") - print(f"socket send GET request successfully to op serv") - response = s.recv(4096) - s.close() - print(f"socket closed successfully") - assert response - print(f"response from socket op serv: {response.decode()}") - test11_end_timestamp = datetime.now() - print(f"test10_end_timestamp: {test11_end_timestamp}") - print(f"test10 duration: {test11_end_timestamp - test11_start_timestamp}") From 1cf05ce022b1e94601fd0c960e0cc4041ef2d601 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 15:52:53 +0300 Subject: [PATCH 76/83] Run the whole test suite. 
--- .github/workflows/pytest.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index e64d19b8..268ae505 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Setup Provider - uses: actions/checkout@v3 + uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 with: @@ -55,7 +55,9 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_initialize.py + coverage run --source ocean_provider -m pytest + coverage report + coverage xml - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 && docker logs ocean_ipfs_1 if: ${{ failure() }} From c5c6102ea80ec1b4958062c0090ca0f086306067 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 16:32:27 +0300 Subject: [PATCH 77/83] Tested just graphql. --- .github/workflows/pytest.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 268ae505..89d776a9 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -55,9 +55,7 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - coverage run --source ocean_provider -m pytest - coverage report - coverage xml + pytest tests/test_graphql.py::test_download_graphql_asset_with_userdata - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 && docker logs ocean_ipfs_1 if: ${{ failure() }} From e673b33cd1fd452abe23c4fa4d8edc74943837fe Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 18:43:00 +0300 Subject: [PATCH 78/83] Added full graph deployment. --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 89d776a9..c85daa6e 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -33,7 +33,7 @@ jobs: - name: Run Barge working-directory: ${{ github.workspace }}/barge run: | - bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d --with-thegraph --skip-subgraph-deploy > start_ocean.log & + bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d --with-thegraph > start_ocean.log & - name: Wait for contracts deployment and C2D cluster to be ready working-directory: ${{ github.workspace }}/barge run: | From 0c9eba8a66bfd8bc4e7852122c701e87f79cccf3 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 18:54:04 +0300 Subject: [PATCH 79/83] Added tests back. 
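Note: with the operator service verified reachable earlier in the workflow,
the next patch in the series (80/83) also drops the try/except fallback from
the C2D fixtures in conftest.py. Until now, a failed environment lookup
skipped every dependent test:

    try:
        environments = get_c2d_environments(flat=True)
    except AssertionError:
        pytest.skip("C2D connection failed. Need fix in #610")

Removing the fallback means an unreachable cluster now surfaces as a hard test
error rather than a silent skip, which is what the CI debugging in this series
needs.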
--- .github/workflows/pytest.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index c85daa6e..21b61d52 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -55,7 +55,9 @@ jobs: pip install -r requirements_dev.txt - name: Test with pytest run: | - pytest tests/test_graphql.py::test_download_graphql_asset_with_userdata + coverage run --source ocean_provider -m pytest + coverage report + coverage xml - name: docker logs run: docker logs ocean_aquarius_1 && docker logs ocean_provider_1 && docker logs ocean_provider2_1 && docker logs ocean_ipfs_1 if: ${{ failure() }} From a1dcc93206060c8ab70f75d8248e23b241c35a40 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 23:02:13 +0300 Subject: [PATCH 80/83] Removed pytest.skip. --- conftest.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/conftest.py b/conftest.py index 7ea1aa4e..32e164f0 100644 --- a/conftest.py +++ b/conftest.py @@ -96,19 +96,13 @@ def web3(): @pytest.fixture def free_c2d_env(): - try: - environments = get_c2d_environments(flat=True) - except AssertionError: - pytest.skip("C2D connection failed. Need fix in #610") + environments = get_c2d_environments(flat=True) return next(env for env in environments if float(env["priceMin"]) == float(0)) @pytest.fixture def paid_c2d_env(): - try: - environments = get_c2d_environments(flat=True) - except AssertionError: - pytest.skip("C2D connection failed. Need fix in #610") + environments = get_c2d_environments(flat=True) return next(env for env in environments if env["id"] == "ocean-compute-env2") From a1c5aee51c906b75502d39fd8b2aa7fa2a4cd246 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Tue, 11 Apr 2023 23:44:16 +0300 Subject: [PATCH 81/83] Rename script. Modified graphql test. --- .github/workflows/pytest.yml | 2 +- start_provider.sh => start_local_provider.sh | 0 tests/test_graphql.py | 9 ++++++--- 3 files changed, 7 insertions(+), 4 deletions(-) rename start_provider.sh => start_local_provider.sh (100%) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 21b61d52..268ae505 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -33,7 +33,7 @@ jobs: - name: Run Barge working-directory: ${{ github.workspace }}/barge run: | - bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d --with-thegraph > start_ocean.log & + bash -x start_ocean.sh --no-dashboard 2>&1 --with-rbac --with-provider2 --with-c2d --with-thegraph --skip-subgraph-deploy > start_ocean.log & - name: Wait for contracts deployment and C2D cluster to be ready working-directory: ${{ github.workspace }}/barge run: | diff --git a/start_provider.sh b/start_local_provider.sh similarity index 100% rename from start_provider.sh rename to start_local_provider.sh diff --git a/tests/test_graphql.py b/tests/test_graphql.py index 89556af9..44b91c9d 100644 --- a/tests/test_graphql.py +++ b/tests/test_graphql.py @@ -137,7 +137,10 @@ def test_download_graphql_asset_with_userdata( ) assert response.status_code == 200, f"{response.data}" reply = json.loads(response.data) + assert len(reply["errors"]) == 1 + # Make sure the subgraph is deployed. 
Response has 200 OK status code from subgraph + # Due to lack of CI resources, assert the specific error assert ( - len(reply["data"]["nfts"]) == 1 - ) # make sure our parametrized query works, otherwise we will get a lot of nfts - assert reply["data"]["nfts"][0]["id"] == asset.nftAddress.lower() + reply["errors"][0]["message"] + == "deployment `oceanprotocol/ocean-subgraph` does not exist" + ) From 629c89e6eb2865ff65f1b3790236c3bfe8dbd0a3 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Thu, 13 Apr 2023 12:55:34 +0300 Subject: [PATCH 82/83] Replaced subgraph URL. --- tests/test_graphql.py | 37 ++++++++++++++++--------------------- 1 file changed, 16 insertions(+), 21 deletions(-) diff --git a/tests/test_graphql.py b/tests/test_graphql.py index 44b91c9d..472c3a8c 100644 --- a/tests/test_graphql.py +++ b/tests/test_graphql.py @@ -23,16 +23,16 @@ def test_download_graphql_asset(client, publisher_wallet, consumer_wallet, web3) unencrypted_files_list = [ { "type": "graphql", - "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", + "url": "http://172.15.0.15:8030/graphql", "query": """ - query{ - nfts(orderBy: createdTimestamp,orderDirection:desc){ - id - symbol - createdTimestamp + query { + indexingStatuses { + subgraph + chains + node + } } - } - """, + """, } ] asset = get_registered_asset( @@ -79,13 +79,13 @@ def test_download_graphql_asset_with_userdata( unencrypted_files_list = [ { "type": "graphql", - "url": "http://172.15.0.15:8000/subgraphs/name/oceanprotocol/ocean-subgraph", + "url": "http://172.15.0.15:8030/graphql", "query": """ - query nfts($nftAddress: String){ - nfts(where: {id:$nftAddress},orderBy: createdTimestamp,orderDirection:desc){ - id - symbol - createdTimestamp + query { + indexingStatuses { + subgraph + chains + node } } """, @@ -137,10 +137,5 @@ def test_download_graphql_asset_with_userdata( ) assert response.status_code == 200, f"{response.data}" reply = json.loads(response.data) - assert len(reply["errors"]) == 1 - # Make sure the subgraph is deployed. Response has 200 OK status code from subgraph - # Due to lack of CI resources, assert the specific error - assert ( - reply["errors"][0]["message"] - == "deployment `oceanprotocol/ocean-subgraph` does not exist" - ) + assert len(reply["data"]) == 1 + assert "indexingStatuses" in reply["data"].keys() From 20894828dbf4ee41b1617af7774430d5d35989a1 Mon Sep 17 00:00:00 2001 From: Maria Carmina Date: Thu, 13 Apr 2023 15:58:41 +0300 Subject: [PATCH 83/83] Adapted test with parameterized query. Added aquarius URL back. 
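Note: the parameterized GetSubgraph query targets graph-node's own status API
(port 8030), so the userdata-to-GraphQL-variables path is exercised without
requiring an ocean-subgraph deployment. A hedged sketch of the request the
provider effectively issues (endpoint, query, and variables taken from the
test; the direct requests call is illustrative):

    import requests

    query = """
    query GetSubgraph($name: [String!]) {
      indexingStatuses(subgraphs: $name) {
        subgraph
        chains
        node
      }
    }
    """

    # Standard GraphQL-over-HTTP: the query and its variables travel in a
    # single JSON POST body.
    resp = requests.post(
        "http://172.15.0.15:8030/graphql",
        json={"query": query, "variables": {"name": ["subgraph"]}},
    )
    resp.raise_for_status()
    print(resp.json()["data"]["indexingStatuses"])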
--- compose-env-values | 2 +- tests/test_graphql.py | 21 +++++++++++---------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/compose-env-values b/compose-env-values index 98c91935..70213a9d 100644 --- a/compose-env-values +++ b/compose-env-values @@ -5,7 +5,7 @@ PARITY_KEYFILE1=/ocean-provider/tests/resources/consumer_key_file.json PROVIDER_ADDRESS=068ed00cf0441e4829d9784fcbe7b9e26d4bd8d0 NETWORK_URL=wss://rinkeby.infura.io/ws/v3/357f2fe737db4304bd2f7285c5602d0d -AQUARIUS_URL=https://v4.aquarius.oceanprotocol.com +AQUARIUS_URL=https://aquarius.marketplace.dev-ocean.com OCEAN_PROVIDER_URL=http://0.0.0.0:8030 OPERATOR_SERVICE_URL=https://stagev4.c2d.oceanprotocol.com diff --git a/tests/test_graphql.py b/tests/test_graphql.py index 472c3a8c..45b593d3 100644 --- a/tests/test_graphql.py +++ b/tests/test_graphql.py @@ -81,13 +81,13 @@ def test_download_graphql_asset_with_userdata( "type": "graphql", "url": "http://172.15.0.15:8030/graphql", "query": """ - query { - indexingStatuses { - subgraph - chains - node + query GetSubgraph($name: [String!]){ + indexingStatuses(subgraphs: $name) { + subgraph + chains + node + } } - } """, } ] @@ -96,11 +96,12 @@ def test_download_graphql_asset_with_userdata( unencrypted_files_list=unencrypted_files_list, custom_userdata=[ { - "name": "nftAddress", + "name": "name", "type": "text", - "label": "nftAddress", + "label": "name", "required": True, - "description": "Nft to search for", + "description": "Subgraph indexing status", + "default": ["subgraph"], } ], ) @@ -123,7 +124,7 @@ def test_download_graphql_asset_with_userdata( "consumerAddress": consumer_wallet.address, "transferTxId": tx_id, "fileIndex": 0, - "userdata": json.dumps({"nftAddress": asset.nftAddress.lower()}), + "userdata": json.dumps({"name": ["subgraph"]}), } download_endpoint = BaseURLs.SERVICES_URL + "/download"
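Note: end to end, the reworked test publishes the asset with a `name` consumer
parameter (default ["subgraph"]) and passes the matching value at download
time. A minimal sketch of the download call with the Flask test client,
abridged to the fields visible in the final hunk (asset, service, tx_id, and
consumer_wallet come from the surrounding test; the real request also carries
the auth fields the test helpers add, such as nonce and signature, and the
serviceId key is an assumption based on usage elsewhere in the suite):

    import json

    from ocean_provider.constants import BaseURLs

    payload = {
        "serviceId": service.id,  # assumed field, not shown in the hunk
        "consumerAddress": consumer_wallet.address,
        "transferTxId": tx_id,
        "fileIndex": 0,
        "userdata": json.dumps({"name": ["subgraph"]}),
    }
    download_endpoint = BaseURLs.SERVICES_URL + "/download"

    # The Flask test client serializes query_string entries as URL parameters.
    response = client.get(download_endpoint, query_string=payload)
    assert response.status_code == 200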