diff --git a/aries_cloudagent/config/default_context.py b/aries_cloudagent/config/default_context.py index 3a64617339..8b6bd4549f 100644 --- a/aries_cloudagent/config/default_context.py +++ b/aries_cloudagent/config/default_context.py @@ -117,7 +117,7 @@ async def bind_providers(self, context: InjectionContext): BaseVerifier, ClassProvider( "aries_cloudagent.verifier.indy.IndyVerifier", - ClassProvider.Inject(BaseWallet), + ClassProvider.Inject(BaseLedger), ), ) diff --git a/aries_cloudagent/messaging/credential_definitions/tests/__init__.py b/aries_cloudagent/messaging/credential_definitions/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py b/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py new file mode 100644 index 0000000000..75bb1212cd --- /dev/null +++ b/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py @@ -0,0 +1,118 @@ +from asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock + +from aiohttp import web as aio_web + +from ....config.injection_context import InjectionContext +from ....issuer.base import BaseIssuer +from ....ledger.base import BaseLedger +from ....storage.base import BaseStorage +from ....messaging.request_context import RequestContext + +from .. import routes as test_module + + +SCHEMA_ID = "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0" +CRED_DEF_ID = "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag" + + +class TestCredentialDefinitionRoutes(AsyncTestCase): + def setUp(self): + self.context = InjectionContext(enforce_typing=False) + + self.ledger = async_mock.create_autospec(BaseLedger) + self.ledger.__aenter__ = async_mock.CoroutineMock(return_value=self.ledger) + self.ledger.create_and_send_credential_definition = async_mock.CoroutineMock( + return_value=(CRED_DEF_ID, {"cred": "def"}) + ) + self.ledger.get_credential_definition = async_mock.CoroutineMock( + return_value={"cred": "def"} + ) + self.context.injector.bind_instance(BaseLedger, self.ledger) + + self.issuer = async_mock.create_autospec(BaseIssuer) + self.context.injector.bind_instance(BaseIssuer, self.issuer) + + self.storage = async_mock.create_autospec(BaseStorage) + self.storage.search_records = async_mock.MagicMock( + return_value=async_mock.MagicMock( + fetch_all=async_mock.CoroutineMock( + return_value=[async_mock.MagicMock(value=CRED_DEF_ID)] + ) + ) + ) + self.context.injector.bind_instance(BaseStorage, self.storage) + + self.app = { + "request_context": self.context, + } + + async def test_send_credential_definition(self): + mock_request = async_mock.MagicMock( + app=self.app, + json=async_mock.CoroutineMock( + return_value={ + "schema_id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0", + "support_revocation": False, + "tag": "tag", + } + ), + ) + + with async_mock.patch.object(test_module.web, "json_response") as mock_response: + result = await test_module.credential_definitions_send_credential_definition( + mock_request + ) + assert result == mock_response.return_value + mock_response.assert_called_once_with( + {"credential_definition_id": CRED_DEF_ID} + ) + + async def test_created(self): + mock_request = async_mock.MagicMock( + app=self.app, + json=async_mock.CoroutineMock( + return_value={ + "schema_id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0", + "support_revocation": False, + "tag": "tag", + } + ), + match_info={"cred_def_id": CRED_DEF_ID}, + ) + + with async_mock.patch.object(test_module.web, "json_response") as mock_response: + 
result = await test_module.credential_definitions_created(mock_request) + assert result == mock_response.return_value + mock_response.assert_called_once_with( + {"credential_definition_ids": [CRED_DEF_ID]} + ) + + async def test_get_credential_definition(self): + mock_request = async_mock.MagicMock( + app=self.app, + json=async_mock.CoroutineMock( + return_value={ + "schema_id": "WgWxqztrNooG92RXvxSTWv:2:schema_name:1.0", + "support_revocation": False, + "tag": "tag", + } + ), + match_info={"cred_def_id": CRED_DEF_ID}, + ) + + with async_mock.patch.object(test_module.web, "json_response") as mock_response: + result = await test_module.credential_definitions_get_credential_definition( + mock_request + ) + assert result == mock_response.return_value + mock_response.assert_called_once_with( + {"credential_definition": {"cred": "def"}} + ) + + async def test_register(self): + mock_app = async_mock.MagicMock() + mock_app.add_routes = async_mock.MagicMock() + + await test_module.register(mock_app) + mock_app.add_routes.assert_called_once() diff --git a/aries_cloudagent/messaging/decorators/localization_decorator.py b/aries_cloudagent/messaging/decorators/localization_decorator.py index dcb922209b..f45b6ca17e 100644 --- a/aries_cloudagent/messaging/decorators/localization_decorator.py +++ b/aries_cloudagent/messaging/decorators/localization_decorator.py @@ -31,7 +31,7 @@ def __init__( catalogs: A list of URLs for localization resources """ - super(LocalizationDecorator, self).__init__() + super().__init__() self.locale = locale self.localizable = list(localizable) if localizable else [] self.catalogs = list(catalogs) if catalogs else [] diff --git a/aries_cloudagent/messaging/decorators/tests/test_base.py b/aries_cloudagent/messaging/decorators/tests/test_base.py new file mode 100644 index 0000000000..f1d1662edd --- /dev/null +++ b/aries_cloudagent/messaging/decorators/tests/test_base.py @@ -0,0 +1,72 @@ +import json +import pytest +import uuid + +from copy import deepcopy +from datetime import datetime, timezone +from time import time +from unittest import TestCase + +from marshmallow import fields + +from ....messaging.models.base import BaseModel, BaseModelSchema + +from ..base import BaseDecoratorSet, DECORATOR_PREFIX + + +class SampleDecorator(BaseModel): + """Sample model for base decorator tests.""" + + class Meta: + """Sample decorator metadata.""" + + schema_class = "SampleDecoratorSchema" + + def __init__(self, score: int, **kwargs): + """Initializer.""" + super().__init__(**kwargs) + self.score = score + + +class SampleDecoratorSchema(BaseModelSchema): + """Sample schema decorator for base decorator tests.""" + + class Meta: + model_class = SampleDecorator + + score = fields.Int(required=True) + + +class TestBaseDecoratorSet(TestCase): + def test_base_decorator_set(self): + MODELS = {"a": SampleDecorator} + deco_set = BaseDecoratorSet(MODELS) + assert type(deco_set) == BaseDecoratorSet + assert not deco_set.fields + assert deco_set.models == MODELS + assert deco_set.prefix == DECORATOR_PREFIX + assert BaseDecoratorSet.__name__ in str(deco_set) + + deco_set_copy = deco_set.copy() + assert type(deco_set_copy) == BaseDecoratorSet + assert not deco_set_copy.fields + assert deco_set_copy.models == MODELS + assert deco_set_copy.prefix == DECORATOR_PREFIX + + assert not deco_set.has_field("x") + deco_set.field("x") + assert not deco_set.has_field("x") # empty + assert not len(deco_set.field("x")) + deco_set.remove_field("x") + assert not deco_set.has_field("x") + + deco_set.add_model("c", 
SampleDecorator) + assert "c" in deco_set.models + deco_set.remove_model("c") + assert "c" not in deco_set.models + + with pytest.raises(ValueError): + deco_set["a"] = None + deco_set["a"] = {"score": 23} + deco_set["a"] = SampleDecorator(23) + deco_set.load_decorator("a", None) diff --git a/aries_cloudagent/messaging/decorators/tests/test_decorator_set.py b/aries_cloudagent/messaging/decorators/tests/test_decorator_set.py index 6c8cd99154..6acd7e8b4d 100644 --- a/aries_cloudagent/messaging/decorators/tests/test_decorator_set.py +++ b/aries_cloudagent/messaging/decorators/tests/test_decorator_set.py @@ -4,6 +4,7 @@ from ...models.base import BaseModel, BaseModelSchema from ..base import BaseDecoratorSet +from ..default import DecoratorSet, DEFAULT_MODELS class SimpleModel(BaseModel): @@ -25,6 +26,10 @@ class Meta: class TestDecoratorSet(TestCase): + def test_deco_set(self): + deco_set = DecoratorSet() + assert all(k in deco_set.models for k in DEFAULT_MODELS) + def test_extract(self): decor_value = {} diff --git a/aries_cloudagent/messaging/decorators/tests/test_localization_decorator.py b/aries_cloudagent/messaging/decorators/tests/test_localization_decorator.py new file mode 100644 index 0000000000..93e7e32d4b --- /dev/null +++ b/aries_cloudagent/messaging/decorators/tests/test_localization_decorator.py @@ -0,0 +1,39 @@ +from ..localization_decorator import LocalizationDecorator + +from unittest import TestCase + + +class TestThreadDecorator(TestCase): + + LOCALE = "en-ca" + LOCALIZABLE = ["a", "b"] + CATALOGS = ["http://192.168.56.111/my-project/catalog.json"] + + def test_init(self): + decorator = LocalizationDecorator() + assert decorator.locale is None + assert decorator.localizable == [] + assert decorator.catalogs == [] + + decorator = LocalizationDecorator( + locale=TestThreadDecorator.LOCALE, + localizable=TestThreadDecorator.LOCALIZABLE, + catalogs=TestThreadDecorator.CATALOGS, + ) + assert decorator.locale == TestThreadDecorator.LOCALE + assert decorator.localizable == TestThreadDecorator.LOCALIZABLE + assert decorator.catalogs == TestThreadDecorator.CATALOGS + + def test_serialize_load(self): + decorator = LocalizationDecorator( + locale=TestThreadDecorator.LOCALE, + localizable=TestThreadDecorator.LOCALIZABLE, + catalogs=TestThreadDecorator.CATALOGS, + ) + + dumped = decorator.serialize() + loaded = LocalizationDecorator.deserialize(dumped) + + assert loaded.locale == self.LOCALE + assert loaded.localizable == self.LOCALIZABLE + assert loaded.catalogs == self.CATALOGS diff --git a/aries_cloudagent/messaging/decorators/tests/test_thread_decorator.py b/aries_cloudagent/messaging/decorators/tests/test_thread_decorator.py index 57a6ee695a..898930233f 100644 --- a/aries_cloudagent/messaging/decorators/tests/test_thread_decorator.py +++ b/aries_cloudagent/messaging/decorators/tests/test_thread_decorator.py @@ -39,3 +39,6 @@ def test_serialize_load(self): assert loaded.pthid == self.parent_id assert loaded.sender_order == self.sender_order assert loaded.received_orders == self.received_orders + + loaded.pthid = "dummy" + assert loaded.pthid == "dummy" diff --git a/aries_cloudagent/messaging/decorators/tests/test_timing_decorator.py b/aries_cloudagent/messaging/decorators/tests/test_timing_decorator.py new file mode 100644 index 0000000000..8e3de33072 --- /dev/null +++ b/aries_cloudagent/messaging/decorators/tests/test_timing_decorator.py @@ -0,0 +1,23 @@ +from datetime import datetime +from unittest import TestCase + +from ...util import datetime_to_str +from ..timing_decorator 
import TimingDecorator, TimingDecoratorSchema + + +NOW = datetime.now() + + +class TestTimingDecorator(TestCase): + def test_serialize_load(self): + deco = TimingDecorator(in_time=NOW, out_time=NOW,) + + assert deco.in_time == datetime_to_str(NOW) + assert deco.out_time == datetime_to_str(NOW) + assert not deco.stale_time + assert not deco.expires_time + assert not deco.delay_milli + assert not deco.wait_until_time + + dumped = deco.serialize() + loaded = TimingDecorator.deserialize(dumped) diff --git a/aries_cloudagent/messaging/decorators/tests/test_transport_decorator.py b/aries_cloudagent/messaging/decorators/tests/test_transport_decorator.py new file mode 100644 index 0000000000..e2cca257cc --- /dev/null +++ b/aries_cloudagent/messaging/decorators/tests/test_transport_decorator.py @@ -0,0 +1,20 @@ +from unittest import TestCase + +from ...valid import UUIDFour +from ..transport_decorator import TransportDecorator, TransportDecoratorSchema + + +class TestTransportDecorator(TestCase): + def test_serialize_load(self): + deco = TransportDecorator( + return_route="all", + return_route_thread=UUIDFour.EXAMPLE, + queued_message_count=23, + ) + + assert deco.return_route == "all" + assert deco.return_route_thread == UUIDFour.EXAMPLE + assert deco.queued_message_count == 23 + + dumped = deco.serialize() + loaded = TransportDecorator.deserialize(dumped) diff --git a/aries_cloudagent/messaging/decorators/transport_decorator.py b/aries_cloudagent/messaging/decorators/transport_decorator.py index 0f9bb5c2de..22cc0c1f26 100644 --- a/aries_cloudagent/messaging/decorators/transport_decorator.py +++ b/aries_cloudagent/messaging/decorators/transport_decorator.py @@ -7,7 +7,7 @@ from marshmallow import fields, validate from ..models.base import BaseModel, BaseModelSchema -from ..valid import UUIDFour +from ..valid import UUIDFour, WHOLE_NUM class TransportDecorator(BaseModel): @@ -33,7 +33,7 @@ def __init__( return_route_thread: Identify the thread to enable return routing for queued_message_count: Indicate the number of queued messages """ - super(TransportDecorator, self).__init__() + super().__init__() self.return_route = return_route self.return_route_thread = return_route_thread self.queued_message_count = queued_message_count @@ -59,5 +59,5 @@ class Meta: example=UUIDFour.EXAMPLE, ) queued_message_count = fields.Int( - required=False, description="Number of queued messages", example=3, + required=False, description="Number of queued messages", **WHOLE_NUM ) diff --git a/aries_cloudagent/protocols/connections/v1_0/manager.py b/aries_cloudagent/protocols/connections/v1_0/manager.py index 7afb30e136..70f683363f 100644 --- a/aries_cloudagent/protocols/connections/v1_0/manager.py +++ b/aries_cloudagent/protocols/connections/v1_0/manager.py @@ -296,8 +296,11 @@ async def create_request( # Create connection request message if not my_endpoint: - my_endpoints = [self.context.settings.get("default_endpoint")] - my_endpoints.extend(self.context.settings.get("additional_endpoints")) + my_endpoints = [] + default_endpoint = self.context.settings.get("default_endpoint") + if default_endpoint: + my_endpoints.append(default_endpoint) + my_endpoints.extend(self.context.settings.get("additional_endpoints", [])) else: my_endpoints = [my_endpoint] did_doc = await self.create_did_document( @@ -478,8 +481,11 @@ async def create_response( # Create connection response message if not my_endpoint: - my_endpoints = [self.context.settings.get("default_endpoint")] - 
my_endpoints.extend(self.context.settings.get("additional_endpoints")) + my_endpoints = [] + default_endpoint = self.context.settings.get("default_endpoint") + if default_endpoint: + my_endpoints.append(default_endpoint) + my_endpoints.extend(self.context.settings.get("additional_endpoints", [])) did_doc = await self.create_did_document( my_info, connection.inbound_connection_id, my_endpoints ) diff --git a/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py index 354d2eaaff..28f22cb4a8 100644 --- a/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py @@ -77,7 +77,7 @@ def setUp(self): { "default_endpoint": "http://aries.ca/endpoint", "default_label": "This guy", - "additional_endpoints": [], + "additional_endpoints": ["http://aries.ca/another-endpoint"], "debug.auto_accept_invites": True, "debug.auto_accept_requests": True, } diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/manager.py b/aries_cloudagent/protocols/issue_credential/v1_0/manager.py index 81ae6f9a93..0a436f484d 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/manager.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/manager.py @@ -81,8 +81,7 @@ async def prepare_send( Args: connection_id: Connection to create offer for - credential_proposal: The credential proposal with preview on - attribute values to use if auto_issue is enabled + credential_proposal: The credential proposal with preview auto_remove: Flag to automatically remove the record on completion Returns: diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/routes.py b/aries_cloudagent/protocols/issue_credential/v1_0/routes.py index 0095e03f4a..496a6b08b4 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/routes.py @@ -101,6 +101,11 @@ class V10CredentialProposalRequestSchemaBase(AdminAPIMessageTracingSchema): required=False, ) comment = fields.Str(description="Human-readable comment", required=False) + trace = fields.Bool( + description="Whether to trace event (default false)", + required=False, + example=False, + ) class V10CredentialProposalRequestOptSchema(V10CredentialProposalRequestSchemaBase): @@ -145,6 +150,11 @@ class V10CredentialOfferRequestSchema(AdminAPIMessageTracingSchema): ) comment = fields.Str(description="Human-readable comment", required=False) credential_preview = fields.Nested(CredentialPreviewSchema, required=True) + trace = fields.Bool( + description="Whether to trace event (default false)", + required=False, + example=False, + ) class V10CredentialIssueRequestSchema(Schema): @@ -427,7 +437,7 @@ async def credential_exchange_send_proposal(request: web.BaseRequest): @docs( tags=["issue-credential"], - summary="Send holder a credential offer, independent of any proposal with preview", + summary="Send holder a credential offer, independent of any proposal", ) @request_schema(V10CredentialOfferRequestSchema()) @response_schema(V10CredentialExchangeSchema(), 200) @@ -454,21 +464,20 @@ async def credential_exchange_send_free_offer(request: web.BaseRequest): connection_id = body.get("connection_id") cred_def_id = body.get("cred_def_id") + if not cred_def_id: + raise web.HTTPBadRequest(reason="cred_def_id is required") + auto_issue = body.get( "auto_issue", context.settings.get("debug.auto_respond_credential_request") ) + auto_remove = body.get("auto_remove") comment = 
body.get("comment") preview_spec = body.get("credential_preview") - trace_msg = body.get("trace") - - if not cred_def_id: - raise web.HTTPBadRequest(reason="cred_def_id is required") + if not preview_spec: + raise web.HTTPBadRequest(reason=("Missing credential_preview")) - if auto_issue and not preview_spec: - raise web.HTTPBadRequest( - reason=("If auto_issue is set then credential_preview must be provided") - ) + trace_msg = body.get("trace") try: connection_record = await ConnectionRecord.retrieve_by_id( @@ -480,19 +489,16 @@ async def credential_exchange_send_free_offer(request: web.BaseRequest): if not connection_record.is_ready: raise web.HTTPForbidden() - if preview_spec: - credential_preview = CredentialPreview.deserialize(preview_spec) - credential_proposal = CredentialProposal( - comment=comment, - credential_proposal=credential_preview, - cred_def_id=cred_def_id, - ) - credential_proposal.assign_trace_decorator( - context.settings, trace_msg, - ) - credential_proposal_dict = credential_proposal.serialize() - else: - credential_proposal_dict = None + credential_preview = CredentialPreview.deserialize(preview_spec) + credential_proposal = CredentialProposal( + comment=comment, + credential_proposal=credential_preview, + cred_def_id=cred_def_id, + ) + credential_proposal.assign_trace_decorator( + context.settings, trace_msg, + ) + credential_proposal_dict = credential_proposal.serialize() credential_exchange_record = V10CredentialExchange( connection_id=connection_id, @@ -671,7 +677,6 @@ async def credential_exchange_issue(request: web.BaseRequest): body = await request.json() comment = body.get("comment") preview_spec = body.get("credential_preview") - if not preview_spec: raise web.HTTPBadRequest(reason="credential_preview must be provided") diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py index 14ded13582..36849e051f 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py @@ -329,7 +329,13 @@ async def test_credential_exchange_send_proposal_not_ready(self): async def test_credential_exchange_send_free_offer(self): mock = async_mock.MagicMock() mock.json = async_mock.CoroutineMock( - return_value={"auto_issue": False, "cred_def_id": "cred-def-id",} + return_value={ + "auto_issue": False, + "cred_def_id": "cred-def-id", + "credential_preview": { + "attributes": [{"name": "hello", "value": "world"}] + }, + } ) mock.app = { @@ -384,14 +390,10 @@ async def test_credential_exchange_send_free_offer_no_cred_def_id(self): with self.assertRaises(test_module.web.HTTPBadRequest): await test_module.credential_exchange_send_free_offer(mock) - async def test_credential_exchange_send_free_offer_auto_issue_no_preview(self): + async def test_credential_exchange_send_free_offer_no_preview(self): mock = async_mock.MagicMock() mock.json = async_mock.CoroutineMock() - mock.json.return_value = { - "comment": "comment", - "cred_def_id": "dummy", - "auto_issue": True, - } + mock.json.return_value = {"comment": "comment", "cred_def_id": "dummy"} mock.app = { "outbound_message_router": async_mock.CoroutineMock(), @@ -407,7 +409,11 @@ async def test_credential_exchange_send_free_offer_auto_issue_no_preview(self): async def test_credential_exchange_send_free_offer_no_conn_record(self): mock = async_mock.MagicMock() mock.json = async_mock.CoroutineMock( - return_value={"auto_issue": False, 
"cred_def_id": "cred-def-id",} + return_value={ + "auto_issue": False, + "cred_def_id": "cred-def-id", + "credential_preview": "dummy", + } ) mock.app = { @@ -474,51 +480,6 @@ async def test_credential_exchange_send_free_offer_not_ready(self): with self.assertRaises(test_module.web.HTTPForbidden): await test_module.credential_exchange_send_free_offer(mock) - async def test_credential_exchange_send_free_offer_with_preview(self): - mock = async_mock.MagicMock() - mock.json = async_mock.CoroutineMock( - return_value={ - "auto_issue": False, - "cred_def_id": "cred-def-id", - "credential_preview": { - "attributes": [{"name": "hello", "value": "world"}] - }, - } - ) - - mock.app = { - "outbound_message_router": async_mock.CoroutineMock(), - "request_context": async_mock.patch.object( - aio_web, "BaseRequest", autospec=True - ), - } - mock.app["request_context"].settings = {} - - with async_mock.patch.object( - test_module, "ConnectionRecord", autospec=True - ) as mock_connection_record, async_mock.patch.object( - test_module, "CredentialManager", autospec=True - ) as mock_credential_manager, async_mock.patch.object( - test_module.web, "json_response" - ) as mock_response: - - mock_credential_manager.return_value.create_offer = ( - async_mock.CoroutineMock() - ) - - mock_cred_ex_record = async_mock.MagicMock() - - mock_credential_manager.return_value.create_offer.return_value = ( - mock_cred_ex_record, - async_mock.MagicMock(), - ) - - await test_module.credential_exchange_send_free_offer(mock) - - mock_response.assert_called_once_with( - mock_cred_ex_record.serialize.return_value - ) - async def test_credential_exchange_send_bound_offer(self): mock = async_mock.MagicMock() mock.json = async_mock.CoroutineMock() diff --git a/aries_cloudagent/protocols/present_proof/v1_0/routes.py b/aries_cloudagent/protocols/present_proof/v1_0/routes.py index 0bc1d78f63..6bb986c333 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/routes.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/routes.py @@ -76,6 +76,11 @@ class V10PresentationProposalRequestSchema(AdminAPIMessageTracingSchema): required=False, default=False, ) + trace = fields.Bool( + description="Whether to trace event (default false)", + required=False, + example=False, + ) class IndyProofReqSpecRestrictionsSchema(Schema): @@ -199,14 +204,26 @@ class IndyProofRequestSchema(Schema): non_revoked = fields.Nested(IndyProofReqNonRevokedSchema(), required=False) -class V10PresentationRequestRequestSchema(AdminAPIMessageTracingSchema): - """Request schema for sending a proof request.""" +class V10PresentationCreateRequestRequestSchema(AdminAPIMessageTracingSchema): + """Request schema for creating a proof request free of any connection.""" + + proof_request = fields.Nested(IndyProofRequestSchema(), required=True) + comment = fields.Str(required=False) + trace = fields.Bool( + description="Whether to trace event (default false)", + required=False, + example=False, + ) + + +class V10PresentationSendRequestRequestSchema( + V10PresentationCreateRequestRequestSchema +): + """Request schema for sending a proof request on a connection.""" connection_id = fields.UUID( description="Connection identifier", required=True, example=UUIDFour.EXAMPLE ) - proof_request = fields.Nested(IndyProofRequestSchema(), required=True) - comment = fields.Str(required=False) class IndyRequestedCredsRequestedAttrSchema(Schema): @@ -279,6 +296,11 @@ class V10PresentationRequestSchema(AdminAPIMessageTracingSchema): keys=fields.Str(example="pred_referent"), # 
marshmallow/apispec v3.0 ignores values=fields.Nested(IndyRequestedCredsRequestedPredSchema()), ) + trace = fields.Bool( + description="Whether to trace event (default false)", + required=False, + example=False, + ) class CredentialsFetchQueryStringSchema(Schema): @@ -496,7 +518,7 @@ async def presentation_exchange_send_proposal(request: web.BaseRequest): Creates a presentation request not bound to any proposal or existing connection """, ) -@request_schema(V10PresentationRequestRequestSchema()) +@request_schema(V10PresentationCreateRequestRequestSchema()) @response_schema(V10PresentationExchangeSchema(), 200) async def presentation_exchange_create_request(request: web.BaseRequest): """ @@ -562,7 +584,7 @@ async def presentation_exchange_create_request(request: web.BaseRequest): tags=["present-proof"], summary="Sends a free presentation request not bound to any proposal", ) -@request_schema(V10PresentationRequestRequestSchema()) +@request_schema(V10PresentationSendRequestRequestSchema()) @response_schema(V10PresentationExchangeSchema(), 200) async def presentation_exchange_send_free_request(request: web.BaseRequest): """ @@ -638,7 +660,7 @@ async def presentation_exchange_send_free_request(request: web.BaseRequest): summary="Sends a presentation request in reference to a proposal", ) @match_info_schema(PresExIdMatchInfoSchema()) -@request_schema(V10PresentationRequestRequestSchema()) +@request_schema(V10PresentationSendRequestRequestSchema()) @response_schema(V10PresentationExchangeSchema(), 200) async def presentation_exchange_send_bound_request(request: web.BaseRequest): """ diff --git a/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py b/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py index 4be06c446c..fa58b247b1 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py +++ b/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_handler.py @@ -21,6 +21,7 @@ class TestPingHandler: async def test_ping(self, request_context): request_context.message_receipt = MessageReceipt() request_context.message = Ping(response_requested=False) + request_context.settings = {"debug.monitor_ping": True} request_context.connection_ready = True handler = PingHandler() responder = MockResponder() @@ -28,6 +29,17 @@ async def test_ping(self, request_context): messages = responder.messages assert len(messages) == 0 + @pytest.mark.asyncio + async def test_ping_not_ready(self, request_context): + request_context.message_receipt = MessageReceipt() + request_context.message = Ping(response_requested=False) + request_context.connection_ready = False + handler = PingHandler() + responder = MockResponder() + assert not await handler.handle(request_context, responder) + messages = responder.messages + assert len(messages) == 0 + @pytest.mark.asyncio async def test_ping_response(self, request_context): request_context.message_receipt = MessageReceipt() diff --git a/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py b/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py index 9bbcaa6fc8..b459ec0207 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py +++ b/aries_cloudagent/protocols/trustping/v1_0/handlers/tests/test_ping_response_handler.py @@ -20,6 +20,7 @@ class TestPingResponseHandler: async def test_ping_response(self, request_context): request_context.message_receipt = MessageReceipt() 
request_context.message = PingResponse() + request_context.settings = {"debug.monitor_ping": True} request_context.connection_ready = True handler = PingResponseHandler() responder = MockResponder() diff --git a/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py b/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py index e6bb8a70f8..76507d95c6 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py +++ b/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py @@ -31,7 +31,7 @@ def __init__( comment: An optional comment string """ - super(Ping, self).__init__(**kwargs) + super().__init__(**kwargs) self.comment = comment self.response_requested = response_requested diff --git a/aries_cloudagent/protocols/trustping/v1_0/messages/ping_response.py b/aries_cloudagent/protocols/trustping/v1_0/messages/ping_response.py index 9efdf8656c..e703b9ee1f 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/messages/ping_response.py +++ b/aries_cloudagent/protocols/trustping/v1_0/messages/ping_response.py @@ -27,7 +27,7 @@ def __init__(self, *, comment: str = None, **kwargs): comment: An optional comment string to include in the message """ - super(PingResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.comment = comment diff --git a/aries_cloudagent/protocols/trustping/v1_0/tests/__init__.py b/aries_cloudagent/protocols/trustping/v1_0/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/trustping/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/trustping/v1_0/tests/test_routes.py new file mode 100644 index 0000000000..8615e1d5f3 --- /dev/null +++ b/aries_cloudagent/protocols/trustping/v1_0/tests/test_routes.py @@ -0,0 +1,76 @@ +import json +import pytest + +from asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock + +from .. 
import routes as test_module + + +class TestHolderRoutes(AsyncTestCase): + def setUp(self): + self.context = async_mock.MagicMock() + + self.app = { + "request_context": self.context, + "outbound_message_router": async_mock.CoroutineMock(), + } + + async def test_connections_send_ping(self): + request = async_mock.MagicMock( + app=self.app, + json=async_mock.CoroutineMock(return_value={"comment": "some comment"}), + match_info={"conn_id": "dummy"}, + ) + + with async_mock.patch.object( + test_module.ConnectionRecord, "retrieve_by_id", async_mock.CoroutineMock() + ) as mock_retrieve, async_mock.patch.object( + test_module, "Ping", async_mock.MagicMock() + ) as mock_ping, async_mock.patch.object( + test_module.web, "json_response", async_mock.MagicMock() + ) as json_response: + mock_ping.return_value = async_mock.MagicMock(_thread_id="dummy") + mock_retrieve.return_value = async_mock.MagicMock(is_ready=True) + result = await test_module.connections_send_ping(request) + json_response.assert_called_once_with({"thread_id": "dummy"}) + assert result is json_response.return_value + + async def test_connections_send_ping_no_conn(self): + request = async_mock.MagicMock( + app=self.app, + json=async_mock.CoroutineMock(return_value={"comment": "some comment"}), + match_info={"conn_id": "dummy"}, + ) + + with async_mock.patch.object( + test_module.ConnectionRecord, "retrieve_by_id", async_mock.CoroutineMock() + ) as mock_retrieve, async_mock.patch.object( + test_module.web, "json_response", async_mock.MagicMock() + ) as json_response: + mock_retrieve.side_effect = test_module.StorageNotFoundError() + with self.assertRaises(test_module.web.HTTPNotFound): + await test_module.connections_send_ping(request) + + async def test_connections_send_ping_not_ready(self): + request = async_mock.MagicMock( + app=self.app, + json=async_mock.CoroutineMock(return_value={"comment": "some comment"}), + match_info={"conn_id": "dummy"}, + ) + + with async_mock.patch.object( + test_module.ConnectionRecord, "retrieve_by_id", async_mock.CoroutineMock() + ) as mock_retrieve, async_mock.patch.object( + test_module.web, "json_response", async_mock.MagicMock() + ) as json_response: + mock_retrieve.return_value = async_mock.MagicMock(is_ready=False) + with self.assertRaises(test_module.web.HTTPBadRequest): + await test_module.connections_send_ping(request) + + async def test_register(self): + mock_app = async_mock.MagicMock() + mock_app.add_routes = async_mock.MagicMock() + + await test_module.register(mock_app) + mock_app.add_routes.assert_called_once() diff --git a/aries_cloudagent/storage/basic.py b/aries_cloudagent/storage/basic.py index 18b73bacd3..01990c0034 100644 --- a/aries_cloudagent/storage/basic.py +++ b/aries_cloudagent/storage/basic.py @@ -190,13 +190,13 @@ def basic_tag_value_match(value: str, match: dict) -> bool: if op == "$neq": chk = value != cmp_val elif op == "$gt": - chk = value > cmp_val + chk = float(value) > float(cmp_val) elif op == "$gte": - chk = value >= cmp_val + chk = float(value) >= float(cmp_val) elif op == "$lt": - chk = value < cmp_val + chk = float(value) < float(cmp_val) elif op == "$lte": - chk = value <= cmp_val + chk = float(value) <= float(cmp_val) # elif op == "$like": NYI else: raise StorageSearchError("Unsupported match operator: ".format(op)) diff --git a/aries_cloudagent/storage/provider.py b/aries_cloudagent/storage/provider.py index 65746421f5..6786617a4c 100644 --- a/aries_cloudagent/storage/provider.py +++ b/aries_cloudagent/storage/provider.py @@ -20,7 +20,6 @@ class 
StorageProvider(BaseProvider): async def provide(self, settings: BaseSettings, injector: BaseInjector): """Create and return the storage instance.""" - wallet: BaseWallet = await injector.inject(BaseWallet) wallet_type = settings.get_value("wallet.type", default="basic").lower() diff --git a/aries_cloudagent/storage/tests/test_basic_storage.py b/aries_cloudagent/storage/tests/test_basic_storage.py index f139047c40..f45ff46a56 100644 --- a/aries_cloudagent/storage/tests/test_basic_storage.py +++ b/aries_cloudagent/storage/tests/test_basic_storage.py @@ -1,5 +1,7 @@ import pytest +from asynctest import mock as async_mock + from aries_cloudagent.storage.error import ( StorageDuplicateError, StorageError, @@ -7,10 +9,14 @@ StorageSearchError, ) +from aries_cloudagent.storage.indy import IndyStorageRecordSearch +from aries_cloudagent.storage.basic import ( + BasicStorage, + basic_tag_value_match, + basic_tag_query_match, +) from aries_cloudagent.storage.record import StorageRecord -from aries_cloudagent.storage.basic import BasicStorage - @pytest.fixture() def store(): @@ -26,6 +32,9 @@ def test_missing_record(tags={}): class TestBasicStorage: + def test_repr(self, store): + assert store.__class__.__name__ in str(store) + @pytest.mark.asyncio async def test_add_required(self, store): with pytest.raises(StorageError): @@ -121,7 +130,12 @@ async def test_delete_tags_missing(self, store): async def test_search(self, store): record = test_record() await store.add_record(record) + + # search search = store.search_records(record.type, {}, None) + assert search.__class__.__name__ in str(search) + assert search.handle is None or isinstance(search, IndyStorageRecordSearch) + assert not search.options await search.open() rows = await search.fetch(100) assert len(rows) == 1 @@ -133,6 +147,43 @@ async def test_search(self, store): more = await search.fetch(100) assert len(more) == 0 + # search again with fetch-all + search = store.search_records(record.type, {}, None) + await search.open() + rows = await search.fetch_all() + assert len(rows) == 1 + + # search again with with iterator mystery error + search = store.search_records(record.type, {}, None) + with async_mock.patch.object( + search, "fetch", async_mock.CoroutineMock() + ) as mock_fetch: + mock_fetch.return_value = async_mock.MagicMock( + pop=async_mock.MagicMock(side_effect=IndexError()) + ) + with pytest.raises(StopAsyncIteration): + await search.__anext__() + + # search again with fetch-single + search = store.search_records(record.type, {}, None) + await search.open() + row = await search.fetch_single() + assert row + + # search again with fetch-single on no rows + search = store.search_records("NOT-MY-TYPE", {}, None) + await search.open() + with pytest.raises(StorageNotFoundError): + await search.fetch_single() + + # search again with fetch-single on multiple rows + record = test_record() + await store.add_record(record) + search = store.search_records(record.type, {}, None) + async with search as s: + with pytest.raises(StorageDuplicateError): + await s.fetch_single() + @pytest.mark.asyncio async def test_iter_search(self, store): record = test_record() @@ -152,3 +203,63 @@ async def test_closed_search(self, store): search = store.search_records("TYPE", {}, None) with pytest.raises(StorageSearchError): await search.fetch(100) + + @pytest.mark.asyncio + async def test_basic_tag_value_match(self, store): + TAGS = {"a": "aardvark", "b": "bear", "z": "0"} + record = test_record(TAGS) + await store.add_record(record) + + assert not 
basic_tag_value_match(None, {"$neq": "octopus"}) + assert not basic_tag_value_match(TAGS["a"], {"$in": ["cat", "dog"]}) + assert basic_tag_value_match(TAGS["a"], {"$neq": "octopus"}) + assert basic_tag_value_match(TAGS["z"], {"$gt": "-0.5"}) + assert basic_tag_value_match(TAGS["z"], {"$gte": "0"}) + assert basic_tag_value_match(TAGS["z"], {"$lt": "1"}) + assert basic_tag_value_match(TAGS["z"], {"$lte": "0"}) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_value_match(TAGS["z"], {"$gt": "-1", "$lt": "1"}) + assert "Unsupported subquery" in str(excinfo.value) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_value_match(TAGS["a"], {"$in": "aardvark"}) + assert "Expected list" in str(excinfo.value) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_value_match(TAGS["z"], {"$gte": -1}) + assert "Expected string" in str(excinfo.value) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_value_match(TAGS["z"], {"$near": "-1"}) + assert "Unsupported match operator" in str(excinfo.value) + + @pytest.mark.asyncio + async def test_basic_tag_query_match(self, store): + TAGS = {"a": "aardvark", "b": "bear", "z": "0"} + record = test_record(TAGS) + await store.add_record(record) + + assert basic_tag_query_match(None, None) + assert not basic_tag_query_match(None, {"a": "aardvark"}) + assert basic_tag_query_match( + TAGS, {"$or": [{"a": "aardvark"}, {"a": "alligator"}]} + ) + assert basic_tag_query_match(TAGS, {"$not": {"a": "alligator"}}) + assert basic_tag_query_match(TAGS, {"z": {"$gt": "-1"}}) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_query_match(TAGS, {"$or": "-1"}) + assert "Expected list" in str(excinfo.value) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_query_match(TAGS, {"$not": [{"z": "-1"}, {"z": "1"}]}) + assert "Expected dict for $not filter value" in str(excinfo.value) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_query_match(TAGS, {"$near": {"z": "-1"}}) + assert "Unexpected filter operator" in str(excinfo.value) + + with pytest.raises(StorageSearchError) as excinfo: + basic_tag_query_match(TAGS, {"a": -1}) + assert "Expected string or dict for filter value" in str(excinfo.value) diff --git a/aries_cloudagent/storage/tests/test_indy_storage.py b/aries_cloudagent/storage/tests/test_indy_storage.py index f13680a028..4ea5dc4d17 100644 --- a/aries_cloudagent/storage/tests/test_indy_storage.py +++ b/aries_cloudagent/storage/tests/test_indy_storage.py @@ -1,10 +1,21 @@ +import json import pytest import os +import indy.anoncreds +import indy.crypto +import indy.did +import indy.wallet + +from asynctest import mock as async_mock + +from aries_cloudagent.wallet import indy as test_wallet from aries_cloudagent.wallet.indy import IndyWallet +from aries_cloudagent.storage.error import StorageError from aries_cloudagent.storage.indy import IndyStorage from aries_cloudagent.storage.record import StorageRecord +from .. import indy as test_module from . 
import test_basic_storage @@ -27,7 +38,174 @@ async def store(): @pytest.mark.indy class TestIndyStorage(test_basic_storage.TestBasicStorage): - """ """ + """Tests for indy storage.""" + + @pytest.mark.asyncio + async def test_record(self): + with async_mock.patch.object( + test_wallet, "load_postgres_plugin", async_mock.MagicMock() + ) as mock_load, async_mock.patch.object( + indy.wallet, "create_wallet", async_mock.CoroutineMock() + ) as mock_create, async_mock.patch.object( + indy.wallet, "open_wallet", async_mock.CoroutineMock() + ) as mock_open, async_mock.patch.object( + indy.anoncreds, "prover_create_master_secret", async_mock.CoroutineMock() + ) as mock_master, async_mock.patch.object( + indy.wallet, "close_wallet", async_mock.CoroutineMock() + ) as mock_close, async_mock.patch.object( + indy.wallet, "delete_wallet", async_mock.CoroutineMock() + ) as mock_delete: + fake_wallet = IndyWallet( + { + "auto_create": True, + "auto_remove": True, + "name": "test_pg_wallet", + "key": await IndyWallet.generate_wallet_key(), + "key_derivation_method": "RAW", + "storage_type": "postgres_storage", + "storage_config": json.dumps({"url": "dummy"}), + "storage_creds": json.dumps( + { + "account": "postgres", + "password": "mysecretpassword", + "admin_account": "postgres", + "admin_password": "mysecretpassword", + } + ), + } + ) + await fake_wallet.open() + storage = IndyStorage(fake_wallet) + + for record_x in [ + None, + StorageRecord( + type="connection", + value=json.dumps( + { + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + "error_msg": None, + "their_label": None, + "created_at": "2019-05-14 21:58:24.143260+00:00", + "updated_at": "2019-05-14 21:58:24.143260+00:00", + } + ), + tags={ + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + }, + id=None, + ), + StorageRecord( + type=None, + value=json.dumps( + { + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + "error_msg": None, + "their_label": None, + "created_at": "2019-05-14 21:58:24.143260+00:00", + "updated_at": "2019-05-14 21:58:24.143260+00:00", + } + ), + tags={ + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + }, + id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", + ), + StorageRecord( + type="connection", + value=None, + tags={ + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + }, + id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", + ), + ]: + with pytest.raises(StorageError): + await storage.add_record(record_x) + + with pytest.raises(StorageError): + await storage.get_record(None, "dummy-id") + with pytest.raises(StorageError): + await storage.get_record("connection", None) + + with async_mock.patch.object( + test_module.non_secrets, "get_wallet_record", async_mock.CoroutineMock() + ) as mock_get_record: + mock_get_record.side_effect = test_module.IndyError( + test_module.ErrorCode.CommonInvalidStructure + ) + with pytest.raises(test_module.StorageError): + await storage.get_record("connection", "dummy-id") + + with async_mock.patch.object( + test_module.non_secrets, + "update_wallet_record_value", + async_mock.CoroutineMock(), + ) as mock_update_value, async_mock.patch.object( + 
test_module.non_secrets, + "update_wallet_record_tags", + async_mock.CoroutineMock(), + ) as mock_update_tags, async_mock.patch.object( + test_module.non_secrets, + "delete_wallet_record", + async_mock.CoroutineMock(), + ) as mock_delete: + mock_update_value.side_effect = test_module.IndyError( + test_module.ErrorCode.CommonInvalidStructure + ) + mock_update_tags.side_effect = test_module.IndyError( + test_module.ErrorCode.CommonInvalidStructure + ) + mock_delete.side_effect = test_module.IndyError( + test_module.ErrorCode.CommonInvalidStructure + ) + + rec = StorageRecord( + type="connection", + value=json.dumps( + { + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + "error_msg": None, + "their_label": None, + "created_at": "2019-05-14 21:58:24.143260+00:00", + "updated_at": "2019-05-14 21:58:24.143260+00:00", + } + ), + tags={ + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + }, + id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", + ) + + with pytest.raises(test_module.StorageError): + await storage.update_record_value(rec, "dummy-value") + + with pytest.raises(test_module.StorageError): + await storage.update_record_tags(rec, {"tag": "tag"}) + + with pytest.raises(test_module.StorageError): + await storage.delete_record(rec) # TODO get these to run in docker ci/cd @pytest.mark.asyncio @@ -60,14 +238,25 @@ async def test_postgres_wallet_storage_works(self): # add and then fetch a record record = StorageRecord( - value='{"initiator": "self", "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", "state": "invitation", "routing_state": "none", "error_msg": null, "their_label": null, "created_at": "2019-05-14 21:58:24.143260+00:00", "updated_at": "2019-05-14 21:58:24.143260+00:00"}', + type="connection", + value=json.dumps( + { + "initiator": "self", + "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", + "state": "invitation", + "routing_state": "none", + "error_msg": None, + "their_label": None, + "created_at": "2019-05-14 21:58:24.143260+00:00", + "updated_at": "2019-05-14 21:58:24.143260+00:00", + } + ), tags={ "initiator": "self", "invitation_key": "9XgL7Y4TBTJyVJdomT6axZGUFg9npxcrXnRT4CG8fWYg", "state": "invitation", "routing_state": "none", }, - type="connection", id="f96f76ec-0e9b-4f32-8237-f4219e6cf0c7", ) await storage.add_record(record) diff --git a/aries_cloudagent/verifier/indy.py b/aries_cloudagent/verifier/indy.py index 0f8ff1ece2..7018d9be57 100644 --- a/aries_cloudagent/verifier/indy.py +++ b/aries_cloudagent/verifier/indy.py @@ -8,6 +8,8 @@ from indy.error import IndyError from ..messaging.util import canon, encode +from ..ledger.base import BaseLedger + from .base import BaseVerifier LOGGER = logging.getLogger(__name__) @@ -24,18 +26,17 @@ class PreVerifyResult(Enum): class IndyVerifier(BaseVerifier): """Indy verifier class.""" - def __init__(self, wallet): + def __init__(self, ledger: BaseLedger): """ Initialize an IndyVerifier instance. Args: - wallet: IndyWallet instance + ledger: ledger instance """ - self.wallet = wallet + self.ledger = ledger - @staticmethod - def pre_verify(pres_req: dict, pres: dict) -> (PreVerifyResult, str): + async def pre_verify(self, pres_req: dict, pres: dict) -> (PreVerifyResult, str): """ Check for essential components and tampering in presentation. 
@@ -47,9 +48,16 @@ def pre_verify(pres_req: dict, pres: dict) -> (PreVerifyResult, str): pres: corresponding presentation Returns: - An instance of `PreVerifyResult` representing the validation result + A tuple with `PreVerifyResult` representing the validation result and + reason text for failure or None for OK. """ + if not ( + pres_req + and "requested_predicates" in pres_req + and "requested_attributes" in pres_req + ): + return (PreVerifyResult.INCOMPLETE, "Incomplete or missing proof request") if not pres: return (PreVerifyResult.INCOMPLETE, "No proof provided") if "requested_proof" not in pres: @@ -57,9 +65,22 @@ def pre_verify(pres_req: dict, pres: dict) -> (PreVerifyResult, str): if "proof" not in pres: return (PreVerifyResult.INCOMPLETE, "Missing 'proof'") + for (index, ident) in enumerate(pres["identifiers"]): + if not ident.get("timestamp"): + cred_def_id = ident["cred_def_id"] + cred_def = await self.ledger.get_credential_definition(cred_def_id) + if cred_def["value"].get("revocation"): + return ( + PreVerifyResult.INCOMPLETE, + ( + f"Missing timestamp in presentation identifier #{index} " + f"for cred def id {cred_def_id}" + ), + ) + for (uuid, req_pred) in pres_req["requested_predicates"].items(): - canon_attr = canon(req_pred["name"]) try: + canon_attr = canon(req_pred["name"]) for ge_proof in pres["proof"]["proofs"][ pres["requested_proof"]["predicates"][uuid]["sub_proof_index"] ]["primary_proof"]["ge_proofs"]: @@ -171,7 +192,7 @@ async def verify_presentation( rev_reg_entries: revocation registry entries """ - (pv_result, pv_msg) = self.pre_verify(presentation_request, presentation) + (pv_result, pv_msg) = await self.pre_verify(presentation_request, presentation) if pv_result != PreVerifyResult.OK: LOGGER.error( f"Presentation on nonce={presentation_request['nonce']} " diff --git a/aries_cloudagent/verifier/tests/test_indy.py b/aries_cloudagent/verifier/tests/test_indy.py index 6f4c471d33..ae9887b83f 100644 --- a/aries_cloudagent/verifier/tests/test_indy.py +++ b/aries_cloudagent/verifier/tests/test_indy.py @@ -6,6 +6,8 @@ from asynctest import TestCase as AsyncTestCase from asynctest import mock as async_mock +from indy.error import IndyError + from ..indy import IndyVerifier, PreVerifyResult @@ -20,65 +22,51 @@ } }, "requested_predicates": {}, + "non_revoked": {"from": 1579892963, "to": 1579892963}, } -INDY_PROOF_NAME = json.loads( - """{ - "proof": { - "proofs": [ - { - "primary_proof": { - "eq_proof": { - "revealed_attrs": { - "preferredname": "94607763023542937648705576709896212619553924110058781320304650334433495169960" - }, - "a_prime": "...", - "e": "...", - "v": "...", - "m": { - "master_secret": "...", - "musthave": "..." - }, - "m2": "..." 
+INDY_PROOF_NAME = {
+    "proof": {
+        "proofs": [
+            {
+                "primary_proof": {
+                    "eq_proof": {
+                        "revealed_attrs": {
+                            "preferredname": "94607763023542937648705576709896212619553924110058781320304650334433495169960"
                         },
-                        "ge_proofs": []
+                        "a_prime": "...",
+                        "e": "...",
+                        "v": "...",
+                        "m": {"master_secret": "...", "musthave": "..."},
+                        "m2": "...",
                     },
-                    "non_revoc_proof": null
-                }
-            ],
-            "aggregated_proof": {
-                "c_hash": "...",
-                "c_list": [
-                    [
-                        1,
-                        152,
-                        172,
-                        159
-                    ]
-                ]
+                    "ge_proofs": [],
+                },
+                "non_revoc_proof": None,
             }
-        },
-        "requested_proof": {
-            "revealed_attrs": {
-                "19_uuid": {
-                    "sub_proof_index": 0,
-                    "raw": "Chicken Hawk",
-                    "encoded": "94607763023542937648705576709896212619553924110058781320304650334433495169960"
-                }
-            },
-            "self_attested_attrs": {},
-            "unrevealed_attrs": {},
-            "predicates": {}
-        },
-        "identifiers": [
-            {
-                "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0",
-                "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag",
-                "rev_reg_id": null,
-                "timestamp": null
+        ],
+        "aggregated_proof": {"c_hash": "...", "c_list": [[1, 152, 172, 159]]},
+    },
+    "requested_proof": {
+        "revealed_attrs": {
+            "19_uuid": {
+                "sub_proof_index": 0,
+                "raw": "Chicken Hawk",
+                "encoded": "94607763023542937648705576709896212619553924110058781320304650334433495169960",
             }
-        ]
-    }"""
-)
+        },
+        "self_attested_attrs": {},
+        "unrevealed_attrs": {},
+        "predicates": {},
+    },
+    "identifiers": [
+        {
+            "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0",
+            "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag",
+            "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0",
+            "timestamp": 1579892963,
+        }
+    ],
+}
 
 INDY_PROOF_REQ_PRED_NAMES = {
     "nonce": "12301197819298309547817",
@@ -295,20 +283,41 @@
 
 @pytest.mark.indy
 class TestIndyVerifier(AsyncTestCase):
-    def test_init(self):
-        verifier = IndyVerifier("wallet")
-        assert verifier.wallet == "wallet"
-        assert repr(verifier) == "<IndyVerifier>"
+    def setUp(self):
+        mock_ledger = async_mock.MagicMock(
+            get_credential_definition=async_mock.CoroutineMock(
+                return_value={
+                    "...": "...",
+                    "value": {
+                        "revocation": {
+                            "g": "1 ...",
+                            "g_dash": "1 ...",
+                            "h": "1 ...",
+                            "h0": "1 ...",
+                            "h1": "1 ...",
+                            "h2": "1 ...",
+                            "htilde": "1 ...",
+                            "h_cap": "1 ...",
+                            "u": "1 ...",
+                            "pk": "1 ...",
+                            "y": "1 ...",
+                        }
+                    },
+                }
+            )
+        )
+        self.verifier = IndyVerifier(mock_ledger)
+        assert repr(self.verifier) == "<IndyVerifier>"
 
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
     async def test_verify_presentation(self, mock_verify):
         mock_verify.return_value = "val"
-        verifier = IndyVerifier("wallet")
 
         with async_mock.patch.object(
-            verifier, "pre_verify", return_value=(PreVerifyResult.OK, None)
-        ):
-            verified = await verifier.verify_presentation(
+            self.verifier, "pre_verify", async_mock.CoroutineMock()
+        ) as mock_pre_verify:
+            mock_pre_verify.return_value = (PreVerifyResult.OK, None)
+            verified = await self.verifier.verify_presentation(
                 "presentation_request",
                 "presentation",
@@ -328,11 +337,258 @@ async def test_verify_presentation(self, mock_verify):
         assert verified == "val"
 
+    @async_mock.patch("indy.anoncreds.verifier_verify_proof")
+    async def test_verify_presentation_x_indy(self, mock_verify):
+        mock_verify.side_effect = IndyError(error_code=1)
+
+        with async_mock.patch.object(
+            self.verifier, "pre_verify", async_mock.CoroutineMock()
+        ) as mock_pre_verify:
+            mock_pre_verify.return_value = (PreVerifyResult.OK, None)
+            verified = await self.verifier.verify_presentation(
+                {"nonce": "1234567890"},
+                "presentation",
+                "schemas",
+                "credential_definitions",
+                "rev_reg_defs",
+                "rev_reg_entries",
+            )
+
+        mock_verify.assert_called_once_with(
+            json.dumps({"nonce": "1234567890"}),
+            json.dumps("presentation"),
+            json.dumps("schemas"),
+            json.dumps("credential_definitions"),
+            json.dumps("rev_reg_defs"),
+            json.dumps("rev_reg_entries"),
+        )
+
+        assert not verified
+
+    async def test_pre_verify(self):
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    None, {"requested_proof": "...", "proof": "..."}
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    {"requested_predicates": "...", "requested_attributes": "..."},
+                    None,
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    {"requested_predicates": "...", "requested_attributes": "..."},
+                    {"requested_proof": "..."},
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    {"requested_predicates": "...", "requested_attributes": "..."},
+                    {"proof": "..."},
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    {
+                        "requested_predicates": {"0_name_uuid": "..."},
+                        "requested_attributes": "...",
+                    },
+                    INDY_PROOF_PRED_NAMES,
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    INDY_PROOF_REQ_NAME,
+                    {
+                        "proof": "...",
+                        "requested_proof": {
+                            "revealed_attrs": {},
+                            "self_attested_attrs": {"19_uuid": "Chicken Hawk"},
+                            "unrevealed_attrs": {},
+                            "predicates": {},
+                        },
+                        "identifiers": [
+                            {
+                                "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0",
+                                "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag",
+                                "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0",
+                                "timestamp": 1579892963,
+                            }
+                        ],
+                    },
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    {
+                        "nonce": "15606741555044336341559",
+                        "name": "proof_req",
+                        "version": "0.0",
+                        "requested_attributes": {"19_uuid": {"name": "Preferred Name"}},
+                        "requested_predicates": {},
+                    },
+                    {
+                        "proof": "...",
+                        "requested_proof": {
+                            "revealed_attrs": {},
+                            "self_attested_attrs": {},
+                            "unrevealed_attrs": {},
+                            "predicates": {},
+                        },
+                        "identifiers": [
+                            {
+                                "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0",
+                                "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag",
+                                "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0",
+                                "timestamp": 1579892963,
+                            }
+                        ],
+                    },
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    {
+                        "nonce": "15606741555044336341559",
+                        "name": "proof_req",
+                        "version": "0.0",
+                        "requested_attributes": {
+                            "19_uuid": {"neither-name-nor-names": "Preferred Name"}
+                        },
+                        "requested_predicates": {},
+                    },
+                    {
+                        "proof": "...",
+                        "requested_proof": {
+                            "revealed_attrs": {
+                                "19_uuid": {
+                                    "sub_proof_index": 0,
+                                    "raw": "Chicken Hawk",
+                                    "encoded": "94607763023542937648705576709896212619553924110058781320304650334433495169960",
+                                }
+                            },
+                            "self_attested_attrs": {},
+                            "unrevealed_attrs": {},
+                            "predicates": {},
+                        },
+                        "identifiers": [
+                            {
+                                "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0",
+                                "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag",
+                                "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0",
+                                "timestamp": 1579892963,
+                            }
+                        ],
+                    },
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.INCOMPLETE
+            == (
+                await self.verifier.pre_verify(
+                    INDY_PROOF_REQ_NAME,
+                    {
+                        "proof": {
+                            "proofs": [
+                                {
+                                    "primary_proof": {
+                                        "eq_proof": {
+                                            "revealed_attrs": {"otherthing": "..."},
+                                            "...": "...",
+                                        },
+                                        "ge_proofs": [],
+                                    },
+                                    "...": "...",
+                                }
+                            ],
+                            "...": "...",
+                        },
+                        "requested_proof": {
+                            "revealed_attrs": {
+                                "19_uuid": {
+                                    "sub_proof_index": 0,
+                                    "raw": "Chicken Hawk",
+                                    "encoded": "94607763023542937648705576709896212619553924110058781320304650334433495169960",
+                                }
+                            },
+                            "self_attested_attrs": {},
+                            "unrevealed_attrs": {},
+                            "predicates": {},
+                        },
+                        "identifiers": [
+                            {
+                                "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0",
+                                "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag",
+                                "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0",
+                                "timestamp": 1579892963,
+                            }
+                        ],
+                    },
+                )
+            )[0]
+        )
+        assert (
+            PreVerifyResult.OK
+            == (
+                await self.verifier.pre_verify(
+                    {
+                        "nonce": "15606741555044336341559",
+                        "name": "proof_req",
+                        "version": "0.0",
+                        "requested_attributes": {"19_uuid": {"name": "Preferred Name"}},
+                        "requested_predicates": {},
+                    },
+                    {
+                        "proof": "...",
+                        "requested_proof": {
+                            "revealed_attrs": {},
+                            "self_attested_attrs": {"19_uuid": "Chicken Hawk"},
+                            "unrevealed_attrs": {},
+                            "predicates": {},
+                        },
+                        "identifiers": [
+                            {
+                                "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1579888926.0",
+                                "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:19:tag",
+                                "rev_reg_id": "LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:18:tag:CL_ACCUM:0",
+                                "timestamp": 1579892963,
+                            }
+                        ],
+                    },
+                )
+            )[0]
+        )
+
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
     async def test_check_encoding_attr(self, mock_verify):
         mock_verify.return_value = True
-        verifier = IndyVerifier("wallet")
-        verified = await verifier.verify_presentation(
+        verified = await self.verifier.verify_presentation(
             INDY_PROOF_REQ_NAME,
             INDY_PROOF_NAME,
             "schemas",
@@ -354,15 +610,12 @@ async def test_check_encoding_attr(self, mock_verify):
 
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
     async def test_check_encoding_attr_tamper_raw(self, mock_verify):
-        mock_verify.return_value = True
-        verifier = IndyVerifier("wallet")
-
         INDY_PROOF_X = deepcopy(INDY_PROOF_NAME)
         INDY_PROOF_X["requested_proof"]["revealed_attrs"]["19_uuid"][
             "raw"
         ] = "Mock chicken"
 
-        verified = await verifier.verify_presentation(
+        verified = await self.verifier.verify_presentation(
             INDY_PROOF_REQ_NAME,
             INDY_PROOF_X,
             "schemas",
@@ -377,15 +630,12 @@ async def test_check_encoding_attr_tamper_raw(self, mock_verify):
 
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
     async def test_check_encoding_attr_tamper_encoded(self, mock_verify):
-        mock_verify.return_value = True
-        verifier = IndyVerifier("wallet")
-
         INDY_PROOF_X = deepcopy(INDY_PROOF_NAME)
         INDY_PROOF_X["requested_proof"]["revealed_attrs"]["19_uuid"][
             "encoded"
         ] = "1234567890"
 
-        verified = await verifier.verify_presentation(
+        verified = await self.verifier.verify_presentation(
             INDY_PROOF_REQ_NAME,
             INDY_PROOF_X,
             "schemas",
@@ -401,8 +651,7 @@ async def test_check_encoding_attr_tamper_encoded(self, mock_verify):
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
     async def test_check_pred_names(self, mock_verify):
         mock_verify.return_value = True
-        verifier = IndyVerifier("wallet")
-        verified = await verifier.verify_presentation(
+        verified = await self.verifier.verify_presentation(
             INDY_PROOF_REQ_PRED_NAMES,
             INDY_PROOF_PRED_NAMES,
             "schemas",
@@ -424,15 +673,12 @@ async def test_check_pred_names(self, mock_verify):
 
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
     async def test_check_pred_names_tamper_pred_value(self, mock_verify):
-        mock_verify.return_value = True
-        verifier = IndyVerifier("wallet")
-
         INDY_PROOF_X = deepcopy(INDY_PROOF_PRED_NAMES)
         INDY_PROOF_X["proof"]["proofs"][0]["primary_proof"]["ge_proofs"][0][
             "predicate"
         ]["value"] = 0
 
-        verified = await verifier.verify_presentation(
+        verified = await self.verifier.verify_presentation(
             INDY_PROOF_REQ_PRED_NAMES,
             INDY_PROOF_X,
             "schemas",
@@ -446,14 +692,35 @@ async def test_check_pred_names_tamper_pred_value(self, mock_verify):
         assert verified == False
 
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
-    async def test_check_pred_names_tamper_pred_req_attr(self, mock_verify):
-        mock_verify.return_value = True
-        verifier = IndyVerifier("wallet")
+    async def test_check_pred_names_bypass_timestamp(self, mock_verify):
+        INDY_PROOF_REQ_X = deepcopy(INDY_PROOF_REQ_PRED_NAMES)
+        INDY_PROOF_REQ_X["requested_attributes"]["18_uuid"].pop("non_revoked")
+        INDY_PROOF_REQ_X["requested_predicates"]["18_id_GE_uuid"].pop("non_revoked")
+        INDY_PROOF_REQ_X["requested_predicates"]["18_busid_GE_uuid"].pop("non_revoked")
+
+        INDY_PROOF_X = deepcopy(INDY_PROOF_PRED_NAMES)
+        INDY_PROOF_X["identifiers"][0]["timestamp"] = None
+        INDY_PROOF_X["identifiers"][0]["rev_reg_id"] = None
+
+        verified = await self.verifier.verify_presentation(
+            INDY_PROOF_REQ_X,
+            INDY_PROOF_X,
+            "schemas",
+            "credential_definitions",
+            "rev_reg_defs",
+            "rev_reg_entries",
+        )
+
+        mock_verify.assert_not_called()
+
+        assert verified == False
+    @async_mock.patch("indy.anoncreds.verifier_verify_proof")
+    async def test_check_pred_names_tamper_pred_req_attr(self, mock_verify):
 
         INDY_PROOF_REQ_X = deepcopy(INDY_PROOF_REQ_PRED_NAMES)
         INDY_PROOF_REQ_X["requested_predicates"]["18_busid_GE_uuid"]["name"] = "dummy"
 
-        verified = await verifier.verify_presentation(
+        verified = await self.verifier.verify_presentation(
             INDY_PROOF_REQ_X,
             INDY_PROOF_PRED_NAMES,
             "schemas",
@@ -468,15 +735,12 @@ async def test_check_pred_names_tamper_pred_req_attr(self, mock_verify):
 
     @async_mock.patch("indy.anoncreds.verifier_verify_proof")
     async def test_check_pred_names_tamper_attr_groups(self, mock_verify):
-        mock_verify.return_value = True
-        verifier = IndyVerifier("wallet")
-
         INDY_PROOF_X = deepcopy(INDY_PROOF_PRED_NAMES)
         INDY_PROOF_X["requested_proof"]["revealed_attr_groups"][
             "x_uuid"
         ] = INDY_PROOF_X["requested_proof"]["revealed_attr_groups"].pop("18_uuid")
 
-        verified = await verifier.verify_presentation(
+        verified = await self.verifier.verify_presentation(
             INDY_PROOF_REQ_PRED_NAMES,
             INDY_PROOF_X,
             "schemas",
diff --git a/aries_cloudagent/wallet/tests/test_routes.py b/aries_cloudagent/wallet/tests/test_routes.py
index d19efebcef..f7244c2c22 100644
--- a/aries_cloudagent/wallet/tests/test_routes.py
+++ b/aries_cloudagent/wallet/tests/test_routes.py
@@ -257,7 +257,7 @@ async def test_set_public_did_update_endpoint(self):
         Ledger = async_mock.MagicMock()
         self.ledger = Ledger()
         self.ledger.update_endpoint_for_did = async_mock.CoroutineMock()
-        self.ledger._aenter__ = async_mock.CoroutineMock(return_value=self.ledger)
+        self.ledger.__aenter__ = async_mock.CoroutineMock(return_value=self.ledger)
         self.context.injector.bind_instance(BaseLedger, self.ledger)
 
         with async_mock.patch.object(
diff --git a/demo/AcmeDemoWorkshop.md b/demo/AcmeDemoWorkshop.md
index 0d24f82acb..73c72a6e8b 100644
--- a/demo/AcmeDemoWorkshop.md
+++ b/demo/AcmeDemoWorkshop.md
@@ -123,6 +123,13 @@ Now we need to handle receipt of the proof. Locate the code that handles receiv
 then replace the ```# TODO``` comment and the ```pass``` statement:
 
 ```
+        log_status("#27 Process the proof provided by X")
+        log_status("#28 Check if proof is valid")
+        proof = await self.admin_POST(
+            f"/present-proof/records/{presentation_exchange_id}/verify-presentation"
+        )
+        self.log("Proof = ", proof["verified"])
+
         # if presentation is a degree schema (proof of education),
         # check values received
         pres_req = message["presentation_request"]
@@ -147,7 +154,7 @@ then replace the ```# TODO``` comment and the ```pass``` statement:
         self.log("#28.1 Received ", message["presentation_request"]["name"])
 ```
 
-Right now this just prints out information received in the proof, but in "real life" your application could do something useful with this information.
+Right now this just verifies the proof received and prints out the attributes it reveals, but in "real life" your application could do something useful with this information.
 
 Now you can run the Faber/Alice/Acme script from the "Preview of the Acme Controller" section above, and you should see Acme receive a proof from Alice!
@@ -260,9 +267,6 @@ with the following code:
             {
                 "comment": f"Issuing credential, exchange {credential_exchange_id}",
                 "credential_preview": cred_preview
-                # "credential_preview": CredentialPreview(
-                #     attributes=CredAttrSpec.list_plain(cred_attrs)
-                # ).serialize()
             }
         )
 ```
diff --git a/demo/AriesOpenAPIDemo.md b/demo/AriesOpenAPIDemo.md
index 76672bbd49..3baa7fab23 100644
--- a/demo/AriesOpenAPIDemo.md
+++ b/demo/AriesOpenAPIDemo.md
@@ -491,7 +491,7 @@ Finally, we need put into the JSON the data values for the credential proposal s
       },
       {
         "name": "timestamp",
-        "value": "123456789"
+        "value": "1234567890"
       },
       {
         "name": "date",
diff --git a/demo/runners/faber.py b/demo/runners/faber.py
index 3045ee442f..0bdc2f924d 100644
--- a/demo/runners/faber.py
+++ b/demo/runners/faber.py
@@ -127,8 +127,7 @@ async def handle_present_proof(self, message):
             log_status("#27 Process the proof provided by X")
             log_status("#28 Check if proof is valid")
             proof = await self.admin_POST(
-                f"/present-proof/records/{presentation_exchange_id}/"
-                "verify-presentation"
+                f"/present-proof/records/{presentation_exchange_id}/verify-presentation"
             )
             self.log("Proof =", proof["verified"])
 
@@ -277,7 +276,6 @@ async def main(
                 "trace": exchange_tracing,
             }
             await agent.admin_POST("/issue-credential/send-offer", offer_request)
 
-            # TODO issue an additional credential for Student ID
 
         elif option == "2":
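For orientation only (an editor's illustration, not part of the patch): the Acme/Faber snippets above post to `/present-proof/records/{presentation_exchange_id}/verify-presentation` and log `proof["verified"]`. The sketch below makes the same call as a standalone script and prints the revealed attributes using the `requested_proof["revealed_attrs"]` layout seen in the INDY_PROOF_NAME fixture; the admin URL, the string-valued `verified` field, and the presence of a `presentation` key on the returned exchange record are assumptions, not taken from this diff.

```python
import asyncio

import aiohttp

ADMIN_URL = "http://localhost:8021"  # assumed admin endpoint of the verifying agent


async def verify_and_report(pres_ex_id: str) -> None:
    """Verify a received presentation via the admin API and print what it reveals."""
    async with aiohttp.ClientSession() as session:
        # Same endpoint the demo controller calls through its admin_POST helper
        async with session.post(
            f"{ADMIN_URL}/present-proof/records/{pres_ex_id}/verify-presentation"
        ) as resp:
            record = await resp.json()

    # "verified" is assumed to come back as a string flag on the exchange record
    print("Proof =", record.get("verified"))

    # Each revealed attribute carries a raw value plus its integer encoding,
    # as in the INDY_PROOF_NAME fixture; the "presentation" key is assumed here.
    revealed = (
        record.get("presentation", {})
        .get("requested_proof", {})
        .get("revealed_attrs", {})
    )
    for referent, attr in revealed.items():
        print(f"{referent}: raw={attr['raw']} encoded={attr['encoded']}")


if __name__ == "__main__":
    asyncio.run(verify_and_report("dummy-presentation-exchange-id"))
```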