diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 8b36d741b2..2f4a7c17c6 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -44,6 +44,7 @@
     "remoteUser": "vscode",
     "remoteEnv": {
+        "RUST_LOG":"aries-askar::log::target=error"
         //"PATH": "${containerEnv:PATH}:${workspaceRoot}/.venv/bin"
     },
diff --git a/aries_cloudagent/connections/base_manager.py b/aries_cloudagent/connections/base_manager.py
index 74a6384356..f1d2ce9fa8 100644
--- a/aries_cloudagent/connections/base_manager.py
+++ b/aries_cloudagent/connections/base_manager.py
@@ -18,7 +18,6 @@
     Ed25519VerificationKey2020,
     JsonWebKey2020,
 )
-
 from ..cache.base import BaseCache
 from ..config.base import InjectionError
 from ..config.logging import get_logger_inst
@@ -61,7 +60,7 @@ class BaseConnectionManagerError(BaseError):
 class BaseConnectionManager:
     """Class to provide utilities regarding connection_targets."""
 
-    RECORD_TYPE_DID_DOC = "did_doc"
+    RECORD_TYPE_DID_DOC = "did_doc"  # legacy
     RECORD_TYPE_DID_KEY = "did_key"
 
     def __init__(self, profile: Profile):
@@ -123,6 +122,7 @@ async def create_did_document(
                     f"Router connection not completed: {router_id}"
                 )
             routing_doc, _ = await self.fetch_did_document(router.their_did)
+            assert isinstance(routing_doc, DIDDoc)
             if not routing_doc.service:
                 raise BaseConnectionManagerError(
                     f"No services defined by routing DIDDoc: {router_id}"
                 )
@@ -671,6 +671,7 @@ async def fetch_did_document(self, did: str) -> Tuple[DIDDoc, StorageRecord]:
         Args:
             did: The DID to search for
         """
+        # legacy documents for unqualified dids
         async with self._profile.session() as session:
             storage = session.inject(BaseStorage)
             record = await storage.find_record(self.RECORD_TYPE_DID_DOC, {"did": did})
diff --git a/aries_cloudagent/resolver/__init__.py b/aries_cloudagent/resolver/__init__.py
index 793bc383f8..a72b017dbe 100644
--- a/aries_cloudagent/resolver/__init__.py
+++ b/aries_cloudagent/resolver/__init__.py
@@ -50,3 +50,15 @@ async def setup(context: InjectionContext):
     ).provide(context.settings, context.injector)
     await universal_resolver.setup(context)
     registry.register_resolver(universal_resolver)
+
+    peer_did_2_resolver = ClassProvider(
+        "aries_cloudagent.resolver.default.peer2.PeerDID2Resolver"
+    ).provide(context.settings, context.injector)
+    await peer_did_2_resolver.setup(context)
+    registry.register_resolver(peer_did_2_resolver)
+
+    peer_did_3_resolver = ClassProvider(
+        "aries_cloudagent.resolver.default.peer3.PeerDID3Resolver"
+    ).provide(context.settings, context.injector)
+    await peer_did_3_resolver.setup(context)
+    registry.register_resolver(peer_did_3_resolver)
diff --git a/aries_cloudagent/resolver/default/peer2.py b/aries_cloudagent/resolver/default/peer2.py
new file mode 100644
index 0000000000..62520fa306
--- /dev/null
+++ b/aries_cloudagent/resolver/default/peer2.py
@@ -0,0 +1,85 @@
+"""Peer DID Resolver.
+
+Resolution is performed using the peer-did-python library https://github.com/sicpa-dlab/peer-did-python.
+""" + +from typing import Optional, Pattern, Sequence, Text, Union + +from peerdid.dids import ( + is_peer_did, + PEER_DID_PATTERN, + resolve_peer_did, + DID, + DIDDocument, +) + +from ...config.injection_context import InjectionContext +from ...core.profile import Profile +from ..base import BaseDIDResolver, DIDNotFound, ResolverType +from .peer3 import PeerDID3Resolver + + +class PeerDID2Resolver(BaseDIDResolver): + """Peer DID Resolver.""" + + def __init__(self): + """Initialize Key Resolver.""" + super().__init__(ResolverType.NATIVE) + + async def setup(self, context: InjectionContext): + """Perform required setup for Key DID resolution.""" + + @property + def supported_did_regex(self) -> Pattern: + """Return supported_did_regex of Key DID Resolver.""" + return PEER_DID_PATTERN + + async def _resolve( + self, + profile: Profile, + did: str, + service_accept: Optional[Sequence[Text]] = None, + ) -> dict: + """Resolve a Key DID.""" + try: + peer_did = is_peer_did(did) + except Exception as e: + raise DIDNotFound(f"peer_did is not formatted correctly: {did}") from e + if peer_did: + did_doc = self.resolve_peer_did_with_service_key_reference(did) + await PeerDID3Resolver().create_and_store_document(profile, did_doc) + else: + raise DIDNotFound(f"did is not a peer did: {did}") + + return did_doc.dict() + + def resolve_peer_did_with_service_key_reference( + self, peer_did_2: Union[str, DID] + ) -> DIDDocument: + """Generate a DIDDocument from the did:peer:2 based on peer-did-python library. + + And additional modification to ensure recipient key + references verificationmethod in same document. + """ + return _resolve_peer_did_with_service_key_reference(peer_did_2) + + +def _resolve_peer_did_with_service_key_reference( + peer_did_2: Union[str, DID] +) -> DIDDocument: + try: + doc = resolve_peer_did(peer_did_2) + ## WORKAROUND LIBRARY NOT REREFERENCING RECEIPIENT_KEY + services = doc.service + signing_keys = [ + vm + for vm in doc.verification_method or [] + if vm.type == "Ed25519VerificationKey2020" + ] + if services and signing_keys: + services[0].__dict__["recipient_keys"] = [signing_keys[0].id] + else: + raise Exception("no recipient_key signing_key pair") + except Exception as e: + raise ValueError("pydantic validation error:" + str(e)) + return doc diff --git a/aries_cloudagent/resolver/default/peer3.py b/aries_cloudagent/resolver/default/peer3.py new file mode 100644 index 0000000000..bfe79ce15c --- /dev/null +++ b/aries_cloudagent/resolver/default/peer3.py @@ -0,0 +1,134 @@ +"""Peer DID Resolver. + +Resolution is performed by converting did:peer:2 to did:peer:3 according to +https://identity.foundation/peer-did-method-spec/#generation-method:~:text=Method%203%3A%20DID%20Shortening%20with%20SHA%2D256%20Hash +DID Document is just a did:peer:2 document (resolved by peer-did-python) where +the did:peer:2 has been replaced with the did:peer:3. 
+""" + +import re +from copy import deepcopy +from hashlib import sha256 +from typing import Optional, Pattern, Sequence, Text +from multiformats import multibase, multicodec + +from peerdid.dids import ( + DID, + MalformedPeerDIDError, + DIDDocument, +) +from peerdid.keys import to_multibase, MultibaseFormat +from ...wallet.util import bytes_to_b58 + +from ...connections.base_manager import BaseConnectionManager +from ...config.injection_context import InjectionContext +from ...core.profile import Profile +from ...storage.base import BaseStorage +from ...storage.error import StorageNotFoundError +from ...storage.record import StorageRecord + +from ..base import BaseDIDResolver, DIDNotFound, ResolverType + +RECORD_TYPE_DID_DOCUMENT = "did_document" # pydid DIDDocument + + +class PeerDID3Resolver(BaseDIDResolver): + """Peer DID Resolver.""" + + def __init__(self): + """Initialize Key Resolver.""" + super().__init__(ResolverType.NATIVE) + + async def setup(self, context: InjectionContext): + """Perform required setup for Key DID resolution.""" + + @property + def supported_did_regex(self) -> Pattern: + """Return supported_did_regex of Key DID Resolver.""" + return re.compile(r"^did:peer:3(.*)") + + async def _resolve( + self, + profile: Profile, + did: str, + service_accept: Optional[Sequence[Text]] = None, + ) -> dict: + """Resolve a Key DID.""" + if did.startswith("did:peer:3"): + # retrieve did_doc from storage using did:peer:3 + async with profile.session() as session: + storage = session.inject(BaseStorage) + record = await storage.find_record( + RECORD_TYPE_DID_DOCUMENT, {"did": did} + ) + did_doc = DIDDocument.from_json(record.value) + else: + raise DIDNotFound(f"did is not a did:peer:3 {did}") + + return did_doc.dict() + + async def create_and_store_document( + self, profile: Profile, peer_did_2_doc: DIDDocument + ): + """Injest did:peer:2 document create did:peer:3 and store document.""" + if not peer_did_2_doc.id.startswith("did:peer:2"): + raise MalformedPeerDIDError("did:peer:2 expected") + + dp3_doc = deepcopy(peer_did_2_doc) + _convert_to_did_peer_3_document(dp3_doc) + try: + async with profile.session() as session: + storage = session.inject(BaseStorage) + record = await storage.find_record( + RECORD_TYPE_DID_DOCUMENT, {"did": dp3_doc.id} + ) + except StorageNotFoundError: + record = StorageRecord( + RECORD_TYPE_DID_DOCUMENT, + dp3_doc.to_json(), + {"did": dp3_doc.id}, + ) + async with profile.session() as session: + storage: BaseStorage = session.inject(BaseStorage) + await storage.add_record(record) + await set_keys_from_did_doc(profile, dp3_doc) + else: + # If doc already exists for did:peer:3 then it cannot have been modified + pass + return dp3_doc + + +async def set_keys_from_did_doc(profile, did_doc): + """Add verificationMethod keys for lookup by conductor.""" + conn_mgr = BaseConnectionManager(profile) + + for vm in did_doc.verification_method or []: + if vm.controller == did_doc.id: + if vm.public_key_base58: + await conn_mgr.add_key_for_did(did_doc.id, vm.public_key_base58) + if vm.public_key_multibase: + pk = multibase.decode(vm.public_key_multibase) + if len(pk) == 32: # No multicodec prefix + pk = bytes_to_b58(pk) + else: + codec, key = multicodec.unwrap(pk) + if codec == multicodec.multicodec("ed25519-pub"): + pk = bytes_to_b58(key) + else: + continue + await conn_mgr.add_key_for_did(did_doc.id, pk) + + +def _convert_to_did_peer_3_document(dp2_document: DIDDocument) -> DIDDocument: + content = to_multibase( + 
+        sha256(dp2_document.id.lstrip("did:peer:2").encode()).digest(),
+        MultibaseFormat.BASE58,
+    )
+    dp3 = DID("did:peer:3" + content)
+    dp2 = dp2_document.id
+
+    dp2_doc_str = dp2_document.to_json()
+    dp3_doc_str = dp2_doc_str.replace(dp2, dp3)
+
+    dp3_doc = DIDDocument.from_json(dp3_doc_str)
+    return dp3_doc
diff --git a/aries_cloudagent/resolver/default/tests/test_peer2.py b/aries_cloudagent/resolver/default/tests/test_peer2.py
new file mode 100644
index 0000000000..c384b934ff
--- /dev/null
+++ b/aries_cloudagent/resolver/default/tests/test_peer2.py
@@ -0,0 +1,93 @@
+"""Test PeerDIDResolver."""
+
+from asynctest import mock as async_mock
+from peerdid.dids import resolve_peer_did, DIDDocument, DID
+import pytest
+
+from .. import legacy_peer as test_module
+from ....cache.base import BaseCache
+from ....cache.in_memory import InMemoryCache
+from ....core.in_memory import InMemoryProfile
+from ....core.profile import Profile
+from ...did_resolver import DIDResolver
+from ..peer2 import PeerDID2Resolver, _resolve_peer_did_with_service_key_reference
+
+
+TEST_DID0 = "did:peer:2.Ez6LSpkcni2KTTxf4nAp6cPxjRbu26Tj4b957BgHcknVeNFEj.Vz6MksXhfmxm2i3RnoHH2mKQcx7EY4tToJR9JziUs6bp8a6FM.SeyJ0IjoiZGlkLWNvbW11bmljYXRpb24iLCJzIjoiaHR0cDovL2hvc3QuZG9ja2VyLmludGVybmFsOjkwNzAiLCJyZWNpcGllbnRfa2V5cyI6W119"
+TEST_DID0_DOC = _resolve_peer_did_with_service_key_reference(TEST_DID0).dict()
+TEST_DID0_RAW_DOC = resolve_peer_did(TEST_DID0).dict()
+
+
+@pytest.fixture
+def common_resolver():
+    """Resolver fixture."""
+    yield DIDResolver([PeerDID2Resolver()])
+
+
+@pytest.fixture
+def resolver():
+    """Resolver fixture."""
+    yield PeerDID2Resolver()
+
+
+@pytest.fixture
+def profile():
+    """Profile fixture."""
+    profile = InMemoryProfile.test_profile()
+    profile.context.injector.bind_instance(BaseCache, InMemoryCache())
+    yield profile
+
+
+class TestPeerDID2Resolver:
+    @pytest.mark.asyncio
+    async def test_resolution_types(self, resolver: PeerDID2Resolver, profile: Profile):
+        """Test resolution types."""
+        assert DID.is_valid(TEST_DID0)
+        assert isinstance(resolve_peer_did(TEST_DID0), DIDDocument)
+        assert isinstance(
+            _resolve_peer_did_with_service_key_reference(TEST_DID0), DIDDocument
+        )
+
+    @pytest.mark.asyncio
+    async def test_supports(self, resolver: PeerDID2Resolver, profile: Profile):
+        """Test supports."""
+        with async_mock.patch.object(test_module, "BaseConnectionManager") as mock_mgr:
+            mock_mgr.return_value = async_mock.MagicMock(
+                fetch_did_document=async_mock.CoroutineMock(
+                    return_value=(TEST_DID0_DOC, None)
+                )
+            )
+            assert await resolver.supports(profile, TEST_DID0)
+
+    @pytest.mark.asyncio
+    async def test_supports_no_cache(
+        self, resolver: PeerDID2Resolver, profile: Profile
+    ):
+        """Test supports without cache."""
+        profile.context.injector.clear_binding(BaseCache)
+        with async_mock.patch.object(test_module, "BaseConnectionManager") as mock_mgr:
+            mock_mgr.return_value = async_mock.MagicMock(
+                fetch_did_document=async_mock.CoroutineMock(
+                    return_value=(TEST_DID0_DOC, None)
+                )
+            )
+            assert await resolver.supports(profile, TEST_DID0)
+
+    @pytest.mark.asyncio
+    async def test_supports_service_referenced(
+        self, resolver: PeerDID2Resolver, common_resolver: DIDResolver, profile: Profile
+    ):
+        """Test that the referenced service recipient key can be dereferenced."""
+        profile.context.injector.clear_binding(BaseCache)
+        with async_mock.patch.object(test_module, "BaseConnectionManager") as mock_mgr:
+            mock_mgr.return_value = async_mock.MagicMock(
+                fetch_did_document=async_mock.CoroutineMock(
+                    return_value=(TEST_DID0_DOC, None)
+                )
+            )
+            recipient_key = await common_resolver.dereference(
+                profile,
+                TEST_DID0_DOC["service"][0]["recipient_keys"][0],
+                document=DIDDocument.deserialize(TEST_DID0_DOC),
+            )
+            assert recipient_key
diff --git a/aries_cloudagent/resolver/default/tests/test_peer3.py b/aries_cloudagent/resolver/default/tests/test_peer3.py
new file mode 100644
index 0000000000..e311ce4149
--- /dev/null
+++ b/aries_cloudagent/resolver/default/tests/test_peer3.py
@@ -0,0 +1,93 @@
+"""Test PeerDIDResolver."""
+
+from hashlib import sha256
+from peerdid.keys import to_multibase, MultibaseFormat
+
+from asynctest import mock as async_mock
+from peerdid.dids import DIDDocument, DID
+import pytest
+
+from .. import peer3 as test_module
+from ....cache.base import BaseCache
+from ....cache.in_memory import InMemoryCache
+from ....core.in_memory import InMemoryProfile
+from ....core.profile import Profile
+from ...did_resolver import DIDResolver
+from ..peer2 import _resolve_peer_did_with_service_key_reference
+from ..peer3 import PeerDID3Resolver, _convert_to_did_peer_3_document
+
+
+TEST_DP2 = "did:peer:2.Ez6LSpkcni2KTTxf4nAp6cPxjRbu26Tj4b957BgHcknVeNFEj.Vz6MksXhfmxm2i3RnoHH2mKQcx7EY4tToJR9JziUs6bp8a6FM.SeyJ0IjoiZGlkLWNvbW11bmljYXRpb24iLCJzIjoiaHR0cDovL2hvc3QuZG9ja2VyLmludGVybmFsOjkwNzAiLCJyZWNpcGllbnRfa2V5cyI6W119"
+TEST_DID0_DOC = _resolve_peer_did_with_service_key_reference(TEST_DP2)
+
+TEST_DP3 = DID(
+    "did:peer:3"
+    + to_multibase(
+        sha256(TEST_DP2.lstrip("did:peer:2").encode()).digest(), MultibaseFormat.BASE58
+    )
+)
+TEST_DP3_DOC = _convert_to_did_peer_3_document(TEST_DID0_DOC)
+
+
+@pytest.fixture
+def common_resolver():
+    """Resolver fixture."""
+    yield DIDResolver([PeerDID3Resolver()])
+
+
+@pytest.fixture
+def resolver():
+    """Resolver fixture."""
+    yield PeerDID3Resolver()
+
+
+@pytest.fixture
+def profile():
+    """Profile fixture."""
+    profile = InMemoryProfile.test_profile()
+    profile.context.injector.bind_instance(BaseCache, InMemoryCache())
+    yield profile
+
+
+class TestPeerDID3Resolver:
+    @pytest.mark.asyncio
+    async def test_resolution_types(self, resolver: PeerDID3Resolver, profile: Profile):
+        """Test resolution types."""
+        assert DID.is_valid(TEST_DP3)
+        assert isinstance(TEST_DP3_DOC, DIDDocument)
+        assert TEST_DP3_DOC.id == TEST_DP3
+
+    @pytest.mark.asyncio
+    async def test_supports(self, resolver: PeerDID3Resolver, profile: Profile):
+        """Test supports."""
+        with async_mock.patch.object(test_module, "PeerDID3Resolver") as mock_resolve:
+            mock_resolve.return_value = async_mock.MagicMock(
+                _resolve=async_mock.CoroutineMock(return_value=TEST_DP3_DOC)
+            )
+            assert await resolver.supports(profile, TEST_DP3)
+
+    @pytest.mark.asyncio
+    async def test_supports_no_cache(
+        self, resolver: PeerDID3Resolver, profile: Profile
+    ):
+        """Test supports without cache."""
+        profile.context.injector.clear_binding(BaseCache)
+        with async_mock.patch.object(test_module, "PeerDID3Resolver") as mock_resolve:
+            mock_resolve.return_value = async_mock.MagicMock(
+                _resolve=async_mock.CoroutineMock(return_value=TEST_DP3_DOC)
+            )
+            assert await resolver.supports(profile, TEST_DP3)
+
+    @pytest.mark.asyncio
+    async def test_supports_service_referenced(
+        self, resolver: PeerDID3Resolver, common_resolver: DIDResolver, profile: Profile
+    ):
+        """Test that the referenced service recipient key can be dereferenced."""
+        profile.context.injector.clear_binding(BaseCache)
+
+        recipient_key = await common_resolver.dereference(
+            profile,
+            TEST_DP3_DOC.dict()["service"][0]["recipient_keys"][0],
+            document=TEST_DP3_DOC,
+        )
+        assert recipient_key
diff --git a/aries_cloudagent/resolver/did_resolver.py b/aries_cloudagent/resolver/did_resolver.py
index 4438fa1b9a..a25576254c 100644
--- a/aries_cloudagent/resolver/did_resolver.py
+++ b/aries_cloudagent/resolver/did_resolver.py
@@ -135,14 +135,14 @@ async def dereference(
         """Dereference a DID URL to its corresponding DID Doc object."""
         try:
             parsed = DIDUrl.parse(did_url)
-            if not parsed.did:
+            if not parsed.did and not document:
                 raise ValueError("Invalid DID URL")
         except DIDError as err:
             raise ResolverError(
                 "Failed to parse DID URL from {}".format(did_url)
             ) from err
 
-        if document and parsed.did != document.id:
+        if document and parsed.did and parsed.did != document.id:
             document = None
 
         if not document:
diff --git a/poetry.lock b/poetry.lock
index 1aaad7392c..03c7d1cf76 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1719,6 +1719,25 @@ files = [
     {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
 ]
 
+[[package]]
+name = "peerdid"
+version = "0.5.2"
+description = "PeerDID for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "peerdid-0.5.2-py3-none-any.whl", hash = "sha256:e92be3ba483cf3e37e87d9855afae2346266f8e4d86809071edb6dbce510d569"},
+    {file = "peerdid-0.5.2.tar.gz", hash = "sha256:efc4c16a4cf2b3802a62e0d5be459a5019ccf48befdc17d1c97f6d47be4f7130"},
+]
+
+[package.dependencies]
+base58 = ">=2.1.0,<2.2.0"
+pydid = ">=0.3.5,<0.4.0"
+varint = ">=1.0.2,<1.1.0"
+
+[package.extras]
+tests = ["pytest (==6.2.5)", "pytest-xdist (==2.3.0)"]
+
 [[package]]
 name = "pillow"
 version = "10.0.0"
@@ -1871,7 +1890,7 @@ name = "prompt-toolkit"
 version = "2.0.10"
 description = "Library for building powerful interactive command lines in Python"
 optional = false
-python-versions = ">=2.6,<3.0.dev0 || >=3.3.dev0"
+python-versions = ">=2.6,<3.0.0 || >=3.3.0"
 files = [
     {file = "prompt_toolkit-2.0.10-py2-none-any.whl", hash = "sha256:e7f8af9e3d70f514373bf41aa51bc33af12a6db3f71461ea47fea985defb2c31"},
     {file = "prompt_toolkit-2.0.10-py3-none-any.whl", hash = "sha256:46642344ce457641f28fc9d1c9ca939b63dadf8df128b86f1b9860e59c73a5e4"},
@@ -2260,6 +2279,7 @@ files = [
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
     {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
     {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
     {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -2267,8 +2287,15 @@
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2285,6 +2312,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2292,6 +2320,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2685,6 +2714,16 @@ files = [ {file = "ursa_bbs_signatures-1.0.1-py3-none-win_amd64.whl", hash = "sha256:ffd5f8cf1518c706b372feccac5d727a9d6c64a68f54f4d109133c4101108368"}, ] +[[package]] +name = "varint" +version = "1.0.2" +description = "Simple python varint implementation" +optional = false +python-versions = "*" +files = [ + {file = "varint-1.0.2.tar.gz", hash = "sha256:a6ecc02377ac5ee9d65a6a8ad45c9ff1dac8ccee19400a5950fb51d594214ca5"}, +] + [[package]] name = "virtualenv" version = "20.24.4" @@ -2847,4 +2886,4 @@ indy = ["python3-indy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "fc9e090bc514183ff4558d42ee83e673d8af0ceefe7f4f7b66623b1ebf5a785e" +content-hash = "3572d33f01ad13410e0ae03d925d0ab238b66e7c888ccfb29501f72f9bec66e7" diff --git a/pyproject.toml b/pyproject.toml index 363ad05fb4..a634cc0ba7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,7 @@ ursa-bbs-signatures= { version = "~1.0.1", optional = true } # indy python3-indy= { version = "^1.11.1", optional = true } +peerdid = "^0.5.2" [tool.poetry.group.dev.dependencies] pre-commit="~3.3.3"