diff --git a/aries_cloudagent/holder/routes.py b/aries_cloudagent/holder/routes.py index ce8c58a21e..c961c1c9a1 100644 --- a/aries_cloudagent/holder/routes.py +++ b/aries_cloudagent/holder/routes.py @@ -14,7 +14,7 @@ from ..admin.request_context import AdminRequestContext from ..indy.holder import IndyHolder, IndyHolderError -from ..indy.sdk.models.cred_precis import IndyCredInfoSchema +from ..indy.models.cred_precis import IndyCredInfoSchema from ..ledger.base import BaseLedger from ..ledger.error import LedgerError from ..messaging.models.openapi import OpenAPISchema diff --git a/aries_cloudagent/holder/tests/test_routes.py b/aries_cloudagent/holder/tests/test_routes.py index a4043ee310..58d9dbb709 100644 --- a/aries_cloudagent/holder/tests/test_routes.py +++ b/aries_cloudagent/holder/tests/test_routes.py @@ -17,11 +17,11 @@ VC_RECORD = VCRecord( contexts=[ "https://www.w3.org/2018/credentials/v1", - "https://www.w3.org/2018/credentials/v1/examples", + "https://www.w3.org/2018/credentials/examples/v1", ], - types=[ - "VerifiableCredential", - "AlumniCredential", + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", ], issuer_id="https://example.edu/issuers/565049", subject_ids=["did:example:ebfeb1f712ebc6f1c276e12ec21"], diff --git a/aries_cloudagent/indy/sdk/models/__init__.py b/aries_cloudagent/indy/models/__init__.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/__init__.py rename to aries_cloudagent/indy/models/__init__.py diff --git a/aries_cloudagent/indy/sdk/models/cred.py b/aries_cloudagent/indy/models/cred.py similarity index 97% rename from aries_cloudagent/indy/sdk/models/cred.py rename to aries_cloudagent/indy/models/cred.py index 82e9bafec7..cf8c256270 100644 --- a/aries_cloudagent/indy/sdk/models/cred.py +++ b/aries_cloudagent/indy/models/cred.py @@ -4,8 +4,8 @@ from marshmallow import EXCLUDE, fields -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.valid import ( +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.valid import ( INDY_CRED_DEF_ID, INDY_REV_REG_ID, INDY_SCHEMA_ID, diff --git a/aries_cloudagent/indy/sdk/models/cred_abstract.py b/aries_cloudagent/indy/models/cred_abstract.py similarity index 95% rename from aries_cloudagent/indy/sdk/models/cred_abstract.py rename to aries_cloudagent/indy/models/cred_abstract.py index ca5649d7c9..a11c687153 100644 --- a/aries_cloudagent/indy/sdk/models/cred_abstract.py +++ b/aries_cloudagent/indy/models/cred_abstract.py @@ -4,8 +4,8 @@ from marshmallow import EXCLUDE, fields -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.valid import INDY_CRED_DEF_ID, INDY_SCHEMA_ID, NUM_STR_WHOLE +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.valid import INDY_CRED_DEF_ID, INDY_SCHEMA_ID, NUM_STR_WHOLE class IndyKeyCorrectnessProof(BaseModel): diff --git a/aries_cloudagent/indy/sdk/models/cred_def.py b/aries_cloudagent/indy/models/cred_def.py similarity index 95% rename from aries_cloudagent/indy/sdk/models/cred_def.py rename to aries_cloudagent/indy/models/cred_def.py index ae7ecf0a16..2124041c06 100644 --- a/aries_cloudagent/indy/sdk/models/cred_def.py +++ b/aries_cloudagent/indy/models/cred_def.py @@ -2,8 +2,8 @@ from marshmallow import fields, Schema -from ....messaging.models.openapi import OpenAPISchema -from ....messaging.valid import INDY_CRED_DEF_ID, INDY_VERSION, NUM_STR_WHOLE +from 
...messaging.models.openapi import OpenAPISchema +from ...messaging.valid import INDY_CRED_DEF_ID, INDY_VERSION, NUM_STR_WHOLE class CredDefValuePrimarySchema(OpenAPISchema): diff --git a/aries_cloudagent/indy/sdk/models/cred_precis.py b/aries_cloudagent/indy/models/cred_precis.py similarity index 94% rename from aries_cloudagent/indy/sdk/models/cred_precis.py rename to aries_cloudagent/indy/models/cred_precis.py index 453602e64c..6fcea3a3ca 100644 --- a/aries_cloudagent/indy/sdk/models/cred_precis.py +++ b/aries_cloudagent/indy/models/cred_precis.py @@ -4,9 +4,9 @@ from marshmallow import EXCLUDE, fields -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.models.openapi import OpenAPISchema -from ....messaging.valid import ( +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.models.openapi import OpenAPISchema +from ...messaging.valid import ( INDY_CRED_DEF_ID, INDY_CRED_REV_ID, INDY_REV_REG_ID, diff --git a/aries_cloudagent/indy/sdk/models/cred_request.py b/aries_cloudagent/indy/models/cred_request.py similarity index 92% rename from aries_cloudagent/indy/sdk/models/cred_request.py rename to aries_cloudagent/indy/models/cred_request.py index 07dcef3453..5a022bebc0 100644 --- a/aries_cloudagent/indy/sdk/models/cred_request.py +++ b/aries_cloudagent/indy/models/cred_request.py @@ -4,8 +4,8 @@ from marshmallow import EXCLUDE, fields -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.valid import INDY_CRED_DEF_ID, INDY_DID, NUM_STR_WHOLE +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.valid import INDY_CRED_DEF_ID, INDY_DID, NUM_STR_WHOLE class IndyCredRequest(BaseModel): diff --git a/aries_cloudagent/indy/sdk/models/non_rev_interval.py b/aries_cloudagent/indy/models/non_rev_interval.py similarity index 94% rename from aries_cloudagent/indy/sdk/models/non_rev_interval.py rename to aries_cloudagent/indy/models/non_rev_interval.py index 118d004796..65e2eb8e39 100644 --- a/aries_cloudagent/indy/sdk/models/non_rev_interval.py +++ b/aries_cloudagent/indy/models/non_rev_interval.py @@ -4,8 +4,8 @@ from marshmallow import EXCLUDE, fields -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.valid import INT_EPOCH +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.valid import INT_EPOCH class IndyNonRevocationInterval(BaseModel): diff --git a/aries_cloudagent/indy/sdk/models/predicate.py b/aries_cloudagent/indy/models/predicate.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/predicate.py rename to aries_cloudagent/indy/models/predicate.py diff --git a/aries_cloudagent/indy/sdk/models/pres_preview.py b/aries_cloudagent/indy/models/pres_preview.py similarity index 97% rename from aries_cloudagent/indy/sdk/models/pres_preview.py rename to aries_cloudagent/indy/models/pres_preview.py index 6eee1f9623..b276821fd9 100644 --- a/aries_cloudagent/indy/sdk/models/pres_preview.py +++ b/aries_cloudagent/indy/models/pres_preview.py @@ -6,14 +6,14 @@ from marshmallow import EXCLUDE, fields -from ....ledger.indy import IndySdkLedger -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.util import canon -from ....messaging.valid import INDY_CRED_DEF_ID, INDY_PREDICATE -from ....protocols.didcomm_prefix import DIDCommPrefix -from ....wallet.util import b64_to_str - -from ...util import generate_pr_nonce +from ...ledger.indy import IndySdkLedger +from 
...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.util import canon +from ...messaging.valid import INDY_CRED_DEF_ID, INDY_PREDICATE +from ...protocols.didcomm_prefix import DIDCommPrefix +from ...wallet.util import b64_to_str + +from ..util import generate_pr_nonce from .non_rev_interval import IndyNonRevocationInterval from .predicate import Predicate diff --git a/aries_cloudagent/indy/sdk/models/proof.py b/aries_cloudagent/indy/models/proof.py similarity index 99% rename from aries_cloudagent/indy/sdk/models/proof.py rename to aries_cloudagent/indy/models/proof.py index 5f63a8d155..b2aebaf925 100644 --- a/aries_cloudagent/indy/sdk/models/proof.py +++ b/aries_cloudagent/indy/models/proof.py @@ -4,15 +4,15 @@ from marshmallow import EXCLUDE, fields, validate -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.valid import ( +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.valid import ( INDY_CRED_DEF_ID, INDY_REV_REG_ID, INDY_SCHEMA_ID, INT_EPOCH, NUM_STR_WHOLE, ) -from ....utils.tracing import AdminAPIMessageTracingSchema +from ...utils.tracing import AdminAPIMessageTracingSchema from .predicate import Predicate from .requested_creds import ( diff --git a/aries_cloudagent/indy/sdk/models/proof_request.py b/aries_cloudagent/indy/models/proof_request.py similarity index 98% rename from aries_cloudagent/indy/sdk/models/proof_request.py rename to aries_cloudagent/indy/models/proof_request.py index 73720fbd49..84e8ce7cba 100644 --- a/aries_cloudagent/indy/sdk/models/proof_request.py +++ b/aries_cloudagent/indy/models/proof_request.py @@ -11,9 +11,9 @@ ValidationError, ) -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.models.openapi import OpenAPISchema -from ....messaging.valid import ( +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.models.openapi import OpenAPISchema +from ...messaging.valid import ( INDY_CRED_DEF_ID, INDY_PREDICATE, INDY_VERSION, diff --git a/aries_cloudagent/indy/sdk/models/requested_creds.py b/aries_cloudagent/indy/models/requested_creds.py similarity index 92% rename from aries_cloudagent/indy/sdk/models/requested_creds.py rename to aries_cloudagent/indy/models/requested_creds.py index cda5d66f48..074ec2c140 100644 --- a/aries_cloudagent/indy/sdk/models/requested_creds.py +++ b/aries_cloudagent/indy/models/requested_creds.py @@ -2,8 +2,8 @@ from marshmallow import fields -from ....messaging.models.openapi import OpenAPISchema -from ....messaging.valid import INT_EPOCH +from ...messaging.models.openapi import OpenAPISchema +from ...messaging.valid import INT_EPOCH class IndyRequestedCredsRequestedAttrSchema(OpenAPISchema): diff --git a/aries_cloudagent/indy/sdk/models/revocation.py b/aries_cloudagent/indy/models/revocation.py similarity index 98% rename from aries_cloudagent/indy/sdk/models/revocation.py rename to aries_cloudagent/indy/models/revocation.py index f512d58943..fdc90008ae 100644 --- a/aries_cloudagent/indy/sdk/models/revocation.py +++ b/aries_cloudagent/indy/models/revocation.py @@ -4,8 +4,8 @@ from marshmallow import EXCLUDE, fields, validate -from ....messaging.models.base import BaseModel, BaseModelSchema -from ....messaging.valid import ( +from ...messaging.models.base import BaseModel, BaseModelSchema +from ...messaging.valid import ( BASE58_SHA256_HASH, INDY_CRED_DEF_ID, INDY_REV_REG_ID, diff --git a/aries_cloudagent/indy/sdk/models/schema.py b/aries_cloudagent/indy/models/schema.py 
similarity index 86% rename from aries_cloudagent/indy/sdk/models/schema.py rename to aries_cloudagent/indy/models/schema.py index 877c170273..4a48aaf1b3 100644 --- a/aries_cloudagent/indy/sdk/models/schema.py +++ b/aries_cloudagent/indy/models/schema.py @@ -2,8 +2,8 @@ from marshmallow import fields -from ....messaging.models.openapi import OpenAPISchema -from ....messaging.valid import INDY_SCHEMA_ID, INDY_VERSION, NATURAL_NUM +from ...messaging.models.openapi import OpenAPISchema +from ...messaging.valid import INDY_SCHEMA_ID, INDY_VERSION, NATURAL_NUM class SchemaSchema(OpenAPISchema): diff --git a/aries_cloudagent/indy/sdk/models/tests/__init__.py b/aries_cloudagent/indy/models/tests/__init__.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/tests/__init__.py rename to aries_cloudagent/indy/models/tests/__init__.py diff --git a/aries_cloudagent/indy/sdk/models/tests/test_cred.py b/aries_cloudagent/indy/models/tests/test_cred.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/tests/test_cred.py rename to aries_cloudagent/indy/models/tests/test_cred.py diff --git a/aries_cloudagent/indy/sdk/models/tests/test_cred_precis.py b/aries_cloudagent/indy/models/tests/test_cred_precis.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/tests/test_cred_precis.py rename to aries_cloudagent/indy/models/tests/test_cred_precis.py diff --git a/aries_cloudagent/indy/sdk/models/tests/test_non_rev_interval.py b/aries_cloudagent/indy/models/tests/test_non_rev_interval.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/tests/test_non_rev_interval.py rename to aries_cloudagent/indy/models/tests/test_non_rev_interval.py diff --git a/aries_cloudagent/indy/sdk/models/tests/test_pred.py b/aries_cloudagent/indy/models/tests/test_pred.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/tests/test_pred.py rename to aries_cloudagent/indy/models/tests/test_pred.py diff --git a/aries_cloudagent/indy/sdk/models/tests/test_pres_preview.py b/aries_cloudagent/indy/models/tests/test_pres_preview.py similarity index 99% rename from aries_cloudagent/indy/sdk/models/tests/test_pres_preview.py rename to aries_cloudagent/indy/models/tests/test_pres_preview.py index 6464edc463..4e3ffcadb7 100644 --- a/aries_cloudagent/indy/sdk/models/tests/test_pres_preview.py +++ b/aries_cloudagent/indy/models/tests/test_pres_preview.py @@ -8,8 +8,8 @@ from asynctest import TestCase as AsyncTestCase from asynctest import mock as async_mock -from .....messaging.util import canon -from .....protocols.didcomm_prefix import DIDCommPrefix +from ....messaging.util import canon +from ....protocols.didcomm_prefix import DIDCommPrefix from ..non_rev_interval import IndyNonRevocationInterval diff --git a/aries_cloudagent/indy/sdk/models/tests/test_proof.py b/aries_cloudagent/indy/models/tests/test_proof.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/tests/test_proof.py rename to aries_cloudagent/indy/models/tests/test_proof.py diff --git a/aries_cloudagent/indy/sdk/models/tests/test_proof_request.py b/aries_cloudagent/indy/models/tests/test_proof_request.py similarity index 100% rename from aries_cloudagent/indy/sdk/models/tests/test_proof_request.py rename to aries_cloudagent/indy/models/tests/test_proof_request.py diff --git a/aries_cloudagent/indy/sdk/models/xform.py b/aries_cloudagent/indy/models/xform.py similarity index 99% rename from aries_cloudagent/indy/sdk/models/xform.py rename to aries_cloudagent/indy/models/xform.py index 
4f28418e84..9e7d5e7601 100644 --- a/aries_cloudagent/indy/sdk/models/xform.py +++ b/aries_cloudagent/indy/models/xform.py @@ -1,6 +1,6 @@ """Utilities to deal with indy.""" -from ....indy.holder import IndyHolder +from ...indy.holder import IndyHolder from .pres_preview import IndyPresPreview diff --git a/aries_cloudagent/indy/sdk/verifier.py b/aries_cloudagent/indy/sdk/verifier.py index 5753f2c268..18cb15cf45 100644 --- a/aries_cloudagent/indy/sdk/verifier.py +++ b/aries_cloudagent/indy/sdk/verifier.py @@ -9,7 +9,7 @@ import indy.anoncreds from indy.error import IndyError -from ...indy.sdk.models.xform import indy_proof_req2non_revoc_intervals +from ...indy.models.xform import indy_proof_req2non_revoc_intervals from ...ledger.indy import IndySdkLedger from ...messaging.util import canon, encode diff --git a/aries_cloudagent/messaging/credential_definitions/routes.py b/aries_cloudagent/messaging/credential_definitions/routes.py index 6b772ec02f..baf5009b6b 100644 --- a/aries_cloudagent/messaging/credential_definitions/routes.py +++ b/aries_cloudagent/messaging/credential_definitions/routes.py @@ -17,7 +17,7 @@ from ...admin.request_context import AdminRequestContext from ...indy.issuer import IndyIssuer -from ...indy.sdk.models.cred_def import CredentialDefinitionSchema +from ...indy.models.cred_def import CredentialDefinitionSchema from ...ledger.base import BaseLedger from ...ledger.error import LedgerError from ...protocols.endorse_transaction.v1_0.manager import TransactionManager diff --git a/aries_cloudagent/messaging/schemas/routes.py b/aries_cloudagent/messaging/schemas/routes.py index 09d3b764ce..5715773046 100644 --- a/aries_cloudagent/messaging/schemas/routes.py +++ b/aries_cloudagent/messaging/schemas/routes.py @@ -18,7 +18,7 @@ from ...admin.request_context import AdminRequestContext from ...indy.issuer import IndyIssuer, IndyIssuerError -from ...indy.sdk.models.schema import SchemaSchema +from ...indy.models.schema import SchemaSchema from ...ledger.base import BaseLedger from ...ledger.error import LedgerError from ...protocols.endorse_transaction.v1_0.manager import TransactionManager diff --git a/aries_cloudagent/messaging/valid.py b/aries_cloudagent/messaging/valid.py index aa05d752b5..24650684e8 100644 --- a/aries_cloudagent/messaging/valid.py +++ b/aries_cloudagent/messaging/valid.py @@ -32,6 +32,19 @@ def _deserialize(self, value, attr, data, **kwargs): raise ValidationError("Field should be str or dict") +class StrOrNumberField(Field): + """String or Number field for Marshmallow.""" + + def _serialize(self, value, attr, obj, **kwargs): + return value + + def _deserialize(self, value, attr, data, **kwargs): + if isinstance(value, (str, float, int)): + return value + else: + raise ValidationError("Field should be str or int or float") + + class DictOrDictListField(Field): """Dict or Dict List field for Marshmallow.""" diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/models/credential_exchange.py b/aries_cloudagent/protocols/issue_credential/v1_0/models/credential_exchange.py index f95694e1db..98e2e68844 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/models/credential_exchange.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/models/credential_exchange.py @@ -7,10 +7,10 @@ from marshmallow import fields, validate from .....core.profile import ProfileSession -from .....indy.sdk.models.cred import IndyCredential, IndyCredentialSchema -from .....indy.sdk.models.cred_abstract import IndyCredAbstract, IndyCredAbstractSchema -from 
.....indy.sdk.models.cred_precis import IndyCredInfo, IndyCredInfoSchema -from .....indy.sdk.models.cred_request import IndyCredRequest, IndyCredRequestSchema +from .....indy.models.cred import IndyCredential, IndyCredentialSchema +from .....indy.models.cred_abstract import IndyCredAbstract, IndyCredAbstractSchema +from .....indy.models.cred_precis import IndyCredInfo, IndyCredInfoSchema +from .....indy.models.cred_request import IndyCredRequest, IndyCredRequestSchema from .....messaging.models.base_record import BaseExchangeRecord, BaseExchangeSchema from .....messaging.valid import INDY_CRED_DEF_ID, INDY_SCHEMA_ID, UUIDFour from .....storage.base import StorageError diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/handler.py index 301110a047..bdb77f0627 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/handler.py @@ -3,20 +3,17 @@ from abc import ABC, abstractclassmethod, abstractmethod import logging -from typing import Mapping, Tuple, Union +from typing import Mapping, Tuple from .....core.error import BaseError from .....core.profile import Profile from .....messaging.decorators.attach_decorator import AttachDecorator -from ..message_types import ATTACHMENT_FORMAT from ..messages.cred_format import V20CredFormat from ..messages.cred_proposal import V20CredProposal from ..messages.cred_offer import V20CredOffer from ..messages.cred_request import V20CredRequest from ..messages.cred_issue import V20CredIssue -from ..models.detail.indy import V20CredExRecordIndy -from ..models.detail.ld_proof import V20CredExRecordLDProof from ..models.cred_ex_record import V20CredExRecord LOGGER = logging.getLogger(__name__) @@ -50,32 +47,7 @@ def profile(self) -> Profile: """ return self._profile - async def get_detail_record( - self, cred_ex_id: str - ) -> Union[V20CredExRecordIndy, V20CredExRecordLDProof]: - """Retrieve credential exchange detail record by cred_ex_id.""" - - async with self.profile.session() as session: - records = await self.format.detail.query_by_cred_ex_id(session, cred_ex_id) - - if len(records) > 1: - LOGGER.warning( - "Cred ex id %s has %d %s detail records: should be 1", - cred_ex_id, - len(records), - self.format.api, - ) - return records[0] if records else None - - async def _check_uniqueness(self, cred_ex_id: str): - """Raise exception on evidence that cred ex already has cred issued to it.""" - async with self.profile.session() as session: - if await self.format.detail.query_by_cred_ex_id(session, cred_ex_id): - raise V20CredFormatError( - f"{self.format.api} detail record already " - f"exists for cred ex id {cred_ex_id}" - ) - + @abstractmethod def get_format_identifier(self, message_type: str) -> str: """Get attachment format identifier for format and message combination. @@ -86,31 +58,10 @@ def get_format_identifier(self, message_type: str) -> str: str: Issue credential attachment format identifier """ - return ATTACHMENT_FORMAT[message_type][self.format.api] + @abstractmethod def get_format_data(self, message_type: str, data: dict) -> CredFormatAttachment: - """Get credential format and attachment objects for use in cred ex messages. - - Returns a tuple of both credential format and attachment decorator for use - in credential exchange messages. It looks up the correct format identifier and - encodes the data as a base64 attachment. 
- - Args: - message_type (str): The message type for which to return the cred format. - Should be one of the message types defined in the message types file - data (dict): The data to include in the attach decorator - - Returns: - CredFormatAttachment: Credential format and attachment data objects - - """ - return ( - V20CredFormat( - attach_id=self.format.api, - format_=self.get_format_identifier(message_type), - ), - AttachDecorator.data_base64(data, ident=self.format.api), - ) + """Get credential format and attachment objects for use in cred ex messages.""" @abstractclassmethod def validate_fields(cls, message_type: str, attachment_data: dict) -> None: diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py index a85c976072..bceee98f54 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py @@ -10,9 +10,9 @@ from ......cache.base import BaseCache from ......indy.issuer import IndyIssuer, IndyIssuerRevocationRegistryFullError from ......indy.holder import IndyHolder, IndyHolderError -from ......indy.sdk.models.cred import IndyCredentialSchema -from ......indy.sdk.models.cred_request import IndyCredRequestSchema -from ......indy.sdk.models.cred_abstract import IndyCredAbstractSchema +from ......indy.models.cred import IndyCredentialSchema +from ......indy.models.cred_request import IndyCredRequestSchema +from ......indy.models.cred_abstract import IndyCredAbstractSchema from ......ledger.base import BaseLedger from ......messaging.credential_definitions.util import ( CRED_DEF_SENT_RECORD_TYPE, @@ -27,6 +27,7 @@ from ...message_types import ( + ATTACHMENT_FORMAT, CRED_20_ISSUE, CRED_20_OFFER, CRED_20_PROPOSAL, @@ -82,6 +83,70 @@ def validate_fields(cls, message_type: str, attachment_data: Mapping): # Validate, throw if not valid Schema(unknown=RAISE).load(attachment_data) + async def get_detail_record(self, cred_ex_id: str) -> V20CredExRecordIndy: + """Retrieve credential exchange detail record by cred_ex_id.""" + + async with self.profile.session() as session: + records = await IndyCredFormatHandler.format.detail.query_by_cred_ex_id( + session, cred_ex_id + ) + + if len(records) > 1: + LOGGER.warning( + "Cred ex id %s has %d %s detail records: should be 1", + cred_ex_id, + len(records), + IndyCredFormatHandler.format.api, + ) + return records[0] if records else None + + async def _check_uniqueness(self, cred_ex_id: str): + """Raise exception on evidence that cred ex already has cred issued to it.""" + async with self.profile.session() as session: + if await IndyCredFormatHandler.format.detail.query_by_cred_ex_id( + session, cred_ex_id + ): + raise V20CredFormatError( + f"{IndyCredFormatHandler.format.api} detail record already " + f"exists for cred ex id {cred_ex_id}" + ) + + def get_format_identifier(self, message_type: str) -> str: + """Get attachment format identifier for format and message combination. + + Args: + message_type (str): Message type for which to return the format identifier + + Returns: + str: Issue credential attachment format identifier + + """ + return ATTACHMENT_FORMAT[message_type][IndyCredFormatHandler.format.api] + + def get_format_data(self, message_type: str, data: dict) -> CredFormatAttachment: + """Get credential format and attachment objects for use in cred ex messages. 
+ + Returns a tuple of both credential format and attachment decorator for use + in credential exchange messages. It looks up the correct format identifier and + encodes the data as a base64 attachment. + + Args: + message_type (str): The message type for which to return the cred format. + Should be one of the message types defined in the message types file + data (dict): The data to include in the attach decorator + + Returns: + CredFormatAttachment: Credential format and attachment data objects + + """ + return ( + V20CredFormat( + attach_id=IndyCredFormatHandler.format.api, + format_=self.get_format_identifier(message_type), + ), + AttachDecorator.data_base64(data, ident=IndyCredFormatHandler.format.api), + ) + async def _match_sent_cred_def_id(self, tag_query: Mapping[str, str]) -> str: """Return most recent matching id of cred def that agent sent to ledger.""" @@ -125,7 +190,7 @@ async def create_offer( cred_proposal_message = cred_ex_record.cred_proposal cred_def_id = await self._match_sent_cred_def_id( - cred_proposal_message.attachment(self.format) + cred_proposal_message.attachment(IndyCredFormatHandler.format) ) async def _create(): @@ -177,7 +242,7 @@ async def create_request( await self._check_uniqueness(cred_ex_record.cred_ex_id) holder_did = request_data.get("holder_did") if request_data else None - cred_offer = cred_ex_record.cred_offer.attachment(self.format) + cred_offer = cred_ex_record.cred_offer.attachment(IndyCredFormatHandler.format) if "nonce" not in cred_offer: raise V20CredFormatError("Missing nonce in credential offer") @@ -238,8 +303,10 @@ async def issue_credential( """Issue indy credential.""" await self._check_uniqueness(cred_ex_record.cred_ex_id) - cred_offer = cred_ex_record.cred_offer.attachment(self.format) - cred_request = cred_ex_record.cred_request.attachment(self.format) + cred_offer = cred_ex_record.cred_offer.attachment(IndyCredFormatHandler.format) + cred_request = cred_ex_record.cred_request.attachment( + IndyCredFormatHandler.format + ) schema_id = cred_offer["schema_id"] cred_def_id = cred_offer["cred_def_id"] @@ -393,7 +460,7 @@ async def store_credential( self, cred_ex_record: V20CredExRecord, cred_id: str = None ) -> None: """Store indy credential.""" - cred = cred_ex_record.cred_issue.attachment(self.format) + cred = cred_ex_record.cred_issue.attachment(IndyCredFormatHandler.format) rev_reg_def = None ledger = self.profile.inject(BaseLedger) @@ -415,7 +482,7 @@ async def store_credential( detail_record = await self.get_detail_record(cred_ex_record.cred_ex_id) if detail_record is None: raise V20CredFormatError( - f"No credential exchange {self.format.aries} " + f"No credential exchange {IndyCredFormatHandler.format.aries} " f"detail record found for cred ex id {cred_ex_record.cred_ex_id}" ) cred_id_stored = await holder.store_credential( diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py index 91642c7ef1..d122848f7d 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py @@ -35,9 +35,12 @@ CRED_20_REQUEST, CRED_20_ISSUE, ) -from ..handler import IndyCredFormatHandler + from ...handler import LOGGER, V20CredFormatError +from ..handler import IndyCredFormatHandler +from ..handler import LOGGER as INDY_LOGGER + TEST_DID = "LjgpST2rjsoxYegQDRm7EL" SCHEMA_NAME = "bc-reg" SCHEMA_TXN = 12 @@ 
-276,7 +279,7 @@ async def test_get_indy_detail_record(self): await details_indy[1].save(self.session) # exercise logger warning on get() with async_mock.patch.object( - LOGGER, "warning", async_mock.MagicMock() + INDY_LOGGER, "warning", async_mock.MagicMock() ) as mock_warning: assert await self.handler.get_detail_record(cred_ex_id) in details_indy mock_warning.assert_called_once() diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py index 9535d29e27..b72c96a289 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py @@ -6,7 +6,11 @@ from marshmallow import EXCLUDE, INCLUDE +from pyld import jsonld +from pyld.jsonld import JsonLdProcessor + from ......did.did_key import DIDKey +from ......messaging.decorators.attach_decorator import AttachDecorator from ......storage.vc_holder.base import VCHolder from ......storage.vc_holder.vc_record import VCRecord from ......vc.vc_ld import ( @@ -27,11 +31,12 @@ WalletKeyPair, ) from ......vc.ld_proofs.constants import SECURITY_CONTEXT_BBS_URL -from ......wallet.key_type import KeyType -from ......wallet.error import WalletNotFoundError from ......wallet.base import BaseWallet, DIDInfo +from ......wallet.error import WalletNotFoundError +from ......wallet.key_type import KeyType from ...message_types import ( + ATTACHMENT_FORMAT, CRED_20_ISSUE, CRED_20_OFFER, CRED_20_PROPOSAL, @@ -113,6 +118,61 @@ def validate_fields(cls, message_type: str, attachment_data: Mapping) -> None: # Validate, throw if not valid Schema(unknown=EXCLUDE).load(attachment_data) + async def get_detail_record(self, cred_ex_id: str) -> V20CredExRecordLDProof: + """Retrieve credential exchange detail record by cred_ex_id.""" + + async with self.profile.session() as session: + records = await LDProofCredFormatHandler.format.detail.query_by_cred_ex_id( + session, cred_ex_id + ) + + if len(records) > 1: + LOGGER.warning( + "Cred ex id %s has %d %s detail records: should be 1", + cred_ex_id, + len(records), + LDProofCredFormatHandler.format.api, + ) + return records[0] if records else None + + def get_format_identifier(self, message_type: str) -> str: + """Get attachment format identifier for format and message combination. + + Args: + message_type (str): Message type for which to return the format identifier + + Returns: + str: Issue credential attachment format identifier + + """ + return ATTACHMENT_FORMAT[message_type][LDProofCredFormatHandler.format.api] + + def get_format_data(self, message_type: str, data: dict) -> CredFormatAttachment: + """Get credential format and attachment objects for use in cred ex messages. + + Returns a tuple of both credential format and attachment decorator for use + in credential exchange messages. It looks up the correct format identifier and + encodes the data as a base64 attachment. + + Args: + message_type (str): The message type for which to return the cred format. 
+ Should be one of the message types defined in the message types file + data (dict): The data to include in the attach decorator + + Returns: + CredFormatAttachment: Credential format and attachment data objects + + """ + return ( + V20CredFormat( + attach_id=LDProofCredFormatHandler.format.api, + format_=self.get_format_identifier(message_type), + ), + AttachDecorator.data_base64( + data, ident=LDProofCredFormatHandler.format.api + ), + ) + async def _assert_can_issue_with_id_and_proof_type( self, issuer_id: str, proof_type: str ): @@ -328,7 +388,9 @@ async def create_offer( # Parse proposal. Data is stored in proposal if we received a proposal # but also when we create an offer (manager does some weird stuff) - offer_data = cred_ex_record.cred_proposal.attachment(self.format) + offer_data = cred_ex_record.cred_proposal.attachment( + LDProofCredFormatHandler.format + ) detail = LDProofVCDetail.deserialize(offer_data) detail = await self._prepare_detail(detail) @@ -349,11 +411,15 @@ async def create_request( ) -> CredFormatAttachment: """Create linked data proof credential request.""" if cred_ex_record.cred_offer: - request_data = cred_ex_record.cred_offer.attachment(self.format) + request_data = cred_ex_record.cred_offer.attachment( + LDProofCredFormatHandler.format + ) # API data is stored in proposal (when starting from request) # It is a bit of a strage flow IMO. elif cred_ex_record.cred_proposal: - request_data = cred_ex_record.cred_proposal.attachment(self.format) + request_data = cred_ex_record.cred_proposal.attachment( + LDProofCredFormatHandler.format + ) else: raise V20CredFormatError( "Cannot create linked data proof request without offer or input data" @@ -378,7 +444,9 @@ async def issue_credential( "Cannot issue credential without credential request" ) - detail_dict = cred_ex_record.cred_request.attachment(self.format) + detail_dict = cred_ex_record.cred_request.attachment( + LDProofCredFormatHandler.format + ) detail = LDProofVCDetail.deserialize(detail_dict) detail = await self._prepare_detail(detail) @@ -405,8 +473,10 @@ async def receive_credential( self, cred_ex_record: V20CredExRecord, cred_issue_message: V20CredIssue ) -> None: """Receive linked data proof credential.""" - cred_dict = cred_issue_message.attachment(self.format) - detail_dict = cred_ex_record.cred_request.attachment(self.format) + cred_dict = cred_issue_message.attachment(LDProofCredFormatHandler.format) + detail_dict = cred_ex_record.cred_request.attachment( + LDProofCredFormatHandler.format + ) vc = VerifiableCredential.deserialize(cred_dict, unknown=INCLUDE) detail = LDProofVCDetail.deserialize(detail_dict) @@ -472,7 +542,9 @@ async def store_credential( ) -> None: """Store linked data proof credential.""" # Get attachment data - cred_dict: dict = cred_ex_record.cred_issue.attachment(self.format) + cred_dict: dict = cred_ex_record.cred_issue.attachment( + LDProofCredFormatHandler.format + ) # Deserialize objects credential = VerifiableCredential.deserialize(cred_dict, unknown=INCLUDE) @@ -498,10 +570,17 @@ async def store_credential( if not result.verified: raise V20CredFormatError(f"Received invalid credential: {result}") + # Saving expanded type as a cred_tag + expanded = jsonld.expand(cred_dict) + types = JsonLdProcessor.get_values( + expanded[0], + "@type", + ) + # create VC record for storage vc_record = VCRecord( contexts=credential.context_urls, - types=credential.type, + expanded_types=types, issuer_id=credential.issuer_id, subject_ids=credential.credential_subject_ids, schema_ids=[], # 
Schemas not supported yet diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py index eaee864bec..460fddc5b8 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py @@ -49,6 +49,7 @@ from ...handler import LOGGER, V20CredFormatError from ..handler import LDProofCredFormatHandler +from ..handler import LOGGER as LD_PROOF_LOGGER TEST_DID_SOV = "did:sov:LjgpST2rjsoxYegQDRm7EL" TEST_DID_KEY = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" @@ -173,7 +174,7 @@ async def test_get_ld_proof_detail_record(self): await details_ld_proof[1].save(self.session) # exercise logger warning on get() with async_mock.patch.object( - LOGGER, "warning", async_mock.MagicMock() + LD_PROOF_LOGGER, "warning", async_mock.MagicMock() ) as mock_warning: assert await self.handler.get_detail_record(cred_ex_id) in details_ld_proof mock_warning.assert_called_once() @@ -878,11 +879,12 @@ async def test_store_credential(self): document_loader=custom_document_loader, purpose=mock_get_proof_purpose.return_value, ) - self.holder.store_credential.assert_called_once_with( VCRecord( contexts=LD_PROOF_VC["@context"], - types=LD_PROOF_VC["type"], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential" + ], issuer_id=LD_PROOF_VC["issuer"], subject_ids=[], schema_ids=[], # Schemas not supported yet @@ -890,7 +892,6 @@ async def test_store_credential(self): cred_value=LD_PROOF_VC, given_id=None, record_id=cred_id, - cred_tags=None, # Tags should be derived from credential values ) ) diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/manager.py b/aries_cloudagent/protocols/out_of_band/v1_0/manager.py index fc4dbe4b1a..5ef75a3c14 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/manager.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/manager.py @@ -11,17 +11,17 @@ from ....connections.util import mediation_record_if_id from ....core.error import BaseError from ....core.profile import ProfileSession +from ....did.did_key import DIDKey from ....indy.holder import IndyHolder -from ....indy.sdk.models.xform import indy_proof_req_preview2indy_requested_creds -from ....messaging.responder import BaseResponder +from ....indy.models.xform import indy_proof_req_preview2indy_requested_creds from ....messaging.decorators.attach_decorator import AttachDecorator +from ....messaging.responder import BaseResponder from ....multitenant.manager import MultitenantManager from ....storage.error import StorageNotFoundError from ....transport.inbound.receipt import MessageReceipt from ....wallet.base import BaseWallet from ....wallet.util import b64_to_bytes from ....wallet.key_type import KeyType -from ....did.did_key import DIDKey from ...coordinate_mediation.v1_0.manager import MediationManager from ...connections.v1_0.manager import ConnectionManager @@ -35,8 +35,6 @@ from ...present_proof.v1_0.models.presentation_exchange import V10PresentationExchange from ...present_proof.v2_0.manager import V20PresManager from ...present_proof.v2_0.message_types import PRES_20_REQUEST -from ...present_proof.v2_0.messages.pres_format import V20PresFormat -from ...present_proof.v2_0.messages.pres_request import V20PresRequest from ...present_proof.v2_0.models.pres_exchange import V20PresExRecord from .messages.invitation import HSProto, 
InvitationMessage @@ -194,7 +192,7 @@ async def create_invitation( ) message_attachments.append( InvitationMessage.wrap_message( - pres_ex_rec.pres_request.attachment() + pres_ex_rec.pres_request.serialize() ) ) else: @@ -702,30 +700,11 @@ async def _process_pres_request_v2( pres_ex_record = await pres_mgr.receive_pres_request(pres_ex_record) if pres_ex_record.auto_present: - indy_proof_request = V20PresRequest.deserialize( - pres_request_msg - ).attachment( - V20PresFormat.Format.INDY - ) # assumption will change for DIF - try: - req_creds = await indy_proof_req_preview2indy_requested_creds( - indy_proof_req=indy_proof_request, - preview=None, - holder=self._session.inject(IndyHolder), - ) - except ValueError as err: - self._logger.warning(f"{err}") - raise OutOfBandManagerError( - f"Cannot auto-respond to presentation request attachment: {err}" - ) - (pres_ex_record, pres_msg) = await pres_mgr.create_pres( pres_ex_record=pres_ex_record, - requested_credentials=req_creds, comment=( - "auto-presented for proof request nonce={}".format( - indy_proof_request["nonce"] - ) + f"auto-presented for proof requests" + f", pres_ex_record: {pres_ex_record.pres_ex_id}" ), ) responder = self._session.inject(BaseResponder, required=False) @@ -739,7 +718,7 @@ async def _process_pres_request_v2( else: raise OutOfBandManagerError( ( - "Configuration sets auto_present false: cannot " + "Configuration set auto_present false: cannot " "respond automatically to presentation requests" ) ) diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py index 927155693b..628000e8af 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py @@ -13,7 +13,7 @@ from .....core.in_memory import InMemoryProfile from .....did.did_key import DIDKey from .....indy.holder import IndyHolder -from .....indy.sdk.models.pres_preview import ( +from .....indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPredSpec, IndyPresPreview, @@ -55,12 +55,13 @@ from .....protocols.present_proof.v2_0.messages.pres_format import V20PresFormat from .....protocols.present_proof.v2_0.messages.pres_request import V20PresRequest from .....storage.error import StorageNotFoundError +from .....storage.vc_holder.base import VCHolder +from .....storage.vc_holder.vc_record import VCRecord from .....transport.inbound.receipt import MessageReceipt -from .....wallet.base import BaseWallet from .....wallet.did_info import DIDInfo, KeyInfo +from .....wallet.did_method import DIDMethod from .....wallet.in_memory import InMemoryWallet from .....wallet.key_type import KeyType -from .....wallet.did_method import DIDMethod from ....didcomm_prefix import DIDCommPrefix from ....issue_credential.v1_0.models.credential_exchange import V10CredentialExchange @@ -137,6 +138,44 @@ class TestConfig: }} }}""" ) + DIF_PROOF_REQ = { + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A", + } + ], + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + { + "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints": { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "purpose": "The claim must be from one of 
the specified issuers", + "filter": { + "type": "string", + "enum": ["JOHN", "CAI"], + }, + } + ], + }, + } + ], + }, + } PRES_PREVIEW = IndyPresPreview( attributes=[ @@ -193,6 +232,23 @@ class TestConfig: AttachDecorator.data_base64(mapping=INDY_PROOF_REQ, ident="indy") ], ) + + DIF_PRES_REQ_V2 = V20PresRequest( + comment="some comment", + will_confirm=True, + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(mapping=DIF_PROOF_REQ, ident="dif") + ], + ) + req_attach_v2 = AttachDecorator.data_json( mapping=PRES_REQ_V2.serialize(), ident="request-0", @@ -548,11 +604,7 @@ async def test_create_invitation_attachment_present_proof_v2_0(self): ) mock_retrieve_pxid_1.side_effect = StorageNotFoundError() mock_retrieve_pxid_2.return_value = async_mock.MagicMock( - pres_request=async_mock.MagicMock( - attachment=async_mock.MagicMock( - return_value=TestConfig.PRES_REQ_V2.serialize() - ) - ) + pres_request=TestConfig.PRES_REQ_V2 ) invi_rec = await self.manager.create_invitation( my_endpoint=TestConfig.test_endpoint, @@ -570,6 +622,337 @@ async def test_create_invitation_attachment_present_proof_v2_0(self): self.manager.session, "dummy-id" ) + async def test_dif_req_v2_attach_pres_existing_conn_auto_present_pres_msg_with_challenge( + self, + ): + self.session.context.update_settings({"public_invites": True}) + self.session.context.update_settings( + {"debug.auto_respond_presentation_request": True} + ) + test_exist_conn = ConnRecord( + my_did=TestConfig.test_did, + their_did=TestConfig.test_target_did, + their_public_did=TestConfig.test_target_did, + invitation_msg_id="12345678-0123-4567-1234-567812345678", + their_role=ConnRecord.Role.REQUESTER, + ) + await test_exist_conn.save(self.session) + await test_exist_conn.metadata_set(self.session, "reuse_msg_state", "initial") + await test_exist_conn.metadata_set(self.session, "reuse_msg_id", "test_123") + receipt = MessageReceipt( + recipient_did=TestConfig.test_did, + recipient_did_public=False, + sender_did=TestConfig.test_target_did, + ) + dif_proof_req = deepcopy(TestConfig.DIF_PROOF_REQ) + dif_proof_req["options"] = {} + dif_proof_req["options"]["challenge"] = "3fa85f64-5717-4562-b3fc-2c963f66afa7" + dif_pres_req_v2 = V20PresRequest( + comment="some comment", + will_confirm=True, + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(mapping=dif_proof_req, ident="dif") + ], + ) + + px2_rec = test_module.V20PresExRecord( + auto_present=True, + pres_request=dif_pres_req_v2.serialize(), + ) + + dif_req_attach_v2 = AttachDecorator.data_json( + mapping=dif_pres_req_v2.serialize(), + ident="request-0", + ).serialize() + + with async_mock.patch.object( + DIDXManager, + "receive_invitation", + autospec=True, + ) as didx_mgr_receive_invitation, async_mock.patch.object( + V20PresManager, + "receive_pres_request", + autospec=True, + ) as pres_mgr_receive_pres_req, async_mock.patch( + "aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage", + autospec=True, + ) as inv_message_cls, async_mock.patch.object( + OutOfBandManager, + "fetch_connection_targets", + autospec=True, + ) as oob_mgr_fetch_conn, async_mock.patch.object( + OutOfBandManager, + "find_existing_connection", + autospec=True, + ) as oob_mgr_find_existing_conn, async_mock.patch.object( + 
OutOfBandManager, + "check_reuse_msg_state", + autospec=True, + ) as oob_mgr_check_reuse_state, async_mock.patch.object( + OutOfBandManager, + "create_handshake_reuse_message", + autospec=True, + ) as oob_mgr_create_reuse_msg, async_mock.patch.object( + OutOfBandManager, + "receive_reuse_message", + autospec=True, + ) as oob_mgr_receive_reuse_msg, async_mock.patch.object( + OutOfBandManager, + "receive_reuse_accepted_message", + autospec=True, + ) as oob_mgr_receive_accept_msg, async_mock.patch.object( + OutOfBandManager, + "receive_problem_report", + autospec=True, + ) as oob_mgr_receive_problem_report, async_mock.patch.object( + V20PresManager, + "create_pres", + autospec=True, + ) as pres_mgr_create_pres: + oob_mgr_find_existing_conn.return_value = test_exist_conn + pres_mgr_receive_pres_req.return_value = px2_rec + pres_mgr_create_pres.return_value = ( + px2_rec, + V20Pres( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20][ + V20PresFormat.Format.DIF.api + ], + ) + ], + presentations_attach=[ + AttachDecorator.data_json( + mapping={"bogus": "proof"}, + ident="dif", + ) + ], + ), + ) + self.session.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock( + search_credentials=async_mock.MagicMock( + return_value=async_mock.MagicMock( + fetch=async_mock.CoroutineMock( + return_value=[ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:example:ebfeb1f712ebc6f1c276e12ec21" + ], + proof_types=["Ed25519Signature2018"], + schema_ids=[ + "https://example.org/examples/degree.json" + ], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ) + ] + ) + ) + ) + ), + ) + mock_oob_invi = async_mock.MagicMock( + handshake_protocols=[ + pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix + ], + services=[TestConfig.test_target_did], + requests_attach=[AttachDecorator.deserialize(dif_req_attach_v2)], + ) + + inv_message_cls.deserialize.return_value = mock_oob_invi + + conn_rec = await self.manager.receive_invitation( + mock_oob_invi, use_existing_connection=True + ) + assert ConnRecord.deserialize(conn_rec) + + async def test_dif_req_v2_attach_pres_existing_conn_auto_present_pres_msg_with_nonce( + self, + ): + self.session.context.update_settings({"public_invites": True}) + self.session.context.update_settings( + {"debug.auto_respond_presentation_request": True} + ) + test_exist_conn = ConnRecord( + my_did=TestConfig.test_did, + their_did=TestConfig.test_target_did, + their_public_did=TestConfig.test_target_did, + invitation_msg_id="12345678-0123-4567-1234-567812345678", + their_role=ConnRecord.Role.REQUESTER, + ) + await test_exist_conn.save(self.session) + await test_exist_conn.metadata_set(self.session, "reuse_msg_state", "initial") + await test_exist_conn.metadata_set(self.session, "reuse_msg_id", "test_123") + receipt = MessageReceipt( + recipient_did=TestConfig.test_did, + recipient_did_public=False, + sender_did=TestConfig.test_target_did, + ) + + dif_proof_req = deepcopy(TestConfig.DIF_PROOF_REQ) + dif_proof_req["options"] = {} + dif_proof_req["options"]["nonce"] = "12345" + dif_pres_req_v2 = V20PresRequest( + comment="some comment", + will_confirm=True, + formats=[ + V20PresFormat( + 
attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(mapping=dif_proof_req, ident="dif") + ], + ) + + px2_rec = test_module.V20PresExRecord( + auto_present=True, + pres_request=dif_pres_req_v2.serialize(), + ) + + dif_req_attach_v2 = AttachDecorator.data_json( + mapping=dif_pres_req_v2.serialize(), + ident="request-0", + ).serialize() + + with async_mock.patch.object( + DIDXManager, + "receive_invitation", + autospec=True, + ) as didx_mgr_receive_invitation, async_mock.patch.object( + V20PresManager, + "receive_pres_request", + autospec=True, + ) as pres_mgr_receive_pres_req, async_mock.patch( + "aries_cloudagent.protocols.out_of_band.v1_0.manager.InvitationMessage", + autospec=True, + ) as inv_message_cls, async_mock.patch.object( + OutOfBandManager, + "fetch_connection_targets", + autospec=True, + ) as oob_mgr_fetch_conn, async_mock.patch.object( + OutOfBandManager, + "find_existing_connection", + autospec=True, + ) as oob_mgr_find_existing_conn, async_mock.patch.object( + OutOfBandManager, + "check_reuse_msg_state", + autospec=True, + ) as oob_mgr_check_reuse_state, async_mock.patch.object( + OutOfBandManager, + "create_handshake_reuse_message", + autospec=True, + ) as oob_mgr_create_reuse_msg, async_mock.patch.object( + OutOfBandManager, + "receive_reuse_message", + autospec=True, + ) as oob_mgr_receive_reuse_msg, async_mock.patch.object( + OutOfBandManager, + "receive_reuse_accepted_message", + autospec=True, + ) as oob_mgr_receive_accept_msg, async_mock.patch.object( + OutOfBandManager, + "receive_problem_report", + autospec=True, + ) as oob_mgr_receive_problem_report, async_mock.patch.object( + V20PresManager, + "create_pres", + autospec=True, + ) as pres_mgr_create_pres: + oob_mgr_find_existing_conn.return_value = test_exist_conn + pres_mgr_receive_pres_req.return_value = px2_rec + pres_mgr_create_pres.return_value = ( + px2_rec, + V20Pres( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20][ + V20PresFormat.Format.DIF.api + ], + ) + ], + presentations_attach=[ + AttachDecorator.data_json( + mapping={"bogus": "proof"}, + ident="dif", + ) + ], + ), + ) + self.session.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock( + search_credentials=async_mock.MagicMock( + return_value=async_mock.MagicMock( + fetch=async_mock.CoroutineMock( + return_value=[ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:example:ebfeb1f712ebc6f1c276e12ec21" + ], + proof_types=["Ed25519Signature2018"], + schema_ids=[ + "https://example.org/examples/degree.json" + ], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ) + ] + ) + ) + ) + ), + ) + mock_oob_invi = async_mock.MagicMock( + handshake_protocols=[ + pfx.qualify(HSProto.RFC23.name) for pfx in DIDCommPrefix + ], + services=[TestConfig.test_target_did], + requests_attach=[AttachDecorator.deserialize(dif_req_attach_v2)], + ) + + inv_message_cls.deserialize.return_value = mock_oob_invi + + conn_rec = await self.manager.receive_invitation( + mock_oob_invi, use_existing_connection=True + ) + assert ConnRecord.deserialize(conn_rec) + async def 
test_create_invitation_public_x_no_public_invites(self): self.session.context.update_settings({"public_invites": False}) @@ -2200,7 +2583,10 @@ async def test_req_v2_attach_presentation_existing_conn_no_auto_present(self): await self.manager.receive_invitation( mock_oob_invi, use_existing_connection=True ) - assert "Configuration sets auto_present false" in str(context.exception) + assert ( + "Configuration set auto_present false: cannot respond automatically to presentation requests" + == str(context.exception) + ) async def test_req_v2_attach_presentation_existing_conn_auto_present_pres_msg(self): self.session.context.update_settings({"public_invites": True}) @@ -2332,7 +2718,7 @@ async def test_req_v2_attach_presentation_existing_conn_auto_present_pres_msg(se async def test_req_v2_attach_pres_catch_value_error(self): self.session.context.update_settings({"public_invites": True}) self.session.context.update_settings( - {"debug.auto_respond_presentation_request": True} + {"debug.auto_respond_presentation_request": False} ) test_exist_conn = ConnRecord( my_did=TestConfig.test_did, @@ -2351,7 +2737,7 @@ async def test_req_v2_attach_pres_catch_value_error(self): ) px2_rec = test_module.V20PresExRecord( - auto_present=True, + auto_present=False, pres_request=TestConfig.PRES_REQ_V2.serialize(), ) @@ -2443,7 +2829,7 @@ async def test_req_v2_attach_pres_catch_value_error(self): await self.manager.receive_invitation( mock_oob_invi, use_existing_connection=True ) - assert "Cannot auto-respond" in str(context.exception) + assert "cannot respond automatically" in str(context.exception) async def test_req_attach_presentation_cred_offer(self): self.session.context.update_settings({"public_invites": True}) diff --git a/aries_cloudagent/protocols/present_proof/dif/__init__.py b/aries_cloudagent/protocols/present_proof/dif/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_exch.py b/aries_cloudagent/protocols/present_proof/dif/pres_exch.py new file mode 100644 index 0000000000..14afabaa2e --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/pres_exch.py @@ -0,0 +1,915 @@ +"""Schemas for dif presentation exchange attachment.""" +from marshmallow import ( + fields, + validate, + EXCLUDE, + INCLUDE, + pre_load, + post_dump, + ValidationError, +) +from typing import Sequence, Union, Mapping + +from ....messaging.models.base import BaseModelSchema, BaseModel +from ....messaging.valid import ( + UUID4, + StrOrDictField, + StrOrNumberField, +) +from ....vc.vc_ld.models import LinkedDataProofSchema + + +class ClaimFormat(BaseModel): + """Defines Claim field.""" + + class Meta: + """ClaimFormat metadata.""" + + schema_class = "ClaimFormatSchema" + + def __init__( + self, + *, + jwt: Mapping = None, + jwt_vc: Mapping = None, + jwt_vp: Mapping = None, + ldp: Mapping = None, + ldp_vc: Mapping = None, + ldp_vp: Mapping = None, + ): + """Initialize format.""" + self.jwt = jwt + self.jwt_vc = jwt_vc + self.jwt_vp = jwt_vp + self.ldp = ldp + self.ldp_vc = ldp_vc + self.ldp_vp = ldp_vp + + +class ClaimFormatSchema(BaseModelSchema): + """Single ClaimFormat Schema.""" + + class Meta: + """ClaimFormatSchema metadata.""" + + model_class = ClaimFormat + unknown = EXCLUDE + + jwt = fields.Dict( + required=False, + ) + jwt_vc = fields.Dict( + required=False, + ) + jwt_vp = fields.Dict( + required=False, + ) + ldp = fields.Dict( + required=False, + ) + ldp_vc = fields.Dict( + required=False, + ) + ldp_vp = fields.Dict( + required=False, + ) + + 
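For orientation, here is a minimal standalone sketch (not part of this patch) of how a DIF claim-format mapping would load through a schema shaped like the ClaimFormatSchema added above. It uses plain marshmallow rather than the aries_cloudagent BaseModel/BaseModelSchema machinery, and the class name is illustrative.

```python
# Sketch only: mirrors the fields of ClaimFormatSchema above so the loading
# behaviour can be tried standalone; it is not the class from this patch.
from marshmallow import EXCLUDE, Schema, fields


class ClaimFormatSketchSchema(Schema):
    """Simplified stand-in: every claim-format entry is an optional dict."""

    class Meta:
        unknown = EXCLUDE  # ignore claim formats this schema does not model

    jwt = fields.Dict(required=False)
    jwt_vc = fields.Dict(required=False)
    jwt_vp = fields.Dict(required=False)
    ldp = fields.Dict(required=False)
    ldp_vc = fields.Dict(required=False)
    ldp_vp = fields.Dict(required=False)


# A typical "format" block from a DIF presentation definition loads cleanly.
loaded = ClaimFormatSketchSchema().load(
    {"ldp_vp": {"proof_type": ["Ed25519Signature2018", "BbsBlsSignature2020"]}}
)
print(loaded)  # {'ldp_vp': {'proof_type': ['Ed25519Signature2018', 'BbsBlsSignature2020']}}
```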
+class SubmissionRequirements(BaseModel): + """describes input to be submitted via a presentation submission.""" + + class Meta: + """SubmissionRequirements metadata.""" + + schema_class = "SubmissionRequirementsSchema" + + def __init__( + self, + *, + _name: str = None, + purpose: str = None, + rule: str = None, + count: int = None, + minimum: int = None, + maximum: int = None, + _from: str = None, + # Self_reference + from_nested: Sequence = None, + ): + """Initialize SubmissionRequirement.""" + self._name = _name + self.purpose = purpose + self.rule = rule + self.count = count + self.minimum = minimum + self.maximum = maximum + self._from = _from + self.from_nested = from_nested + + +class SubmissionRequirementsSchema(BaseModelSchema): + """Single Presentation Definition Schema.""" + + class Meta: + """SubmissionRequirementsSchema metadata.""" + + model_class = SubmissionRequirements + unknown = EXCLUDE + + _name = fields.Str(description="Name", required=False, data_key="name") + purpose = fields.Str(description="Purpose", required=False) + rule = fields.Str( + description="Selection", + required=False, + validate=validate.OneOf(["all", "pick"]), + ) + count = fields.Int( + description="Count Value", + example=1234, + required=False, + strict=True, + ) + minimum = fields.Int( + description="Min Value", + example=1234, + required=False, + strict=True, + data_key="min", + ) + maximum = fields.Int( + description="Max Value", + example=1234, + required=False, + strict=True, + data_key="max", + ) + _from = fields.Str(description="From", required=False, data_key="from") + # Self References + from_nested = fields.List( + fields.Nested(lambda: SubmissionRequirementsSchema(exclude=("from_nested",))), + required=False, + ) + + @pre_load + def validate_from(self, data, **kwargs): + """Support validation of from and from_nested.""" + if "from" in data and "from_nested" in data: + raise ValidationError( + "Both from and from_nested cannot be " + "specified in the submission requirement" + ) + if "from" not in data and "from_nested" not in data: + raise ValidationError( + "Either from or from_nested needs to be " + "specified in the submission requirement" + ) + return data + + +class SchemaInputDescriptor(BaseModel): + """SchemaInputDescriptor.""" + + class Meta: + """SchemaInputDescriptor metadata.""" + + schema_class = "SchemaInputDescriptorSchema" + + def __init__( + self, + *, + uri: str = None, + required: bool = False, + ): + """Initialize InputDescriptors.""" + self.uri = uri + self.required = required + + +class SchemaInputDescriptorSchema(BaseModelSchema): + """Single SchemaField Schema.""" + + class Meta: + """SchemaInputDescriptorSchema metadata.""" + + model_class = SchemaInputDescriptor + unknown = EXCLUDE + + uri = fields.Str( + description="URI", + required=False, + ) + required = fields.Bool(description="Required", required=False) + + +class DIFHolder(BaseModel): + """Single Holder object for Constraints.""" + + class Meta: + """Holder metadata.""" + + schema_class = "DIFHolderSchema" + + def __init__( + self, + *, + field_ids: Sequence[str] = None, + directive: str = None, + ): + """Initialize Holder.""" + self.field_ids = field_ids + self.directive = directive + + +class DIFHolderSchema(BaseModelSchema): + """Single Holder Schema.""" + + class Meta: + """DIFHolderSchema metadata.""" + + model_class = DIFHolder + unknown = EXCLUDE + + field_ids = fields.List( + fields.Str( + description="FieldID", + required=False, + **UUID4, + ), + required=False, + data_key="field_id", + ) + 
directive = fields.Str( + description="Preference", + required=False, + validate=validate.OneOf(["required", "preferred"]), + ) + + +class Filter(BaseModel): + """Single Filter for the Constraint object.""" + + class Meta: + """Filter metadata.""" + + schema_class = "FilterSchema" + + def __init__( + self, + *, + _not: bool = False, + _type: str = None, + fmt: str = None, + pattern: str = None, + minimum: str = None, + maximum: str = None, + min_length: int = None, + max_length: int = None, + exclusive_min: str = None, + exclusive_max: str = None, + const: str = None, + enums: Sequence[str] = None, + ): + """Initialize Filter.""" + self._type = _type + self.fmt = fmt + self.pattern = pattern + self.minimum = minimum + self.maximum = maximum + self.min_length = min_length + self.max_length = max_length + self.exclusive_min = exclusive_min + self.exclusive_max = exclusive_max + self.const = const + self.enums = enums + self._not = _not + + +class FilterSchema(BaseModelSchema): + """Single Filter Schema.""" + + class Meta: + """FilterSchema metadata.""" + + model_class = Filter + unknown = EXCLUDE + + _type = fields.Str(description="Type", required=False, data_key="type") + fmt = fields.Str( + description="Format", + required=False, + data_key="format", + ) + pattern = fields.Str( + description="Pattern", + required=False, + ) + minimum = StrOrNumberField( + description="Minimum", + required=False, + ) + maximum = StrOrNumberField( + description="Maximum", + required=False, + ) + min_length = fields.Int( + description="Min Length", + example=1234, + strict=True, + required=False, + data_key="minLength", + ) + max_length = fields.Int( + description="Max Length", + example=1234, + strict=True, + required=False, + data_key="maxLength", + ) + exclusive_min = StrOrNumberField( + description="ExclusiveMinimum", + required=False, + data_key="exclusiveMinimum", + ) + exclusive_max = StrOrNumberField( + description="ExclusiveMaximum", + required=False, + data_key="exclusiveMaximum", + ) + const = StrOrNumberField( + description="Const", + required=False, + ) + enums = fields.List( + StrOrNumberField(description="Enum", required=False), + required=False, + data_key="enum", + ) + _not = fields.Boolean( + description="Not", + required=False, + example=False, + data_key="not", + ) + + @pre_load + def extract_info(self, data, **kwargs): + """Enum validation and not filter logic.""" + if "not" in data: + new_data = {"not": True} + for key, value in data.get("not").items(): + new_data[key] = value + data = new_data + if "enum" in data: + if type(data.get("enum")) is not list: + raise ValidationError("enum is not specified as a list") + return data + + @post_dump + def serialize_reformat(self, data, **kwargs): + """Support serialization of not filter according to DIF spec.""" + if data.pop("not", False): + return {"not": data} + + return data + + +class DIFField(BaseModel): + """Single Field object for the Constraint.""" + + class Meta: + """Field metadata.""" + + schema_class = "DIFFieldSchema" + + def __init__( + self, + *, + paths: Sequence[str] = None, + purpose: str = None, + predicate: str = None, + _filter: Filter = None, + ): + """Initialize Field.""" + self.paths = paths + self.purpose = purpose + self.predicate = predicate + self._filter = _filter + + +class DIFFieldSchema(BaseModelSchema): + """Single Field Schema.""" + + class Meta: + """DIFFieldSchema metadata.""" + + model_class = DIFField + unknown = EXCLUDE + + paths = fields.List( + fields.Str(description="Path", required=False), + 
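# A short sketch of FilterSchema's pre_load handling above: a filter wrapped in
# "not" is flattened on load with the negation recorded on the model's _not
# flag, and "enum" must be given as a list. Sample values are hypothetical.
from aries_cloudagent.protocols.present_proof.dif.pres_exch import FilterSchema

negated = FilterSchema().load({"not": {"const": "Karma"}})
assert negated._not and negated.const == "Karma"

enum_filter = FilterSchema().load({"type": "string", "enum": ["CA", "NY"]})
assert enum_filter.enums == ["CA", "NY"]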
required=False, + data_key="path", + ) + purpose = fields.Str( + description="Purpose", + required=False, + ) + predicate = fields.Str( + description="Preference", + required=False, + validate=validate.OneOf(["required", "preferred"]), + ) + _filter = fields.Nested(FilterSchema, data_key="filter") + + +class Constraints(BaseModel): + """Single Constraints which describes InputDescriptor's Contraint field.""" + + class Meta: + """Constraints metadata.""" + + schema_class = "ConstraintsSchema" + + def __init__( + self, + *, + subject_issuer: str = None, + limit_disclosure: bool = None, + holders: Sequence[DIFHolder] = None, + _fields: Sequence[DIFField] = None, + status_active: str = None, + status_suspended: str = None, + status_revoked: str = None, + ): + """Initialize Constraints for Input Descriptor.""" + self.subject_issuer = subject_issuer + self.limit_disclosure = limit_disclosure + self.holders = holders + self._fields = _fields + self.status_active = status_active + self.status_suspended = status_suspended + self.status_revoked = status_revoked + + +class ConstraintsSchema(BaseModelSchema): + """Single Constraints Schema.""" + + class Meta: + """ConstraintsSchema metadata.""" + + model_class = Constraints + unknown = EXCLUDE + + subject_issuer = fields.Str( + description="SubjectIsIssuer", + required=False, + validate=validate.OneOf(["required", "preferred"]), + data_key="subject_is_issuer", + ) + limit_disclosure = fields.Str(description="LimitDisclosure", required=False) + holders = fields.List( + fields.Nested(DIFHolderSchema), + required=False, + data_key="is_holder", + ) + _fields = fields.List( + fields.Nested(DIFFieldSchema), + required=False, + data_key="fields", + ) + status_active = fields.Str( + required=False, + validate=validate.OneOf(["required", "allowed", "disallowed"]), + ) + status_suspended = fields.Str( + required=False, + validate=validate.OneOf(["required", "allowed", "disallowed"]), + ) + status_revoked = fields.Str( + required=False, + validate=validate.OneOf(["required", "allowed", "disallowed"]), + ) + + @pre_load + def extract_info(self, data, **kwargs): + """Support deserialization of statuses according to DIF spec.""" + if "statuses" in data: + if "active" in data.get("statuses"): + if "directive" in data.get("statuses").get("active"): + data["status_active"] = data["statuses"]["active"]["directive"] + if "suspended" in data.get("statuses"): + if "directive" in data.get("statuses").get("suspended"): + data["status_suspended"] = data["statuses"]["suspended"][ + "directive" + ] + if "revoked" in data.get("statuses"): + if "directive" in data.get("statuses").get("revoked"): + data["status_revoked"] = data["statuses"]["revoked"]["directive"] + return data + + @post_dump + def reformat_data(self, data, **kwargs): + """Support serialization of statuses according to DIF spec.""" + if "status_active" in data: + statuses = data.get("statuses", {}) + statuses["active"] = {"directive": data.get("status_active")} + data["statuses"] = statuses + del data["status_active"] + if "status_suspended" in data: + statuses = data.get("statuses", {}) + statuses["suspended"] = {"directive": data.get("status_suspended")} + data["statuses"] = statuses + del data["status_suspended"] + if "status_revoked" in data: + statuses = data.get("statuses", {}) + statuses["revoked"] = {"directive": data.get("status_revoked")} + data["statuses"] = statuses + del data["status_revoked"] + return data + + +class InputDescriptors(BaseModel): + """Input Descriptors.""" + + class Meta: + 
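# A minimal sketch of the statuses handling above: the nested DIF "statuses"
# block is flattened to status_active/status_suspended/status_revoked on load
# and rebuilt on dump. Field values below are hypothetical.
from aries_cloudagent.protocols.present_proof.dif.pres_exch import ConstraintsSchema

constraints = ConstraintsSchema().load(
    {
        "limit_disclosure": "required",
        "statuses": {"active": {"directive": "required"}},
        "fields": [{"path": ["$.credentialSubject.givenName"]}],
    }
)
assert constraints.status_active == "required"
assert constraints._fields[0].paths == ["$.credentialSubject.givenName"]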
"""InputDescriptors metadata.""" + + schema_class = "InputDescriptorsSchema" + + def __init__( + self, + *, + id: str = None, + groups: Sequence[str] = None, + name: str = None, + purpose: str = None, + metadata: dict = None, + constraint: Constraints = None, + schemas: Sequence[SchemaInputDescriptor] = None, + ): + """Initialize InputDescriptors.""" + self.id = id + self.groups = groups + self.name = name + self.purpose = purpose + self.metadata = metadata + self.constraint = constraint + self.schemas = schemas + + +class InputDescriptorsSchema(BaseModelSchema): + """Single InputDescriptors Schema.""" + + class Meta: + """InputDescriptorsSchema metadata.""" + + model_class = InputDescriptors + unknown = EXCLUDE + + id = fields.Str(description="ID", required=False) + groups = fields.List( + fields.Str( + description="Group", + required=False, + ), + required=False, + data_key="group", + ) + name = fields.Str(description="Name", required=False) + purpose = fields.Str(description="Purpose", required=False) + metadata = fields.Dict(description="Metadata dictionary", required=False) + constraint = fields.Nested( + ConstraintsSchema, required=False, data_key="constraints" + ) + schemas = fields.List( + fields.Nested(SchemaInputDescriptorSchema), required=False, data_key="schema" + ) + + +class Requirement(BaseModel): + """Single Requirement generated from toRequirement function.""" + + class Meta: + """Requirement metadata.""" + + schema_class = "RequirementSchema" + + def __init__( + self, + *, + count: int = None, + maximum: int = None, + minimum: int = None, + input_descriptors: Sequence[InputDescriptors] = None, + nested_req: Sequence = None, + ): + """Initialize Requirement.""" + self.count = count + self.maximum = maximum + self.minimum = minimum + self.input_descriptors = input_descriptors + self.nested_req = nested_req + + +class RequirementSchema(BaseModelSchema): + """Single Requirement Schema.""" + + class Meta: + """RequirementSchema metadata.""" + + model_class = Requirement + unknown = EXCLUDE + + count = fields.Int( + description="Count Value", + example=1234, + strict=True, + required=False, + ) + maximum = fields.Int( + description="Max Value", + example=1234, + strict=True, + required=False, + ) + minimum = fields.Int( + description="Min Value", + example=1234, + strict=True, + required=False, + ) + input_descriptors = fields.List( + fields.Nested(InputDescriptorsSchema), + required=False, + ) + # Self References + nested_req = fields.List( + fields.Nested(lambda: RequirementSchema(exclude=("_nested_req",))), + required=False, + ) + + +class PresentationDefinition(BaseModel): + """https://identity.foundation/presentation-exchange/.""" + + class Meta: + """PresentationDefinition metadata.""" + + schema_class = "PresentationDefinitionSchema" + + def __init__( + self, + *, + id: str = None, + name: str = None, + purpose: str = None, + fmt: ClaimFormat = None, + submission_requirements: Sequence[SubmissionRequirements] = None, + input_descriptors: Sequence[InputDescriptors] = None, + **kwargs, + ): + """Initialize flattened single-JWS to include in attach decorator data.""" + super().__init__(**kwargs) + self.id = id + self.name = name + self.purpose = purpose + self.fmt = fmt + self.submission_requirements = submission_requirements + self.input_descriptors = input_descriptors + + +class PresentationDefinitionSchema(BaseModelSchema): + """Single Presentation Definition Schema.""" + + class Meta: + """PresentationDefinitionSchema metadata.""" + + model_class = 
PresentationDefinition + unknown = EXCLUDE + + id = fields.Str( + required=False, + description="Unique Resource Identifier", + **UUID4, + ) + name = fields.Str( + description=( + "Human-friendly name that describes" + " what the presentation definition pertains to" + ), + required=False, + ) + purpose = fields.Str( + description=( + "Describes the purpose for which" + " the Presentation Definition's inputs are being requested" + ), + required=False, + ) + fmt = fields.Nested( + ClaimFormatSchema, + required=False, + data_key="format", + ) + submission_requirements = fields.List( + fields.Nested(SubmissionRequirementsSchema), + required=False, + ) + input_descriptors = fields.List( + fields.Nested(InputDescriptorsSchema), + required=False, + ) + + +class InputDescriptorMapping(BaseModel): + """Single InputDescriptorMapping object.""" + + class Meta: + """InputDescriptorMapping metadata.""" + + schema_class = "InputDescriptorMappingSchema" + + def __init__( + self, + *, + id: str = None, + fmt: str = None, + path: str = None, + ): + """Initialize InputDescriptorMapping.""" + self.id = id + self.fmt = fmt + self.path = path + + +class InputDescriptorMappingSchema(BaseModelSchema): + """Single InputDescriptorMapping Schema.""" + + class Meta: + """InputDescriptorMappingSchema metadata.""" + + model_class = InputDescriptorMapping + unknown = EXCLUDE + + id = fields.Str( + description="ID", + required=False, + ) + fmt = fields.Str( + description="Format", + required=False, + default="ldp_vp", + data_key="format", + ) + path = fields.Str( + description="Path", + required=False, + ) + + +class PresentationSubmission(BaseModel): + """Single PresentationSubmission object.""" + + class Meta: + """PresentationSubmission metadata.""" + + schema_class = "PresentationSubmissionSchema" + + def __init__( + self, + *, + id: str = None, + definition_id: str = None, + descriptor_maps: Sequence[InputDescriptorMapping] = None, + ): + """Initialize InputDescriptorMapping.""" + self.id = id + self.definition_id = definition_id + self.descriptor_maps = descriptor_maps + + +class PresentationSubmissionSchema(BaseModelSchema): + """Single PresentationSubmission Schema.""" + + class Meta: + """PresentationSubmissionSchema metadata.""" + + model_class = PresentationSubmission + unknown = EXCLUDE + + id = fields.Str( + description="ID", + required=False, + **UUID4, + ) + definition_id = fields.Str( + description="DefinitionID", + required=False, + **UUID4, + ) + descriptor_maps = fields.List( + fields.Nested(InputDescriptorMappingSchema), + required=False, + data_key="descriptor_map", + ) + + +class VerifiablePresentation(BaseModel): + """Single VerifiablePresentation object.""" + + class Meta: + """VerifiablePresentation metadata.""" + + schema_class = "VerifiablePresentationSchema" + + def __init__( + self, + *, + id: str = None, + contexts: Sequence[Union[str, dict]] = None, + types: Sequence[str] = None, + credentials: Sequence[dict] = None, + proof: Sequence[dict] = None, + presentation_submission: PresentationSubmission = None, + ): + """Initialize VerifiablePresentation.""" + self.id = id + self.contexts = contexts + self.types = types + self.credentials = credentials + self.proof = proof + self.presentation_submission = presentation_submission + + +class VerifiablePresentationSchema(BaseModelSchema): + """Single Verifiable Presentation Schema.""" + + class Meta: + """VerifiablePresentationSchema metadata.""" + + model_class = VerifiablePresentation + unknown = INCLUDE + + id = fields.Str( + description="ID", + 
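# A small sketch of the submission models above, assuming BaseModel.serialize()
# dumps through the schema: serialized keys follow the DIF spec names
# ("descriptor_map", "format"). Identifiers below are sample values only.
from aries_cloudagent.protocols.present_proof.dif.pres_exch import (
    InputDescriptorMapping,
    PresentationSubmission,
)

submission = PresentationSubmission(
    id="a30e3b91-fb77-4d22-95fa-871689c322e2",
    definition_id="32f54163-7166-48f1-93d8-ff217bdb0653",
    descriptor_maps=[
        InputDescriptorMapping(
            id="citizenship_input_1",
            fmt="ldp_vp",
            path="$.verifiableCredential[0]",
        )
    ],
)
dumped = submission.serialize()
assert dumped["descriptor_map"][0]["format"] == "ldp_vp"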
required=False, + **UUID4, + ) + contexts = fields.List( + StrOrDictField(), + data_key="@context", + ) + types = fields.List( + fields.Str(description="Types", required=False), + data_key="type", + ) + credentials = fields.List( + fields.Dict(description="Credentials", required=False), + data_key="verifiableCredential", + ) + proof = fields.Nested( + LinkedDataProofSchema(), + required=True, + description="The proof of the credential", + ) + presentation_submission = fields.Nested(PresentationSubmissionSchema) + + +class DIFOptions(BaseModel): + """Single DIFOptions object.""" + + class Meta: + """DIFOptions metadata.""" + + schema_class = "DIFOptionsSchema" + + def __init__( + self, + *, + challenge: str = None, + domain: str = None, + ): + """Initialize DIFOptions.""" + self.challenge = challenge + self.domain = domain + + +class DIFOptionsSchema(BaseModelSchema): + """Schema for options required for the Prover to fulfill the Verifier's request.""" + + class Meta: + """DIFOptionsSchema metadata.""" + + model_class = DIFOptions + unknown = EXCLUDE + + challenge = fields.String( + description="Challenge protect against replay attack", + required=False, + **UUID4, + ) + domain = fields.String( + description="Domain protect against replay attack", + required=False, + example="4jt78h47fh47", + ) diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py new file mode 100644 index 0000000000..8d7dfa0be5 --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py @@ -0,0 +1,1209 @@ +""" +Utilities for dif presentation exchange attachment. + +General Flow: +create_vp -> +make_requirement [create a Requirement from SubmissionRequirements and Descriptors] -> +apply_requirement [filter credentials] -> +merge [return applicable credential list and descriptor_map for presentation_submission] +returns VerifiablePresentation +""" +import pytz +import re + +from datetime import datetime +from dateutil.parser import parse as dateutil_parser +from dateutil.parser import ParserError +from jsonpath_ng import parse +from pyld import jsonld +from pyld.jsonld import JsonLdProcessor +from typing import Sequence, Optional, Tuple +from unflatten import unflatten +from uuid import uuid4 + +from ....core.error import BaseError +from ....core.profile import Profile +from ....did.did_key import DIDKey +from ....storage.vc_holder.vc_record import VCRecord +from ....vc.ld_proofs import ( + Ed25519Signature2018, + BbsBlsSignature2020, + BbsBlsSignatureProof2020, + WalletKeyPair, + DocumentLoader, +) +from ....vc.ld_proofs.constants import ( + SECURITY_CONTEXT_BBS_URL, + EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE, +) +from ....vc.vc_ld.prove import sign_presentation, create_presentation, derive_credential +from ....wallet.base import BaseWallet, DIDInfo +from ....wallet.key_type import KeyType + +from .pres_exch import ( + PresentationDefinition, + InputDescriptors, + DIFField, + Filter, + Constraints, + SubmissionRequirements, + Requirement, + SchemaInputDescriptor, + InputDescriptorMapping, + PresentationSubmission, +) + +PRESENTATION_SUBMISSION_JSONLD_CONTEXT = ( + "https://identity.foundation/presentation-exchange/submission/v1" +) +PRESENTATION_SUBMISSION_JSONLD_TYPE = "PresentationSubmission" + + +class DIFPresExchError(BaseError): + """Base class for DIF Presentation Exchange related errors.""" + + +class DIFPresExchHandler: + """Base Presentation Exchange Handler.""" + + 
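# A rough end-to-end sketch of the flow named in the module docstring
# (create_vp -> make_requirement -> apply_requirements -> merge). Here
# profile, vc_records (stored W3C credentials as VCRecords) and
# presentation_definition are assumed to exist; run inside an async context.
dif_handler = DIFPresExchHandler(profile, proof_type="BbsBlsSignature2020")
vp = await dif_handler.create_vp(
    credentials=vc_records,
    pd=presentation_definition,
    challenge="3fa85f64-5717-4562-b3fc-2c963f66afa6",
)
# The returned dict is a verifiable presentation (signed where a suitable
# signing DID is found) carrying the generated "presentation_submission".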
ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING = { + Ed25519Signature2018: KeyType.ED25519, + } + + if BbsBlsSignature2020.BBS_SUPPORTED: + ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING[BbsBlsSignature2020] = KeyType.BLS12381G2 + + DERIVE_SIGNATURE_SUITE_KEY_TYPE_MAPPING = { + BbsBlsSignatureProof2020: KeyType.BLS12381G2, + } + PROOF_TYPE_SIGNATURE_SUITE_MAPPING = { + suite.signature_type: suite + for suite, key_type in ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING.items() + } + DERIVED_PROOF_TYPE_SIGNATURE_SUITE_MAPPING = { + suite.signature_type: suite + for suite, key_type in DERIVE_SIGNATURE_SUITE_KEY_TYPE_MAPPING.items() + } + + def __init__( + self, + profile: Profile, + pres_signing_did: str = None, + proof_type: str = None, + ): + """Initialize PresExchange Handler.""" + super().__init__() + self.profile = profile + self.pres_signing_did = pres_signing_did + if not proof_type: + self.proof_type = Ed25519Signature2018.signature_type + else: + self.proof_type = proof_type + + async def _get_issue_suite( + self, + *, + wallet: BaseWallet, + issuer_id: str, + ): + """Get signature suite for signing presentation.""" + did_info = await self._did_info_for_did(issuer_id) + verification_method = self._get_verification_method(issuer_id) + + # Get signature class based on proof type + SignatureClass = self.PROOF_TYPE_SIGNATURE_SUITE_MAPPING[self.proof_type] + + # Generically create signature class + return SignatureClass( + verification_method=verification_method, + key_pair=WalletKeyPair( + wallet=wallet, + key_type=self.ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING[SignatureClass], + public_key_base58=did_info.verkey if did_info else None, + ), + ) + + async def _get_derive_suite( + self, + *, + wallet: BaseWallet, + ): + """Get signature suite for deriving credentials.""" + # Get signature class based on proof type + SignatureClass = self.DERIVED_PROOF_TYPE_SIGNATURE_SUITE_MAPPING[ + "BbsBlsSignatureProof2020" + ] + + # Generically create signature class + return SignatureClass( + key_pair=WalletKeyPair( + wallet=wallet, + key_type=self.DERIVE_SIGNATURE_SUITE_KEY_TYPE_MAPPING[SignatureClass], + ), + ) + + def _get_verification_method(self, did: str): + """Get the verification method for a did.""" + if did.startswith("did:key:"): + return DIDKey.from_did(did).key_id + elif did.startswith("did:sov:"): + # key-1 is what uniresolver uses for key id + return did + "#key-1" + else: + raise DIFPresExchError( + f"Unable to get retrieve verification method for did {did}" + ) + + async def _did_info_for_did(self, did: str) -> DIDInfo: + """Get the did info for specified did. + + If the did starts with did:sov it will remove the prefix for + backwards compatibility with not fully qualified did. + + Args: + did (str): The did to retrieve from the wallet. + + Raises: + WalletNotFoundError: If the did is not found in the wallet. 
+ + Returns: + DIDInfo: did information + + """ + async with self.profile.session() as session: + wallet = session.inject(BaseWallet) + + # If the did starts with did:sov we need to query without + if did.startswith("did:sov:"): + return await wallet.get_local_did(did.replace("did:sov:", "")) + + # All other methods we can just query + return await wallet.get_local_did(did) + + async def get_sign_key_credential_subject_id( + self, applicable_creds: Sequence[VCRecord] + ) -> Tuple[Optional[str], Sequence[dict]]: + """Get the issuer_id and filtered_creds from enclosed credentials subject_ids.""" + issuer_id = None + filtered_creds_list = [] + if self.proof_type == BbsBlsSignature2020.signature_type: + reqd_key_type = KeyType.BLS12381G2 + else: + reqd_key_type = KeyType.ED25519 + for cred in applicable_creds: + if len(cred.subject_ids) > 0: + if not issuer_id: + for cred_subject_id in cred.subject_ids: + if not cred_subject_id.startswith("urn:"): + did_info = await self._did_info_for_did(cred_subject_id) + if did_info.key_type == reqd_key_type: + issuer_id = cred_subject_id + filtered_creds_list.append(cred.cred_value) + break + else: + if issuer_id in cred.subject_ids: + filtered_creds_list.append(cred.cred_value) + else: + raise DIFPresExchError( + "Applicable credentials have different credentialSubject.id, " + "multiple proofs are not supported currently" + ) + return (issuer_id, filtered_creds_list) + + async def to_requirement( + self, sr: SubmissionRequirements, descriptors: Sequence[InputDescriptors] + ) -> Requirement: + """ + Return Requirement. + + Args: + sr: submission_requirement + descriptors: list of input_descriptors + Raises: + DIFPresExchError: If not able to create requirement + + """ + input_descriptors = [] + nested = [] + total_count = 0 + + if sr._from: + if sr._from != "": + for descriptor in descriptors: + if self.contains(descriptor.groups, sr._from): + input_descriptors.append(descriptor) + total_count = len(input_descriptors) + if total_count == 0: + raise DIFPresExchError(f"No descriptors for from: {sr._from}") + else: + for submission_requirement in sr.from_nested: + try: + # recursion logic + requirement = await self.to_requirement( + submission_requirement, descriptors + ) + nested.append(requirement) + except Exception as err: + raise DIFPresExchError( + ( + "Error creating requirement from " + f"nested submission_requirements, {err}" + ) + ) + total_count = len(nested) + count = sr.count + if sr.rule == "all": + count = total_count + requirement = Requirement( + count=count, + maximum=sr.maximum, + minimum=sr.minimum, + input_descriptors=input_descriptors, + nested_req=nested, + ) + return requirement + + async def make_requirement( + self, + srs: Sequence[SubmissionRequirements] = None, + descriptors: Sequence[InputDescriptors] = None, + ) -> Requirement: + """ + Return Requirement. 
+ + Creates and return Requirement with nesting if required + using to_requirement() + + Args: + srs: list of submission_requirements + descriptors: list of input_descriptors + Raises: + DIFPresExchError: If not able to create requirement + + """ + if not srs: + srs = [] + if not descriptors: + descriptors = [] + if len(srs) == 0: + requirement = Requirement( + count=len(descriptors), + input_descriptors=descriptors, + ) + return requirement + requirement = Requirement( + count=len(srs), + nested_req=[], + ) + for submission_requirement in srs: + try: + requirement.nested_req.append( + await self.to_requirement(submission_requirement, descriptors) + ) + except Exception as err: + raise DIFPresExchError( + f"Error creating requirement inside to_requirement function, {err}" + ) + return requirement + + def is_len_applicable(self, req: Requirement, val: int) -> bool: + """ + Check and validate requirement minimum, maximum and count. + + Args: + req: Requirement + val: int value to check + Return: + bool + + """ + if req.count: + if req.count > 0 and val != req.count: + return False + if req.minimum: + if req.minimum > 0 and req.minimum > val: + return False + if req.maximum: + if req.maximum > 0 and req.maximum < val: + return False + return True + + def contains(self, data: Sequence[str], e: str) -> bool: + """ + Check for e in data. + + Returns True if e exists in data else return False + + Args: + data: Sequence of str + e: str value to check + Return: + bool + + """ + data_list = list(data) if data else [] + for k in data_list: + if e == k: + return True + return False + + async def filter_constraints( + self, + constraints: Constraints, + credentials: Sequence[VCRecord], + ) -> Sequence[VCRecord]: + """ + Return list of applicable VCRecords after applying filtering. 
+ + Args: + constraints: Constraints + credentials: Sequence of credentials + to apply filtering on + Return: + Sequence of applicable VCRecords + + """ + document_loader = self.profile.inject(DocumentLoader) + + result = [] + for credential in credentials: + if constraints.subject_issuer == "required" and not self.subject_is_issuer( + credential=credential + ): + continue + + applicable = False + for field in constraints._fields: + applicable = await self.filter_by_field(field, credential) + if applicable: + break + if not applicable: + continue + + if constraints.limit_disclosure == "required": + credential_dict = credential.cred_value + new_credential_dict = self.reveal_doc( + credential_dict=credential_dict, constraints=constraints + ) + async with self.profile.session() as session: + wallet = session.inject(BaseWallet) + derive_suite = await self._get_derive_suite( + wallet=wallet, + ) + signed_new_credential_dict = await derive_credential( + credential=credential_dict, + reveal_document=new_credential_dict, + suite=derive_suite, + document_loader=document_loader, + ) + credential = self.create_vcrecord(signed_new_credential_dict) + result.append(credential) + return result + + def create_vcrecord(self, cred_dict: dict) -> VCRecord: + """Return VCRecord from a credential dict.""" + proofs = cred_dict.get("proof") or [] + proof_types = None + if type(proofs) is dict: + proofs = [proofs] + if proofs: + proof_types = [proof.get("type") for proof in proofs] + contexts = [ctx for ctx in cred_dict.get("@context") if type(ctx) is str] + if "@graph" in cred_dict: + for enclosed_data in cred_dict.get("@graph"): + if ( + enclosed_data["id"].startswith("urn:") + and "credentialSubject" in enclosed_data + ): + cred_dict.update(enclosed_data) + del cred_dict["@graph"] + break + given_id = cred_dict.get("id") + # issuer + issuer = cred_dict.get("issuer") + if type(issuer) is dict: + issuer = issuer.get("id") + + # subjects + subject_ids = None + subjects = cred_dict.get("credentialSubject") + if subjects: + if type(subjects) is dict: + subjects = [subjects] + subject_ids = [ + subject.get("id") for subject in subjects if ("id" in subject) + ] + else: + cred_dict["credentialSubject"] = {} + + # Schemas + schemas = cred_dict.get("credentialSchema", []) + if type(schemas) is dict: + schemas = [schemas] + schema_ids = [schema.get("id") for schema in schemas] + expanded = jsonld.expand(cred_dict) + types = JsonLdProcessor.get_values( + expanded[0], + "@type", + ) + return VCRecord( + contexts=contexts, + expanded_types=types, + issuer_id=issuer, + subject_ids=subject_ids, + proof_types=proof_types, + given_id=given_id, + cred_value=cred_dict, + schema_ids=schema_ids, + ) + + def reveal_doc(self, credential_dict: dict, constraints: Constraints): + """Generate reveal_doc dict for deriving credential.""" + derived = { + "@context": credential_dict.get("@context"), + "type": credential_dict.get("type"), + "@explicit": True, + "@requireAll": True, + "issuanceDate": {}, + "issuer": {}, + } + unflatten_dict = {} + for field in constraints._fields: + for path in field.paths: + jsonpath = parse(path) + match = jsonpath.find(credential_dict) + if len(match) == 0: + continue + for match_item in match: + full_path = str(match_item.full_path) + if bool(re.search(pattern=r"\[[0-9]+\]", string=full_path)): + full_path = full_path.replace(".[", "[") + unflatten_dict[full_path] = {} + explicit_key_path = None + key_list = full_path.split(".")[:-1] + for key in key_list: + if not explicit_key_path: + explicit_key_path = 
key + else: + explicit_key_path = explicit_key_path + "." + key + unflatten_dict[explicit_key_path + ".@explicit"] = True + unflatten_dict[explicit_key_path + ".@requireAll"] = True + derived = self.new_credential_builder(derived, unflatten_dict) + # Fix issue related to credentialSubject type property + if "credentialSubject" in derived.keys(): + if "type" in credential_dict.get("credentialSubject"): + derived["credentialSubject"]["type"] = credential_dict.get( + "credentialSubject" + ).get("type") + if "credentialSubject" not in derived.keys(): + if isinstance(credential_dict.get("credentialSubject"), list): + derived["credentialSubject"] = [] + elif isinstance(credential_dict.get("credentialSubject"), dict): + derived["credentialSubject"] = {} + return derived + + def new_credential_builder( + self, new_credential: dict, unflatten_dict: dict + ) -> dict: + """ + Update and return the new_credential. + + Args: + new_credential: credential dict to be updated and returned + unflatten_dict: dict with traversal path as key and match_value as value + Return: + dict + + """ + new_credential.update(unflatten(unflatten_dict)) + return new_credential + + async def filter_by_field(self, field: DIFField, credential: VCRecord) -> bool: + """ + Apply filter on VCRecord. + + Checks if a credential is applicable + + Args: + field: Field contains filtering spec + credential: credential to apply filtering on + Return: + bool + + """ + credential_dict = credential.cred_value + for path in field.paths: + if "$.proof." in path: + raise DIFPresExchError( + "JSON Path expression matching on proof object " + "is not currently supported" + ) + jsonpath = parse(path) + match = jsonpath.find(credential_dict) + if len(match) == 0: + continue + for match_item in match: + # No filter in constraint + if not field._filter: + return True + if self.validate_patch(match_item.value, field._filter): + return True + return False + + def validate_patch(self, to_check: any, _filter: Filter) -> bool: + """ + Apply filter on match_value. + + Utility function used in applying filtering to a cred + by triggering checks according to filter specification + + Args: + to_check: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + return_val = False + if _filter._type: + if self.check_filter_only_type_enforced(_filter): + if _filter._type == "number": + if isinstance(to_check, (int, float)): + return True + elif _filter._type == "string": + if isinstance(to_check, str): + if _filter.fmt == "date" or _filter.fmt == "date-time": + try: + to_compare_date = dateutil_parser(to_check) + if isinstance(to_compare_date, datetime): + return True + except (ParserError, TypeError): + return False + else: + return True + else: + if _filter._type == "number": + return_val = self.process_numeric_val(to_check, _filter) + elif _filter._type == "string": + return_val = self.process_string_val(to_check, _filter) + else: + if _filter.enums: + return_val = self.enum_check(val=to_check, _filter=_filter) + if _filter.const: + return_val = self.const_check(val=to_check, _filter=_filter) + + if _filter._not: + return not return_val + return return_val + + def check_filter_only_type_enforced(self, _filter: Filter) -> bool: + """ + Check if only type is specified in filter. 
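# A minimal sketch of the field/filter matching implemented by filter_by_field
# and validate_patch above; profile is assumed to exist, and Filter is the
# model imported from .pres_exch at the top of this module.
handler = DIFPresExchHandler(profile)
assert handler.validate_patch(19, Filter(_type="number", minimum=18))
assert handler.validate_patch("JOHN", Filter(_type="string"))
assert not handler.validate_patch("JOHN", Filter(_type="number"))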
+ + Args: + _filter: Filter + Return: + bool + + """ + if ( + _filter.pattern is None + and _filter.minimum is None + and _filter.maximum is None + and _filter.min_length is None + and _filter.max_length is None + and _filter.exclusive_min is None + and _filter.exclusive_max is None + and _filter.const is None + and _filter.enums is None + ): + return True + else: + return False + + def process_numeric_val(self, val: any, _filter: Filter) -> bool: + """ + Trigger Filter checks. + + Trigger appropriate check for a number type filter, + according to _filter spec. + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + if _filter.exclusive_max: + return self.exclusive_maximum_check(val, _filter) + elif _filter.exclusive_min: + return self.exclusive_minimum_check(val, _filter) + elif _filter.minimum: + return self.minimum_check(val, _filter) + elif _filter.maximum: + return self.maximum_check(val, _filter) + elif _filter.const: + return self.const_check(val, _filter) + elif _filter.enums: + return self.enum_check(val, _filter) + else: + return False + + def process_string_val(self, val: any, _filter: Filter) -> bool: + """ + Trigger Filter checks. + + Trigger appropriate check for a string type filter, + according to _filter spec. + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + if _filter.min_length or _filter.max_length: + return self.length_check(val, _filter) + elif _filter.pattern: + return self.pattern_check(val, _filter) + elif _filter.enums: + return self.enum_check(val, _filter) + elif _filter.exclusive_max and _filter.fmt: + return self.exclusive_maximum_check(val, _filter) + elif _filter.exclusive_min and _filter.fmt: + return self.exclusive_minimum_check(val, _filter) + elif _filter.minimum and _filter.fmt: + return self.minimum_check(val, _filter) + elif _filter.maximum and _filter.fmt: + return self.maximum_check(val, _filter) + elif _filter.const: + return self.const_check(val, _filter) + else: + return False + + def exclusive_minimum_check(self, val: any, _filter: Filter) -> bool: + """ + Exclusiveminimum check. + + Returns True if value greater than filter specified check + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + try: + if _filter.fmt: + utc = pytz.UTC + if _filter.fmt == "date" or _filter.fmt == "date-time": + to_compare_date = dateutil_parser(_filter.exclusive_min).replace( + tzinfo=utc + ) + given_date = dateutil_parser(str(val)).replace(tzinfo=utc) + return given_date > to_compare_date + else: + if self.is_numeric(val): + return val > _filter.exclusive_min + return False + except (TypeError, ValueError, ParserError): + return False + + def exclusive_maximum_check(self, val: any, _filter: Filter) -> bool: + """ + Exclusivemaximum check. + + Returns True if value less than filter specified check + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + try: + if _filter.fmt: + utc = pytz.UTC + if _filter.fmt == "date" or _filter.fmt == "date-time": + to_compare_date = dateutil_parser(_filter.exclusive_max).replace( + tzinfo=utc + ) + given_date = dateutil_parser(str(val)).replace(tzinfo=utc) + return given_date < to_compare_date + else: + if self.is_numeric(val): + return val < _filter.exclusive_max + return False + except (TypeError, ValueError, ParserError): + return False + + def maximum_check(self, val: any, _filter: Filter) -> bool: + """ + Maximum check. 
+ + Returns True if value less than equal to filter specified check + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + try: + if _filter.fmt: + utc = pytz.UTC + if _filter.fmt == "date" or _filter.fmt == "date-time": + to_compare_date = dateutil_parser(_filter.maximum).replace( + tzinfo=utc + ) + given_date = dateutil_parser(str(val)).replace(tzinfo=utc) + return given_date <= to_compare_date + else: + if self.is_numeric(val): + return val <= _filter.maximum + return False + except (TypeError, ValueError, ParserError): + return False + + def minimum_check(self, val: any, _filter: Filter) -> bool: + """ + Minimum check. + + Returns True if value greater than equal to filter specified check + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + try: + if _filter.fmt: + utc = pytz.UTC + if _filter.fmt == "date" or _filter.fmt == "date-time": + to_compare_date = dateutil_parser(_filter.minimum).replace( + tzinfo=utc + ) + given_date = dateutil_parser(str(val)).replace(tzinfo=utc) + return given_date >= to_compare_date + else: + if self.is_numeric(val): + return val >= _filter.minimum + return False + except (TypeError, ValueError, ParserError): + return False + + def length_check(self, val: any, _filter: Filter) -> bool: + """ + Length check. + + Returns True if length value string meets the minLength and maxLength specs + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + given_len = len(str(val)) + if _filter.max_length and _filter.min_length: + if given_len <= _filter.max_length and given_len >= _filter.min_length: + return True + elif _filter.max_length and not _filter.min_length: + if given_len <= _filter.max_length: + return True + elif not _filter.max_length and _filter.min_length: + if given_len >= _filter.min_length: + return True + return False + + def pattern_check(self, val: any, _filter: Filter) -> bool: + """ + Pattern check. + + Returns True if value string matches the specified pattern + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + if _filter.pattern: + return bool(re.search(pattern=_filter.pattern, string=str(val))) + return False + + def const_check(self, val: any, _filter: Filter) -> bool: + """ + Const check. + + Returns True if value is equal to filter specified check + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + if val == _filter.const: + return True + return False + + def enum_check(self, val: any, _filter: Filter) -> bool: + """ + Enum check. + + Returns True if value is contained to filter specified list + + Args: + val: value to check, extracted from match + _filter: Filter + Return: + bool + + """ + if val in _filter.enums: + return True + return False + + def subject_is_issuer(self, credential: VCRecord) -> bool: + """ + subject_is_issuer check. + + Returns True if cred issuer_id is in subject_ids + + Args: + credential: VCRecord + Return: + bool + + """ + subject_ids = credential.subject_ids + for subject_id in subject_ids: + issuer_id = credential.issuer_id + if subject_id != "" and subject_id == issuer_id: + return True + return False + + async def filter_schema( + self, credentials: Sequence[VCRecord], schemas: Sequence[SchemaInputDescriptor] + ) -> Sequence[VCRecord]: + """ + Filter by schema. 
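# A brief sketch of the date/date-time comparisons above: both sides are parsed
# with dateutil and normalized to UTC before comparing; profile is assumed to
# exist and the sample dates are hypothetical.
handler = DIFPresExchHandler(profile)
assert handler.minimum_check(
    "2010-01-01T19:53:24Z", Filter(fmt="date-time", minimum="2005-05-10")
)
assert not handler.maximum_check(
    "2010-01-01T19:53:24Z", Filter(fmt="date-time", maximum="2005-05-10")
)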
+ + Returns list of credentials where credentialSchema.id or types matched + with input_descriptors.schema.uri + + Args: + credentials: list of VCRecords to check + schemas: list of schemas from the input_descriptors + Return: + Sequence of filtered VCRecord + + """ + result = [] + for credential in credentials: + applicable = False + for schema in schemas: + applicable = self.credential_match_schema( + credential=credential, schema_id=schema.uri + ) + if schema.required and not applicable: + break + if applicable: + if schema.uri in [ + EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE, + ]: + continue + else: + break + if applicable: + result.append(credential) + return result + + def credential_match_schema(self, credential: VCRecord, schema_id: str) -> bool: + """ + Credential matching by schema. + + Used by filter_schema to check if credential.schema_ids or credential.types + matched with schema_id + + Args: + credential: VCRecord to check + schema_id: schema uri to check + Return: + bool + """ + if schema_id in credential.schema_ids: + return True + if schema_id in credential.expanded_types: + return True + return False + + async def apply_requirements( + self, req: Requirement, credentials: Sequence[VCRecord] + ) -> dict: + """ + Apply Requirement. + + Args: + req: Requirement + credentials: Sequence of credentials to check against + Return: + dict of input_descriptor ID key to list of credential_json + """ + # Dict for storing descriptor_id keys and list of applicable + # credentials values + result = {} + # Get all input_descriptors attached to the PresentationDefinition + descriptor_list = req.input_descriptors or [] + for descriptor in descriptor_list: + # Filter credentials to apply filtering + # upon by matching each credentialSchema.id + # or expanded types on each InputDescriptor's schema URIs + filtered_by_schema = await self.filter_schema( + credentials=credentials, schemas=descriptor.schemas + ) + # Filter credentials based upon path expressions specified in constraints + filtered = await self.filter_constraints( + constraints=descriptor.constraint, + credentials=filtered_by_schema, + ) + if len(filtered) != 0: + result[descriptor.id] = filtered + + if len(descriptor_list) != 0: + # Applies min, max or count attributes of submission_requirement + if self.is_len_applicable(req, len(result)): + return result + return {} + + nested_result = [] + given_id_descriptors = {} + # recursion logic for nested requirements + for requirement in req.nested_req: + # recursive call + result = await self.apply_requirements(requirement, credentials) + if result == {}: + continue + # given_id_descriptors maps applicable credentials + # to their respective descriptor. + # Structure: {cred.given_id: { + # desc_id_1: {} + # }, + # ...... + # } + # This will be used to construct exclude dict. 
+ for descriptor_id in result.keys(): + credential_list = result.get(descriptor_id) + for credential in credential_list: + if credential.given_id not in given_id_descriptors: + given_id_descriptors[credential.given_id] = {} + given_id_descriptors[credential.given_id][descriptor_id] = {} + + if len(result.keys()) != 0: + nested_result.append(result) + + exclude = {} + for given_id in given_id_descriptors.keys(): + # Check if number of applicable credentials + # does not meet requirement specification + if not self.is_len_applicable(req, len(given_id_descriptors[given_id])): + for descriptor_id in given_id_descriptors[given_id]: + # Add to exclude dict + # with cred.given_id + descriptor_id as key + exclude[descriptor_id + given_id] = {} + # merging credentials and excluding credentials that don't satisfy the requirement + return await self.merge_nested_results( + nested_result=nested_result, exclude=exclude + ) + + def is_numeric(self, val: any) -> bool: + """ + Check if val is an int or float. + + Args: + val: to check + Return: + bool + """ + if isinstance(val, float) or isinstance(val, int): + return True + else: + return False + + async def merge_nested_results( + self, nested_result: Sequence[dict], exclude: dict + ) -> dict: + """ + Merge nested results with merged credentials. + + Args: + nested_result: Sequence of dict containing input_descriptor.id as keys + and list of creds as values + exclude: dict containing info about credentials to exclude + Return: + dict with input_descriptor.id as keys and merged_credentials_list as values + """ + result = {} + for res in nested_result: + for key in res.keys(): + credentials = res[key] + given_id_dict = {} + merged_credentials = [] + + if key in result: + for credential in result[key]: + if credential.given_id not in given_id_dict: + merged_credentials.append(credential) + given_id_dict[credential.given_id] = {} + + for credential in credentials: + if credential.given_id not in given_id_dict: + if (key + (credential.given_id)) not in exclude: + merged_credentials.append(credential) + given_id_dict[credential.given_id] = {} + result[key] = merged_credentials + return result + + async def create_vp( + self, + credentials: Sequence[VCRecord], + pd: PresentationDefinition, + challenge: str = None, + domain: str = None, + ) -> dict: + """ + Create VerifiablePresentation. 
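# A short sketch of the requirement application above: apply_requirements
# returns a mapping of input_descriptor id to the VCRecords that satisfied it.
# dif_handler, presentation_definition and vc_records are assumed from the
# earlier sketch; run inside an async context.
req = await dif_handler.make_requirement(
    srs=presentation_definition.submission_requirements,
    descriptors=presentation_definition.input_descriptors,
)
matches = await dif_handler.apply_requirements(req=req, credentials=vc_records)
for descriptor_id, creds in matches.items():
    print(descriptor_id, [cred.given_id for cred in creds])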
+ + Args: + credentials: Sequence of VCRecords + pd: PresentationDefinition + Return: + VerifiablePresentation + """ + document_loader = self.profile.inject(DocumentLoader) + req = await self.make_requirement( + srs=pd.submission_requirements, descriptors=pd.input_descriptors + ) + result = await self.apply_requirements(req=req, credentials=credentials) + applicable_creds, descriptor_maps = await self.merge(result) + applicable_creds_list = [] + for credential in applicable_creds: + applicable_creds_list.append(credential.cred_value) + # submission_property + submission_property = PresentationSubmission( + id=str(uuid4()), definition_id=pd.id, descriptor_maps=descriptor_maps + ) + if self.check_sign_pres(applicable_creds): + ( + issuer_id, + filtered_creds_list, + ) = await self.get_sign_key_credential_subject_id( + applicable_creds=applicable_creds + ) + if not issuer_id and len(filtered_creds_list) == 0: + vp = await create_presentation(credentials=applicable_creds_list) + vp["presentation_submission"] = submission_property.serialize() + if self.proof_type is BbsBlsSignature2020.signature_type: + vp["@context"].append(SECURITY_CONTEXT_BBS_URL) + return vp + else: + vp = await create_presentation(credentials=filtered_creds_list) + vp["presentation_submission"] = submission_property.serialize() + if self.proof_type is BbsBlsSignature2020.signature_type: + vp["@context"].append(SECURITY_CONTEXT_BBS_URL) + async with self.profile.session() as session: + wallet = session.inject(BaseWallet) + issue_suite = await self._get_issue_suite( + wallet=wallet, + issuer_id=issuer_id, + ) + signed_vp = await sign_presentation( + presentation=vp, + suite=issue_suite, + challenge=challenge, + document_loader=document_loader, + ) + return signed_vp + else: + vp = await create_presentation(credentials=applicable_creds_list) + vp["presentation_submission"] = submission_property.serialize() + if self.proof_type is BbsBlsSignature2020.signature_type: + vp["@context"].append(SECURITY_CONTEXT_BBS_URL) + if self.pres_signing_did: + async with self.profile.session() as session: + wallet = session.inject(BaseWallet) + issue_suite = await self._get_issue_suite( + wallet=wallet, + issuer_id=self.pres_signing_did, + ) + signed_vp = await sign_presentation( + presentation=vp, + suite=issue_suite, + challenge=challenge, + document_loader=document_loader, + ) + return signed_vp + else: + return vp + + def check_sign_pres(self, creds: Sequence[VCRecord]) -> bool: + """Check if applicable creds have CredentialSubject.id set.""" + for cred in creds: + if len(cred.subject_ids) > 0 and not next( + iter(cred.subject_ids) + ).startswith("urn:"): + return True + return False + + async def merge( + self, + dict_descriptor_creds: dict, + ) -> (Sequence[VCRecord], Sequence[InputDescriptorMapping]): + """ + Return applicable credentials and descriptor_map for attachment. + + Used for generating the presentation_submission property with the + descriptor_map, mantaining the order in which applicable credential + list is returned. 
+ + Args: + dict_descriptor_creds: dict with input_descriptor.id as keys + and merged_credentials_list + Return: + Tuple of applicable credential list and descriptor map + """ + dict_of_creds = {} + dict_of_descriptors = {} + result = [] + descriptors = [] + sorted_desc_keys = sorted(list(dict_descriptor_creds.keys())) + for desc_id in sorted_desc_keys: + credentials = dict_descriptor_creds.get(desc_id) + for cred in credentials: + if cred.given_id not in dict_of_creds: + result.append(cred) + dict_of_creds[cred.given_id] = len(descriptors) + + if f"{cred.given_id}-{cred.given_id}" not in dict_of_descriptors: + descriptor_map = InputDescriptorMapping( + id=desc_id, + fmt="ldp_vp", + path=( + f"$.verifiableCredential[{dict_of_creds[cred.given_id]}]" + ), + ) + descriptors.append(descriptor_map) + + descriptors = sorted(descriptors, key=lambda i: i.id) + return (result, descriptors) diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_proposal_schema.py b/aries_cloudagent/protocols/present_proof/dif/pres_proposal_schema.py new file mode 100644 index 0000000000..4f48851001 --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/pres_proposal_schema.py @@ -0,0 +1,18 @@ +"""DIF Proof Proposal Schema.""" +from marshmallow import fields + +from ....messaging.models.openapi import OpenAPISchema + +from .pres_exch import InputDescriptorsSchema + + +class DIFProofProposalSchema(OpenAPISchema): + """Schema for DIF Proposal.""" + + input_descriptors = fields.List( + fields.Nested( + InputDescriptorsSchema(), + required=True, + ), + required=False, + ) diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_request_schema.py b/aries_cloudagent/protocols/present_proof/dif/pres_request_schema.py new file mode 100644 index 0000000000..a625a3b2d7 --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/pres_request_schema.py @@ -0,0 +1,47 @@ +"""DIF Proof Request Schema.""" +from marshmallow import fields + +from ....messaging.models.openapi import OpenAPISchema + +from .pres_exch import PresentationDefinitionSchema, DIFOptionsSchema + + +class DIFProofRequestSchema(OpenAPISchema): + """Schema for DIF Proof request.""" + + options = fields.Nested( + DIFOptionsSchema(), + required=False, + ) + presentation_definition = fields.Nested( + PresentationDefinitionSchema(), + required=True, + ) + + +class DIFPresSpecSchema(OpenAPISchema): + """Schema for DIF Presentation Spec schema.""" + + issuer_id = fields.Str( + description=( + ( + "Issuer identifier to sign the presentation," + " if different from current public DID" + ) + ), + required=False, + ) + record_ids = fields.List( + fields.Str(description="Record identifier"), + description=( + ( + "List of record_id to fetch stored " + "W3C credentials for presentation exchange" + ) + ), + required=False, + ) + presentation_definition = fields.Nested( + PresentationDefinitionSchema(), + required=False, + ) diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_schema.py b/aries_cloudagent/protocols/present_proof/dif/pres_schema.py new file mode 100644 index 0000000000..5a5905606a --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/pres_schema.py @@ -0,0 +1,41 @@ +"""DIF Proof Schema.""" +from marshmallow import fields + +from ....messaging.models.openapi import OpenAPISchema +from ....messaging.valid import ( + UUID4, + StrOrDictField, +) +from ....vc.vc_ld.models import LinkedDataProofSchema + +from .pres_exch import PresentationSubmissionSchema + + +class DIFProofSchema(OpenAPISchema): + """Schema for DIF 
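# A minimal sketch of a DIF proof request body accepted by DIFProofRequestSchema
# above (presentation_definition is required, options are optional). URIs and
# identifiers below are sample values only.
from aries_cloudagent.protocols.present_proof.dif.pres_request_schema import (
    DIFProofRequestSchema,
)

DIFProofRequestSchema().load(
    {
        "options": {
            "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
            "domain": "4jt78h47fh47",
        },
        "presentation_definition": {
            "id": "32f54163-7166-48f1-93d8-ff217bdb0653",
            "format": {"ldp_vp": {"proof_type": ["BbsBlsSignature2020"]}},
            "input_descriptors": [
                {
                    "id": "citizenship_input_1",
                    "name": "Citizenship Information",
                    "schema": [{"uri": "https://w3id.org/citizenship/v1"}],
                    "constraints": {
                        "limit_disclosure": "required",
                        "fields": [{"path": ["$.credentialSubject.givenName"]}],
                    },
                }
            ],
        },
    }
)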
Proof.""" + + id = fields.Str( + description="ID", + required=False, + **UUID4, + ) + contexts = fields.List( + StrOrDictField(), + data_key="@context", + required=True, + ) + types = fields.List( + fields.Str(description="Types"), + data_key="type", + required=True, + ) + credentials = fields.List( + fields.Dict(description="Credentials", required=False), + data_key="verifiableCredential", + ) + proof = fields.Nested( + LinkedDataProofSchema(), + required=False, + description="The proof of the credential", + ) + presentation_submission = fields.Nested(PresentationSubmissionSchema()) diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/__init__.py b/aries_cloudagent/protocols/present_proof/dif/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_data.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_data.py new file mode 100644 index 0000000000..e606eb8f1d --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_data.py @@ -0,0 +1,1306 @@ +"""Data for DIFPresExchHandler.""" +import json +from pyld import jsonld +from pyld.jsonld import JsonLdProcessor + +from .....storage.vc_holder.vc_record import VCRecord + +from ..pres_exch import PresentationDefinition + + +def create_vcrecord(cred_dict: dict, expanded_types: list): + given_id = cred_dict.get("id") + contexts = [ctx for ctx in cred_dict.get("@context") if type(ctx) is str] + + # issuer + issuer = cred_dict.get("issuer") + if type(issuer) is dict: + issuer = issuer.get("id") + + # subjects + subjects = cred_dict.get("credentialSubject") + if type(subjects) is dict: + subjects = [subjects] + subject_ids = [subject.get("id") for subject in subjects if subject.get("id")] + + # Schemas + schemas = cred_dict.get("credentialSchema", []) + if type(schemas) is dict: + schemas = [schemas] + schema_ids = [schema.get("id") for schema in schemas] + + # Proofs (this can be done easier if we use the expanded version) + proofs = cred_dict.get("proof") or [] + proof_types = None + if type(proofs) is dict: + proofs = [proofs] + if proofs: + proof_types = [proof.get("type") for proof in proofs] + + return VCRecord( + contexts=contexts, + expanded_types=expanded_types, + issuer_id=issuer, + subject_ids=subject_ids, + proof_types=proof_types, + given_id=given_id, + cred_value=cred_dict, + schema_ids=schema_ids, + ) + + +bbs_signed_cred_no_credsubjectid = [ + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "identifier": "83627465", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2019-12-03T12:19:52Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "type": ["PermanentResident", "Person"], + "givenName": "JOHN", + "familyName": "SMITH", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Bahamas", + "birthDate": "1958-07-17", + }, + "proof": { + "type": "BbsBlsSignature2020", + 
"verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "hG9cNGyjjAgPkDmtNv/+28ciBZFUVcAG2gfvLBlTWeFyYJu6DARo16RwQAoSnrgVRQn3n7KCSdnSrPb3op1+vSTu2vo+LF3GfSfqlei44bwA+c2FBIRk7S3FKY6Lm5mqOtC2Q4LStC9HtaOj8vQhgA==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "credentialSubject": { + "birthCountry": "Bahamas", + "birthDate": "1958-07-17", + "familyName": "SMITH", + "gender": "Female", + "givenName": "ALICE", + "type": ["PermanentResident", "Person"], + }, + "issuanceDate": "2020-01-01T12:00:00Z", + "issuer": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "haUMgpZE4hPiIEvzdEWyGsvXh1enQvhsOq2cMf3q80u29ybRDi74zU0O+fug1bWiMxeFOboxsfuEKXGC4Ldw0sCsIs+90Jn4EuTqhY4ml8YWsKY9Kjpxvtpc0e24SOl++oo48EICfUxb24HYlQ35pw==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://example.gov/credentials/3732", + "type": ["VerifiableCredential", "UniversityDegreeCredential"], + "issuer": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "issuanceDate": "2020-03-10T04:24:12.164Z", + "credentialSubject": { + "degree": { + "type": "BachelorDegree", + "name": "Bachelor of Science and Arts", + "degreeType": "Underwater Basket Weaving", + }, + "college": "Contoso University", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "iRArJRSvmIwx5YH2HXg5OJD+0v5sD1HoqhBsiJiw59t3Eb6nSntyOnENEnqnpzQwCjtbvOsU18eBlVi2/ign1u1ysz0iOLxSRHvIKtDDpr1dTDwQCbuZo2gUnY+8Dy+xEst8MDtcXwzNQW8Y3l1XzA==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + ), +] + +bbs_signed_cred_credsubjectid = [ + 
create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "identifier": "83627465", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2019-12-03T12:19:52Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "JOHN", + "familyName": "SMITH", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Bahamas", + "birthDate": "1958-07-17", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "s++A89p+SvIHvY9pnIKIPsLjrLGGk2cs+LfpTWCsE0S1Y5Rg1h9OA5c84Vzqlc3kGfM3zdYpHrO9v0/vBFLQ3HV9wH7xgmD9MPVN+klsaQJdobRpJMjlBni7/QA2/+0szT2P1FJ537lGjyuRboVWng==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://example.gov/credentials/3732", + "type": ["VerifiableCredential", "UniversityDegreeCredential"], + "issuer": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "issuanceDate": "2020-03-10T04:24:12.164Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "degree": { + "type": "BachelorDegree", + "name": "Bachelor of Science and Arts", + "degreeType": "Underwater Basket Weaving", + }, + "college": "Contoso University", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "iGAQ4bOxuqkoCbX3RoxTqFkJsoqPcEeRN2vqIzd/zWLS+VHCwYkQHu/TeMOrit4eb6eugbJFUBaoenZyy2VU/7Rsj614sNzumJFuJ6ZaDTlv0k70CkO9GheQTc+Gwv749Y3JzPJ0dwYGUzzcyytFCQ==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + ), +] + +bbs_bls_number_filter_creds = [ + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": 
"https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627465", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "JOHN", + "familyName": "SMITH", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Bahamas", + "birthDate": "1958-07-17", + "test": 2, + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:example:489398593#test", + "created": "2021-04-13T23:23:56.045014", + "proofPurpose": "assertionMethod", + "proofValue": "rhD+4HOhPfLywBuhLYMi1i0kWa/L2Qipt+sqTRiebjoo4OF3ESoGnm+L4Movz128Mjns60H0Bz7W+aqN1dPP9uhU/FGBKW/LEIGJX1rrrYgn17CkWp46z/hwQy+8c9ulOCn0Yq3BDqB37euoBTZbOQ==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627466", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627466", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Theodor", + "familyName": "Major", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2017-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Canada", + "birthDate": "1968-07-17", + "test": 2, + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:example:489398593#test", + "created": "2021-04-13T23:33:05.798834", + "proofPurpose": "assertionMethod", + "proofValue": "jp8ahSYYFhRAk+1ahfG8qu7iEjQnEXp3P3fWgTrc4khxmw9/9mGACq67YW9r917/aKYTQcVyojelN3cBHrjBvaOzb7bZ6Ps0Wf6WFq1gc0QFUrdiN0mJRl5YAz8R16sLxrPsoS/8ji1MoabjqmlnWQ==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627467", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627467", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Cai", + "familyName": "Leblanc", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": 
"2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-9989", + "commuterClassification": "C1", + "birthCountry": "Canada", + "birthDate": "1975-07-17", + "test": 3, + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:example:489398593#test", + "created": "2021-04-13T23:40:44.835154", + "proofPurpose": "assertionMethod", + "proofValue": "t8+TPbYqF/dGlEn+qNnEFL1L0QeUjgXlYfJ7AelzOhb7cr2CjP/MIcG5bAQ5l6F2OZKNyE8RsPY14xedrkxpyv1oyWPmXzOwr0gt6ElLJm9jAUwFoZ7xAYHSedcR3Lh4FFuqmxfBHYF3A6VgSlMSfA==", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), +] + +edd_jsonld_creds = [ + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627465", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "JOHN", + "familyName": "SMITH", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Bahamas", + "birthDate": "1958-07-17", + }, + "proof": { + "type": "Ed25519Signature2018", + "verificationMethod": "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + "created": "2021-05-07T08:47:13.090322", + "proofPurpose": "assertionMethod", + "jws": "eyJhbGciOiAiRWREU0EiLCAiYjY0IjogZmFsc2UsICJjcml0IjogWyJiNjQiXX0..HHEpbiQp781YtXdxmYr3xO9a8OtHSePjySbgwGaSqiHGjd9hO0AnhkFxlBlrGukp5rkiJccr4p9KV3uKDzkqDA", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627466", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627466", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Theodor", + "familyName": "Major", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2017-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Canada", + "birthDate": "1968-07-17", + }, + "proof": { + "type": "Ed25519Signature2018", + "verificationMethod": "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + "created": "2021-05-07T08:48:49.702706", + "proofPurpose": "assertionMethod", + "jws": "eyJhbGciOiAiRWREU0EiLCAiYjY0IjogZmFsc2UsICJjcml0IjogWyJiNjQiXX0..r88-rSvqp_JLr2fnGr8nKEU--Hu6UhzjXOmdWpt082Wc6ojWpOANvv2wbgKrs5kXF5ATb8-AZ01VPpHdv4m9CQ", + }, + }, + [ + 
"https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), + create_vcrecord( + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627467", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627467", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Cai", + "familyName": "Leblanc", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-9989", + "commuterClassification": "C1", + "birthCountry": "Canada", + "birthDate": "1975-07-17", + }, + "proof": { + "type": "Ed25519Signature2018", + "verificationMethod": "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + "created": "2021-05-07T08:50:17.626625", + "proofPurpose": "assertionMethod", + "jws": "eyJhbGciOiAiRWREU0EiLCAiYjY0IjogZmFsc2UsICJjcml0IjogWyJiNjQiXX0..rubQvgig7cN-F6cYn_AJF1BCSaMpkoR517Ot_4pqwdJnQ-JwKXq6d6cNos5JR73E9WkwYISXapY0fYTIG9-fBA", + }, + }, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ), +] + +cred_list = [ + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "identifier": "83627465", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "JOHN", + "familyName": "SMITH", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Bahamas", + "birthDate": "1958-07-17", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "ssut7SPH6KiY44z5w9N/dD+L8KxS7pXF5irVyty0IlafX7hn5AZNd1rb7fhzVz6wZo9nK/nu/bYs9zhuJggTbuQNPWyOWiFmd3uSxr+CrTYUZ/u31s7gaqEYv4pUBoKgMx6WKkOApELtOI4e0PFddA==", + }, + }, + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627466", + "type": ["VerifiableCredential", 
"PermanentResidentCard"], + "issuer": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "identifier": "83627466", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Theodor", + "familyName": "Major", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2017-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Canada", + "birthDate": "1968-07-17", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "tnRgimJXy8mP7Amk8dPiJnJc+WhAFiYPF8hpRlqOPsSom4cF1VAxaiAN2o3io1kYajUtmXAJLxNNLOZkXDBblcqZAu5mHKEPWc/nctu3vNs4gs5f7tWZX7lm6JK71pLJq8lWbyIufIm/BnCjeIll5g==", + }, + }, + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627467", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:sov:2wJPyULfLLnYTEFYzByfUR", + "identifier": "83627467", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Cai", + "familyName": "Leblanc", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-9989", + "commuterClassification": "C1", + "birthCountry": "Canada", + "birthDate": "1975-07-17", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "l1sdjvIhlkAPb+Y1vUYgIVH9YhiWSFjJFOL1ntzN9jXvOqv7/RMFhoAxg0BTYU1ITHK5l/6Q5lwmtkKxJMt/Z+QPZ8yWDIwqX8kVXFxKo9st8T45ChYiizc75E+Rd7Z5qIidmuulyPvpHlpgHYLZsQ==", + }, + }, + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627468", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627468", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": 
"did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Jamel", + "familyName": "Huber", + "gender": "Female", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2012-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-000", + "commuterClassification": "C1", + "birthCountry": "United States", + "birthDate": "1959-07-17", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "rtG10rBsv8zXHBGWqHTeGyH9Y3oMa6xHjBvsJ5YHeocUBxOCge2WEs1tr60hjI4SEpi3JHSJOfd1wJEvfvMg/x6YnTZoA2UXiHBu/6vANx43EfTgbq4YDrrf1aTtQgpZDp/J4GaKoeUshuPXyf8LyA==", + }, + }, + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627469", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:sov:2wJPyULfLLnYTEFYzByfUR", + "identifier": "83627469", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Vivek", + "familyName": "Easton", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2019-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-888", + "commuterClassification": "C1", + "birthCountry": "India", + "birthDate": "1990-07-17", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "pvo7gqDgu9mMjcZafvS8gRz0mIRfFnRCNmp39cZ/92R3UDG5bmxPhh4nG2k1kjaza8wFfaqjaBxsonV+FkQUMzUWbZkn2vstEcGCJllDHFBQiDcf8MVCiCcbGBLpU9MXjnyzhwA5AteG9a2YcvRh/w==", + }, + }, + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627470", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:sov:2wJPyULfLLnYTEFYzByfUR", + "identifier": "83627470", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:WgWxqztrNooG92RXvxSTWv", + "type": ["PermanentResident", "Person"], + "givenName": "Ralphie", + "familyName": "Jennings", + "gender": "Female", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2010-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-777", + "commuterClassification": "C1", + "birthCountry": "Canada", + "birthDate": "1980-07-17", + }, + "proof": { + "type": 
"BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2019-12-11T03:50:55", + "proofPurpose": "assertionMethod", + "proofValue": "pVuvbBfGnVbwwht1s4qZSCoLlZ8nwqvsmNKR+1VTesA+7tXPriJCdlnNFDL0Gkh5TV5E0NOS8WNttE5Uhhqakmjcs7L4hIr4PoVtCLFAF1tce8n4Z/5PKD7IuGIdCDbn77fjQffu2Cs+JDBVVcQRBA==", + }, + }, +] + +# [Nested_From] Either or case +# Exclusive Disjunction +# ----------------------- +# |_x_|_y_|_output_| +# | T | T | F | +# | T | F | T | +# | F | T | T | +# | F | F | F | +# ----------------------- +pres_exch_nested_srs_a = """ +{ + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 1, + "from_nested": [ + { + "name": "United States Citizenship Proofs", + "purpose": "We need you to prove you are a US citizen.", + "rule": "all", + "from": "A" + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of a EU country.", + "rule": "all", + "from": "B" + } + ] + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuer.id", + "$.issuer", + "$.vc.issuer.id" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type":"string", + "enum": ["did:example:489398593", "did:sov:2wJPyULfLLnYTEFYzByfUR"] + } + } + ] + } + }, + { + "id":"citizenship_input_2", + "name":"US Passport", + "group":[ + "B" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "maximum":"2005-5-16" + } + } + ] + } + } + ] +} +""" + +pres_exch_nested_srs_b = """ +{ + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 1, + "from_nested": [ + { + "name": "United States Citizenship Proofs", + "purpose": "We need you to prove you are a US citizen.", + "rule": "all", + "from": "A" + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of a EU country.", + "rule": "all", + "from": "B" + } + ] + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuer.id", + "$.issuer", + "$.vc.issuer.id" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type":"string", + "enum": ["did:example:489398593"] + } + } + ] + } + }, + { + "id":"citizenship_input_2", + "name":"US Passport", + "group":[ + "B" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + 
"uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "maximum":"2012-5-16" + } + } + ] + } + } + ] +} +""" + +pres_exch_nested_srs_c = """ +{ + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 1, + "from_nested": [ + { + "name": "United States Citizenship Proofs", + "purpose": "We need you to prove you are a US citizen.", + "rule": "all", + "from": "A" + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of a EU country.", + "rule": "all", + "from": "B" + } + ] + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuer.id", + "$.issuer", + "$.vc.issuer.id" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type":"string", + "enum": ["did:example:489398593", "did:sov:2wJPyULfLLnYTEFYzByfUR"] + } + } + ] + } + }, + { + "id":"citizenship_input_2", + "name":"US Passport", + "group":[ + "B" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "minimum":"2005-5-16" + } + } + ] + } + } + ] +} +""" + +pres_exch_multiple_srs_not_met = """ +{ + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 2, + "from": "A" + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of a EU country.", + "rule": "all", + "from": "B" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuer.id", + "$.vc.issuer.id", + "$.issuer" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type":"string", + "enum": ["did:example:489398593", "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", "did:sov:2wJPyULfLLnYTEFYzByfUR"] + } + } + ] + } + }, + { + "id":"citizenship_input_2", + "name":"US Passport", + "group":[ + "B" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "exclusiveMinimum":"2009-5-16" + } + } + ] + } + } + ] +} +""" + +pres_exch_multiple_srs_met = """ +{ + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "all", + "from": "A" + }, + { + "name": 
"European Union Citizenship Proofs", + "rule": "all", + "from": "B" + }, + { + "name": "Date Test", + "rule": "all", + "from": "C" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuer.id", + "$.vc.issuer.id", + "$.issuer" + ], + "filter":{ + "type":"string", + "enum": ["did:example:489398593", "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", "did:sov:2wJPyULfLLnYTEFYzByfUR"] + } + } + ] + } + }, + { + "id":"citizenship_input_2", + "name":"US Passport", + "group":[ + "B" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.gender" + ], + "filter":{ + "const":"Male" + } + } + ] + } + }, + { + "id":"citizenship_input_3", + "name":"US Passport", + "group":[ + "C" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "minimum":"2005-5-16" + } + } + ] + } + } + ] +} +""" + +pres_exch_datetime_minimum_met = """ +{ + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "minimum":"2005-5-16" + } + } + ] + } + } + ] +} +""" + +pres_exch_datetime_maximum_met = """ +{ + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "maximum":"2014-5-16" + } + } + ] + } + } + ] +} +""" + + +def get_test_data(): + vc_record_list = [] + for cred in cred_list: + vc_record_list.append( + create_vcrecord( + cred, + [ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://w3id.org/citizenship#PermanentResidentCard", + ], + ) + ) + pd_json_list = [ + (pres_exch_multiple_srs_not_met, 0), + (pres_exch_multiple_srs_met, 4), + (pres_exch_datetime_minimum_met, 6), + (pres_exch_datetime_maximum_met, 6), + (pres_exch_nested_srs_a, 4), + (pres_exch_nested_srs_b, 5), + (pres_exch_nested_srs_c, 2), + ] + + pd_list = [] + for pd in pd_json_list: + pd_list.append( + ( + PresentationDefinition.deserialize(json.loads(pd[0])), + pd[1], 
+ ) + ) + return (vc_record_list, pd_list) diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch.py new file mode 100644 index 0000000000..60d9b1edc4 --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch.py @@ -0,0 +1,322 @@ +"""Tests for DIF PresExch schemas.""" +import json + +from unittest import TestCase + +from .....messaging.models.base import BaseModelError + +from ..pres_exch import ( + ClaimFormat, + SubmissionRequirements, + DIFHolder, + Filter, + Constraints, + VerifiablePresentation, +) + + +class TestPresExchSchemas(TestCase): + """Tests for DIF presentation exchange schemas.""" + + def test_claim_format_a(self): + claim_format_json = """ + { + "jwt": { + "alg": ["EdDSA", "ES256K", "ES384"] + }, + "jwt_vc": { + "alg": ["ES256K", "ES384"] + }, + "jwt_vp": { + "alg": ["EdDSA", "ES256K"] + }, + "ldp_vc": { + "proof_type": [ + "JsonWebSignature2020", + "Ed25519Signature2018", + "EcdsaSecp256k1Signature2019", + "RsaSignature2018" + ] + }, + "ldp_vp": { + "proof_type": ["Ed25519Signature2018"] + }, + "ldp": { + "proof_type": ["RsaSignature2018"] + } + } + """ + expected_result = json.loads(claim_format_json) + actual_result = (ClaimFormat.deserialize(claim_format_json)).serialize() + assert expected_result == actual_result + + def test_claim_format_b(self): + claim_format_json = """ + { + "ldp_vp": { + "proof_type": ["Ed25519Signature2018"] + } + } + """ + expected_result = json.loads(claim_format_json) + actual_result = (ClaimFormat.deserialize(claim_format_json)).serialize() + assert expected_result == actual_result + + def test_submission_requirements_from(self): + submission_req_json = """ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + """ + expected_result = json.loads(submission_req_json) + actual_result = ( + SubmissionRequirements.deserialize(submission_req_json) + ).serialize() + assert expected_result == actual_result + + def test_submission_requirements_from_nested(self): + nested_submission_req_json = """ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 1, + "from_nested": [ + { + "name": "United States Citizenship Proofs", + "purpose": "We need you to prove you are a US citizen.", + "rule": "all", + "from": "A" + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of an EU country.", + "rule": "all", + "from": "B" + } + ] + } + """ + expected_result = json.loads(nested_submission_req_json) + actual_result = ( + SubmissionRequirements.deserialize(nested_submission_req_json) + ).serialize() + assert expected_result == actual_result + + def test_submission_requirements_from_missing(self): + test_json = """ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 1 + } + """ + with self.assertRaises(BaseModelError) as cm: + (SubmissionRequirements.deserialize(test_json)).serialize() + + def test_submission_requirements_from_both_present(self): + test_json = """ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 1, + "from": "A", + "from_nested": [ + { + "name": "United States Citizenship Proofs", + "purpose": "We need you to prove you are a US citizen.", + "rule": "all", + "from": "A" + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of an EU country.", + "rule": "all", + "from": "B" + } + ] + } + """ + with 
self.assertRaises(BaseModelError) as cm: + (SubmissionRequirements.deserialize(test_json)).serialize() + + def test_is_holder(self): + test_json = """ + { + "field_id": [ + "ce66380c-1990-4aec-b8b4-5d532e92a616", + "dd69e8a4-4cc0-4540-b34a-b4aa0e0d2214", + "d15802b4-eec8-45ef-b78f-e35125ac1bb8", + "765f3e09-600c-467f-99eb-ea549c350121" + ], + "directive": "required" + } + """ + expected_result = json.loads(test_json) + actual_result = (DIFHolder.deserialize(test_json)).serialize() + assert expected_result == actual_result + + def test_filter(self): + test_json_list = [] + test_json_string_enum = """ + { + "type":"string", + "enum": ["testa1", "testa2", "testa3"] + } + """ + test_json_list.append(test_json_string_enum) + test_json_number_enum = """ + { + "type":"string", + "enum": ["testb1", "testb2", "testb3"] + } + """ + test_json_list.append(test_json_number_enum) + test_json_not_enum = """ + { + "not": { + "enum": ["testc1", "testc2", "testc3"] + } + } + """ + test_json_list.append(test_json_not_enum) + test_json_format_min = """ + { + "type":"string", + "format": "date", + "minimum": "1980/07/04" + } + """ + test_json_list.append(test_json_format_min) + test_json_exclmax = """ + { + "type":"number", + "exclusiveMaximum": 2 + } + """ + test_json_list.append(test_json_exclmax) + test_json_exclmin = """ + { + "exclusiveMinimum": 2 + } + """ + test_json_list.append(test_json_exclmin) + test_json_const = """ + { + "const": 2.0 + } + """ + test_json_list.append(test_json_const) + test_json_enum_error = """ + { + "enum": 2 + } + """ + test_json_custom_field_error = """ + { + "minimum": [ + "not_valid" + ] + } + """ + + for tmp_test_item in test_json_list: + expected_result = json.loads(tmp_test_item) + actual_result = (Filter.deserialize(tmp_test_item)).serialize() + assert expected_result == actual_result + + with self.assertRaises(BaseModelError) as cm: + (Filter.deserialize(test_json_enum_error)).serialize() + + with self.assertRaises(BaseModelError) as cm: + (Filter.deserialize(test_json_custom_field_error)).serialize() + + def test_constraints(self): + test_json = """ + { + "fields":[ + { + "path":[ + "$.credentialSubject.dob", + "$.vc.credentialSubject.dob", + "$.credentialSubject.license.dob" + ], + "filter":{ + "type":"string", + "format":"date", + "minimum":"1999-5-16" + } + } + ], + "statuses": { + "active": { + "directive": "required" + }, + "suspended": { + "directive": "allowed" + }, + "revoked": { + "directive": "disallowed" + } + } + } + """ + + expected_result = json.loads(test_json) + actual_result = (Constraints.deserialize(test_json)).serialize() + assert expected_result == actual_result + + def test_verifiable_presentation_wrapper(self): + test_vp_dict = { + "@context": ["https://www.w3.org/2018/credentials/v1"], + "type": ["VerifiablePresentation"], + "verifiableCredential": [ + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["PermanentResidentCard", "VerifiableCredential"], + "credentialSubject": { + "id": "did:example:b34ca6cd37bbf23", + "type": ["Person", "PermanentResident"], + "givenName": "JOHN", + }, + "issuanceDate": "2010-01-01T19:53:24Z", + "issuer": "did:key:zUC74bgefTdc43KS1psXgXf4jLaHyaj2qCQqQTXrtmSYGf1PxiJhrH6LGpaBMyj6tqAKmjGyMaS4RfNo2an77vT1HfzJUNPk4H7TCuJvSp4vet4Cu67kn2JSegoQNFSA1tbwU8v", + "proof": { + "type": "BbsBlsSignatureProof2020", + "nonce": 
"3AuruhJQrXtEgiagiJ+FwVf2S0SnzUDJvnO61YecQsJ7ImR1mPcoVjJJ0HOhfkFpoYI=", + "proofValue": "ABkBuAaPlP5A7JWY78Xf69oBnsMLcD1RXbIFYhcLoXPXW12CG9glnnqnPLsGri5xsA3LcP0kg74X+sAjKXGRGy3uvp412Dm0FuohYNboQcLne5KOAa5AxU4bjmwQsxdfduVqhriro1N+YTkuB4SMmO/5ooL0N3OHsYdExg7nSzWqmZoqgp+3CwIxF0a/oyKTcxJORuIqAAAAdInlL9teSIX49NJGEZfBO7IrdjT2iggH/G0AlPWoEvrWIbuCRQ69K83n5o7oJVjqhAAAAAIaVmlAD6+FEKA4eg0OaWOKPrd5Kq8rv0vIwjJ71egxll0Fqq4zDWQ/+yl3Pteh0Wyuyvpm19/sj6tiCWj4PkA+rpxtR2bXpnrCTKUffFFNBjVvVziXDS0KWkGUB7XU9mjUa4USC7Iub3bZZCnFjQA5AAAADzkGwGD837r33e7OTrGEti8eAkvFDcyCgA4ck/X+5HJjAJclHWbl4SNQR8CiNZyzJpvxW+jbNBcwmEvocYArddk3F78Ki0Qnp6aU9eDgfOOx1iW2BXLUjrhq5I2hP5/WQF3CEDYRjczGjzM9T8/coeC36YAp0zJunIXUKb8SPDSOISafibYRYFB4xhlWKXWloDelafyujOBST8KZNM8FmF4DSbXrO8vmZbjuR/8ntUcUK7X2rNbuZ3M5eWZDF8pL+SA9gQitKfPHEocoYAdhgEAM7ZNAJ+TgOcx9gtZIhDWKDNnFxIeoOAylbD1xZd9xbWtq3Bk3R79xqsKxFRJRNxk/9b6fJruP292+qM5lxcZ1jUz/dJUYFI93hH4Mso75CjGRN78MAY9SNifl6H8qcxTpBn4332LlFhRznLbtnc4YSWA/fvVqaN9h2zCH/6AdbLKXGffV34EF7DadwJsi9jsc+YlSMn6qaIUIDTdGLwh4KKpSH5bVbg/mVCcXPTJplFgYwRsOdiQbZY/740dJyo1lPjQ0Lvdio8W2M8c73ujeJU70CNLkgjJAMUPGrCFtGxBH2eeLBQ0P95qRZAIcJ7U0MibZLaRjoUOuTla5BIt2038PJ6XhcY6BEJaLyJOPEQ==", + "verificationMethod": "did:key:zUC74bgefTdc43KS1psXgXf4jLaHyaj2qCQqQTXrtmSYGf1PxiJhrH6LGpaBMyj6tqAKmjGyMaS4RfNo2an77vT1HfzJUNPk4H7TCuJvSp4vet4Cu67kn2JSegoQNFSA1tbwU8v#zUC74bgefTdc43KS1psXgXf4jLaHyaj2qCQqQTXrtmSYGf1PxiJhrH6LGpaBMyj6tqAKmjGyMaS4RfNo2an77vT1HfzJUNPk4H7TCuJvSp4vet4Cu67kn2JSegoQNFSA1tbwU8v", + "proofPurpose": "assertionMethod", + "created": "2021-05-05T15:22:30.523465", + }, + } + ], + "presentation_submission": { + "id": "a5fcfe44-2c30-497d-af02-98e539da9a0f", + "definition_id": "32f54163-7166-48f1-93d8-ff217bdb0653", + "descriptor_map": [ + { + "id": "citizenship_input_1", + "format": "ldp_vp", + "path": "$.verifiableCredential[0]", + } + ], + }, + "proof": { + "type": "Ed25519Signature2018", + "verificationMethod": "did:sov:4QxzWk3ajdnEA37NdNU5Kt#key-1", + "created": "2021-05-05T15:23:03.023971", + "proofPurpose": "authentication", + "challenge": "40429d49-5e8f-4ffc-baf8-e332412f1247", + "jws": "eyJhbGciOiAiRWREU0EiLCAiYjY0IjogZmFsc2UsICJjcml0IjogWyJiNjQiXX0..2uBYmg7muE9ZPVeAGo_ibVfLkCjf2hGshr2o5i8pAwFyNBM-kDHXofuq1MzJgb19wzb01VIu91hY_ajjt9KFAA", + }, + } + vp = VerifiablePresentation.deserialize(test_vp_dict) + assert isinstance(vp, VerifiablePresentation) diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py new file mode 100644 index 0000000000..27f91fee57 --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py @@ -0,0 +1,2809 @@ +import asyncio +import pytest + +from asynctest import mock as async_mock +from copy import deepcopy + +from .....core.in_memory import InMemoryProfile +from .....did.did_key import DIDKey +from .....resolver.did_resolver_registry import DIDResolverRegistry +from .....resolver.did_resolver import DIDResolver +from .....storage.vc_holder.vc_record import VCRecord +from .....wallet.base import BaseWallet, DIDInfo +from .....wallet.crypto import KeyType +from .....wallet.did_method import DIDMethod +from .....wallet.error import WalletNotFoundError +from .....vc.ld_proofs import ( + BbsBlsSignatureProof2020, + BbsBlsSignature2020, +) +from .....vc.ld_proofs.document_loader import DocumentLoader +from .....vc.ld_proofs.error import LinkedDataProofException +from .....vc.ld_proofs.constants import SECURITY_CONTEXT_BBS_URL +from 
.....vc.tests.document_loader import custom_document_loader +from .....vc.tests.data import ( + BBS_SIGNED_VC_MATTR, + BBS_NESTED_VC_REVEAL_DOCUMENT_MATTR, +) + +from .. import pres_exch_handler as test_module +from ..pres_exch import ( + PresentationDefinition, + Requirement, + Filter, + SchemaInputDescriptor, + Constraints, + DIFField, +) +from ..pres_exch_handler import ( + DIFPresExchHandler, + DIFPresExchError, +) + +from .test_data import ( + get_test_data, + edd_jsonld_creds, + bbs_bls_number_filter_creds, + bbs_signed_cred_no_credsubjectid, + bbs_signed_cred_credsubjectid, +) + + +@pytest.yield_fixture(scope="class") +def event_loop(request): + loop = asyncio.get_event_loop_policy().new_event_loop() + yield loop + loop.close() + + +@pytest.fixture(scope="class") +def profile(): + profile = InMemoryProfile.test_profile() + context = profile.context + did_resolver_registry = DIDResolverRegistry() + context.injector.bind_instance(DIDResolverRegistry, did_resolver_registry) + context.injector.bind_instance(DIDResolver, DIDResolver(did_resolver_registry)) + context.injector.bind_instance(DocumentLoader, custom_document_loader) + return profile + + +@pytest.fixture(scope="class") +async def setup_tuple(profile): + async with profile.session() as session: + wallet = session.inject(BaseWallet, required=False) + await wallet.create_local_did( + method=DIDMethod.SOV, key_type=KeyType.ED25519, did="WgWxqztrNooG92RXvxSTWv" + ) + creds, pds = get_test_data() + return creds, pds + + +class TestPresExchHandler: + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_load_cred_json_a(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + # assert len(cred_list) == 6 + for tmp_pd in pd_list: + # tmp_pd is tuple of presentation_definition and expected number of VCs + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd[0], + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == tmp_pd[1] + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_load_cred_json_b(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler( + profile, pres_signing_did="did:sov:WgWxqztrNooG92RXvxSTWv" + ) + # assert len(cred_list) == 6 + for tmp_pd in pd_list: + # tmp_pd is tuple of presentation_definition and expected number of VCs + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd[0], + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == tmp_pd[1] + + @pytest.mark.asyncio + async def test_to_requirement_catch_errors(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_json_pd = """ + { + "submission_requirements": [ + { + "name": "Banking Information", + "purpose": "We need you to prove you currently hold a bank account older than 12months.", + "rule": "pick", + "count": 1, + "from": "A" + } + ], + "id": "32f54163-7166-48f1-93d8-ff217bdb0653", + "input_descriptors": [ + { + "id": "banking_input_1", + "name": "Bank Account Information", + "purpose": "We can only remit payment to a currently-valid bank account.", + "group": [ + "B" + ], + "schema": [ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints": { + "fields": [ + { + "path": [ + "$.issuer.id", + "$.vc.issuer.id" + ], + "purpose": "We can only verify bank accounts if they are 
attested by a trusted bank, auditor or regulatory authority.", + "filter": { + "type": "string", + "pattern": "did:example:489398593" + } + }, + { + "path": [ + "$.credentialSubject.account[*].route", + "$.vc.credentialSubject.account[*].route", + "$.account[*].route" + ], + "purpose": "We can only remit payment to a currently-valid account at a US, Japanese, or German federally-accredited bank, submitted as an ABA RTN or SWIFT code.", + "filter": { + "type": "string", + "pattern": "^[0-9]{9}|^([a-zA-Z]){4}([a-zA-Z]){2}([0-9a-zA-Z]){2}([0-9a-zA-Z]{3})?$" + } + } + ] + } + } + ] + } + """ + + with pytest.raises(DIFPresExchError): + test_pd = PresentationDefinition.deserialize(test_json_pd) + await dif_pres_exch_handler.make_requirement( + srs=test_pd.submission_requirements, + descriptors=test_pd.input_descriptors, + ) + + test_json_pd_nested_srs = """ + { + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "max": 3, + "from_nested": [ + { + "name": "United States Citizenship Proofs", + "purpose": "We need you to prove your US citizenship.", + "rule": "all", + "from": "C" + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of an EU member state.", + "rule": "all", + "from": "D" + } + ] + } + ], + "id": "32f54163-7166-48f1-93d8-ff217bdb0653", + "input_descriptors": [ + { + "id": "banking_input_1", + "name": "Bank Account Information", + "purpose": "We can only remit payment to a currently-valid bank account.", + "group": [ + "B" + ], + "schema": [ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints": { + "fields": [ + { + "path": [ + "$.issuer.id", + "$.vc.issuer.id" + ], + "purpose": "We can only verify bank accounts if they are attested by a trusted bank, auditor or regulatory authority.", + "filter": { + "type": "string", + "pattern": "did:example:489398593" + } + }, + { + "path": [ + "$.credentialSubject.account[*].route", + "$.vc.credentialSubject.account[*].route", + "$.account[*].route" + ], + "purpose": "We can only remit payment to a currently-valid account at a US, Japanese, or German federally-accredited bank, submitted as an ABA RTN or SWIFT code.", + "filter": { + "type": "string", + "pattern": "^[0-9]{9}|^([a-zA-Z]){4}([a-zA-Z]){2}([0-9a-zA-Z]){2}([0-9a-zA-Z]{3})?$" + } + } + ] + } + } + ] + } + """ + + with pytest.raises(DIFPresExchError): + test_pd = PresentationDefinition.deserialize(test_json_pd_nested_srs) + await dif_pres_exch_handler.make_requirement( + srs=test_pd.submission_requirements, + descriptors=test_pd.input_descriptors, + ) + + @pytest.mark.asyncio + async def test_make_requirement_with_none_params(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_json_pd_no_sr = """ + { + "id": "32f54163-7166-48f1-93d8-ff217bdb0653", + "input_descriptors": [ + { + "id": "banking_input_1", + "name": "Bank Account Information", + "purpose": "We can only remit payment to a currently-valid bank account.", + "group": [ + "B" + ], + "schema": [ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints": { + "fields": [ + { + "path": [ + "$.issuer.id", + "$.vc.issuer.id" + ], + "purpose": "We can only verify bank accounts if they are attested by a trusted bank, auditor or regulatory authority.", + "filter": { + "type": "string", + "pattern": "did:example:489398593" + } + }, + { + "path": [ + "$.credentialSubject.account[*].route", + "$.vc.credentialSubject.account[*].route", + 
"$.account[*].route" + ], + "purpose": "We can only remit payment to a currently-valid account at a US, Japanese, or German federally-accredited bank, submitted as an ABA RTN or SWIFT code.", + "filter": { + "type": "string", + "pattern": "^[0-9]{9}|^([a-zA-Z]){4}([a-zA-Z]){2}([0-9a-zA-Z]){2}([0-9a-zA-Z]{3})?$" + } + } + ] + } + } + ] + } + """ + + test_pd = PresentationDefinition.deserialize(test_json_pd_no_sr) + assert test_pd.submission_requirements is None + await dif_pres_exch_handler.make_requirement( + srs=test_pd.submission_requirements, descriptors=test_pd.input_descriptors + ) + + test_json_pd_no_input_desc = """ + { + "submission_requirements": [ + { + "name": "Banking Information", + "purpose": "We need you to prove you currently hold a bank account older than 12months.", + "rule": "pick", + "count": 1, + "from": "A" + } + ], + "id": "32f54163-7166-48f1-93d8-ff217bdb0653" + } + """ + + with pytest.raises(DIFPresExchError): + test_pd = PresentationDefinition.deserialize(test_json_pd_no_input_desc) + await dif_pres_exch_handler.make_requirement( + srs=test_pd.submission_requirements, + descriptors=test_pd.input_descriptors, + ) + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_subject_is_issuer_check(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A" + }, + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "B" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "subject_is_issuer": "required", + "fields":[ + { + "path":[ + "$.issuer.id", + "$.vc.issuer.id" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type":"string", + "enum": ["did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", "did:example:489398593", "did:sov:2wJPyULfLLnYTEFYzByfUR"] + } + } + ] + } + }, + { + "id":"citizenship_input_2", + "name":"US Passport", + "group":[ + "B" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "minimum":"2009-5-16" + } + } + ] + } + } + ] + } + """ + + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=PresentationDefinition.deserialize(test_pd), + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_limit_disclosure_required_check(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "limit_disclosure": "required", + 
"fields":[ + { + "path":[ + "$.credentialSubject.givenName" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type":"string", + "enum": ["JOHN", "CAI"] + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd) + assert tmp_pd.input_descriptors[0].constraint.limit_disclosure + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 1 + for cred in tmp_vp.get("verifiableCredential"): + assert cred["issuer"] in [ + "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "did:example:489398593", + "did:sov:2wJPyULfLLnYTEFYzByfUR", + ] + assert cred["proof"]["type"] == "BbsBlsSignatureProof2020" + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_reveal_doc_a(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_constraint = { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "filter": {"type": "string", "const": "JOHN"}, + }, + { + "path": ["$.credentialSubject.familyName"], + "filter": {"type": "string", "const": "SMITH"}, + }, + { + "path": ["$.credentialSubject.type"], + "filter": { + "type": "string", + "enum": ["PermanentResident", "Person"], + }, + }, + { + "path": ["$.credentialSubject.gender"], + "filter": {"type": "string", "const": "Male"}, + }, + ], + } + + test_constraint = Constraints.deserialize(test_constraint) + tmp_reveal_doc = dif_pres_exch_handler.reveal_doc( + credential_dict=BBS_SIGNED_VC_MATTR, constraints=test_constraint + ) + assert tmp_reveal_doc + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_reveal_doc_b(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_credential = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://example.gov/credentials/3732", + "issuer": "did:example:489398593", + "type": ["VerifiableCredential", "UniversityDegreeCredential"], + "issuanceDate": "2020-03-10T04:24:12.164Z", + "credentialSubject": { + "id": "did:example:b34ca6cd37bbf23", + "degree": { + "type": "BachelorDegree", + "name": "Bachelor of Science and Arts", + "degreeType": "Underwater Basket Weaving", + }, + "college": "Contoso University", + }, + "proof": { + "type": "BbsBlsSignature2020", + "verificationMethod": "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa#zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "created": "2021-04-14T15:56:26.427788", + "proofPurpose": "assertionMethod", + "proofValue": "q86pBug3pGMMXq0RE6jfQnk8HaIfM4lb9dQAnKM4aUkT64x/f/65tfnzooeVPf+vXR9a2TVParet6RKWVHVb1QB+GJMWglBy29iEz2tK8H8qYqLtRHMA3YCAQ/aynHKekSsURq+1c2RTEsX27G0hVA==", + }, + } + + test_constraint = { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.degree.name"], + "filter": { + "type": "string", + "const": "Bachelor of Science and Arts", + }, + }, + { + "path": ["$.issuer"], + "filter": { + "type": "string", + "const": "did:example:489398593", + }, + }, + { + "path": ["$.issuanceDate"], + "filter": { + "type": 
"string", + "const": "2020-03-10T04:24:12.164Z", + }, + }, + ], + } + test_constraint = Constraints.deserialize(test_constraint) + tmp_reveal_doc = dif_pres_exch_handler.reveal_doc( + credential_dict=test_credential, constraints=test_constraint + ) + expected_reveal_doc = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + "https://w3id.org/security/bbs/v1", + ], + "issuer": {}, + "issuanceDate": {}, + "type": ["VerifiableCredential", "UniversityDegreeCredential"], + "@explicit": True, + "@requireAll": True, + "credentialSubject": { + "@explicit": True, + "@requireAll": True, + "degree": {"@explicit": True, "@requireAll": True, "name": {}}, + }, + } + assert tmp_reveal_doc == expected_reveal_doc + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_reveal_doc_c(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_constraint = { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "filter": {"type": "string", "const": "Cai"}, + }, + { + "path": ["$.credentialSubject.familyName"], + "filter": {"type": "string", "const": "Leblanc"}, + }, + { + "path": ["$.credentialSubject.gender"], + "filter": {"type": "string", "const": "Male"}, + }, + ], + } + + test_constraint = Constraints.deserialize(test_constraint) + test_cred = cred_list[2].cred_value + tmp_reveal_doc = dif_pres_exch_handler.reveal_doc( + credential_dict=test_cred, constraints=test_constraint + ) + assert tmp_reveal_doc + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_filter_number_type_check(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd_min = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type": "number", + "maximum": 2 + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_min) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 2 + test_pd_max = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type": "number", + "minimum": 2 + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_max) + tmp_vp = await dif_pres_exch_handler.create_vp( + 
credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 3 + + test_pd_excl_min = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "limit_disclosure": "preferred", + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type": "number", + "exclusiveMinimum": 1.5 + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_excl_min) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 3 + + test_pd_excl_max = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type": "number", + "exclusiveMaximum": 2.5 + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_excl_max) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 2 + + test_pd_const = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type": "number", + "const": 2 + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_const) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 2 + + test_pd_enum = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + 
"constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type": "number", + "enum": [2, 2.0 , "test"] + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_enum) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 2 + + test_pd_missing = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "type": "number", + "enum": [2.5] + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_missing) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 0 + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_filter_no_type_check(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.lprCategory", + "$.vc.credentialSubject.lprCategory", + "$.test" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "not": { + "const": "C10" + } + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_edd_limit_disclosure(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "limit_disclosure": "required", + "fields":[ + { + "path":[ + "$.issuer.id", + "$.issuer", + "$.vc.issuer.id" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "enum": 
["did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", "did:example:489398593"] + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd) + assert tmp_pd.input_descriptors[0].constraint.limit_disclosure + with pytest.raises(LinkedDataProofException): + await dif_pres_exch_handler.create_vp( + credentials=edd_jsonld_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_edd_jsonld_creds(self, setup_tuple, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd_const_check = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.vc.issuer.id", + "$.issuer", + "$.issuer.id" + ], + "filter":{ + "type":"string", + "const": "did:example:489398593" + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_const_check) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=edd_jsonld_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 3 + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_filter_string(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd_min_length = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.vc.issuer.id", + "$.issuer", + "$.issuer.id" + ], + "filter":{ + "type":"string", + "minLength": 5 + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_min_length) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + test_pd_max_length = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuer.id", + "$.issuer", + "$.vc.issuer.id" + ], + "filter":{ + "type":"string", + "maxLength": 150 + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_max_length) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + test_pd_pattern_check = """ + { + 
"id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.vc.issuer.id", + "$.issuer", + "$.issuer.id" + ], + "filter":{ + "type":"string", + "pattern": "did:example:test" + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_pattern_check) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 0 + + test_pd_datetime_exclmax = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "exclusiveMaximum":"2011-5-16" + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_datetime_exclmax) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + test_pd_datetime_exclmin = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date", + "exclusiveMinimum":"2008-5-16" + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_datetime_exclmin) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + test_pd_const_check = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.vc.issuer.id", + "$.issuer", + "$.issuer.id" + ], + "filter":{ + "type":"string", + "const": "did:example:489398593" + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_const_check) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 1 + + @pytest.mark.asyncio + async def 
test_filter_schema(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_schema_list = [ + SchemaInputDescriptor( + uri="test123", + required=True, + ) + ] + assert ( + len(await dif_pres_exch_handler.filter_schema(cred_list, tmp_schema_list)) + == 0 + ) + + def test_cred_schema_match_a(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_cred = deepcopy(cred_list[0]) + assert ( + dif_pres_exch_handler.credential_match_schema( + tmp_cred, "https://www.w3.org/2018/credentials#VerifiableCredential" + ) + is True + ) + + @pytest.mark.asyncio + async def test_merge_nested(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_nested_result = [] + test_dict_1 = {} + test_dict_1["citizenship_input_1"] = [ + cred_list[0], + cred_list[1], + cred_list[2], + cred_list[3], + cred_list[4], + cred_list[5], + ] + test_dict_2 = {} + test_dict_2["citizenship_input_2"] = [ + cred_list[4], + cred_list[5], + ] + test_dict_3 = {} + test_dict_3["citizenship_input_2"] = [ + cred_list[3], + cred_list[2], + ] + test_nested_result.append(test_dict_1) + test_nested_result.append(test_dict_2) + test_nested_result.append(test_dict_3) + + tmp_result = await dif_pres_exch_handler.merge_nested_results( + test_nested_result, {} + ) + + def test_subject_is_issuer(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_cred = deepcopy(cred_list[0]) + tmp_cred.issuer_id = "4fc82e63-f897-4dad-99cc-f698dff6c425" + tmp_cred.subject_ids.add("4fc82e63-f897-4dad-99cc-f698dff6c425") + assert tmp_cred.subject_ids is not None + assert dif_pres_exch_handler.subject_is_issuer(tmp_cred) is True + tmp_cred.issuer_id = "19b823fb-55ef-49f4-8caf-2a26b8b9286f" + assert dif_pres_exch_handler.subject_is_issuer(tmp_cred) is False + + @pytest.mark.asyncio + def test_is_numeric(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + assert dif_pres_exch_handler.is_numeric("test") is False + assert dif_pres_exch_handler.is_numeric(1) is True + assert dif_pres_exch_handler.is_numeric(2 + 3j) is False + + @pytest.mark.asyncio + def test_filter_no_match(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_filter_excl_min = Filter(exclusive_min=7) + assert ( + dif_pres_exch_handler.exclusive_minimum_check("test", tmp_filter_excl_min) + is False + ) + tmp_filter_excl_max = Filter(exclusive_max=10) + assert ( + dif_pres_exch_handler.exclusive_maximum_check("test", tmp_filter_excl_max) + is False + ) + tmp_filter_min = Filter(minimum=10) + assert dif_pres_exch_handler.minimum_check("test", tmp_filter_min) is False + tmp_filter_max = Filter(maximum=10) + assert dif_pres_exch_handler.maximum_check("test", tmp_filter_max) is False + + @pytest.mark.asyncio + def test_filter_valueerror(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_filter_excl_min = Filter(exclusive_min=7, fmt="date") + assert ( + dif_pres_exch_handler.exclusive_minimum_check("test", tmp_filter_excl_min) + is False + ) + tmp_filter_excl_max = Filter(exclusive_max=10, fmt="date") + assert ( + dif_pres_exch_handler.exclusive_maximum_check("test", tmp_filter_excl_max) + is False + ) + tmp_filter_min = Filter(minimum=10, fmt="date") + assert dif_pres_exch_handler.minimum_check("test", tmp_filter_min) is False + tmp_filter_max = Filter(maximum=10, 
fmt="date") + assert dif_pres_exch_handler.maximum_check("test", tmp_filter_max) is False + + @pytest.mark.asyncio + def test_filter_length_check(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_filter_both = Filter(min_length=7, max_length=10) + assert dif_pres_exch_handler.length_check("test12345", tmp_filter_both) is True + tmp_filter_min = Filter(min_length=7) + assert dif_pres_exch_handler.length_check("test123", tmp_filter_min) is True + tmp_filter_max = Filter(max_length=10) + assert dif_pres_exch_handler.length_check("test", tmp_filter_max) is True + assert dif_pres_exch_handler.length_check("test12", tmp_filter_min) is False + + @pytest.mark.asyncio + def test_filter_pattern_check(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_filter = Filter(pattern="test1|test2") + assert dif_pres_exch_handler.pattern_check("test3", tmp_filter) is False + tmp_filter = Filter(const="test3") + assert dif_pres_exch_handler.pattern_check("test3", tmp_filter) is False + + @pytest.mark.asyncio + def test_is_len_applicable(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + tmp_req_a = Requirement(count=1) + tmp_req_b = Requirement(minimum=3) + tmp_req_c = Requirement(maximum=5) + + assert dif_pres_exch_handler.is_len_applicable(tmp_req_a, 2) is False + assert dif_pres_exch_handler.is_len_applicable(tmp_req_b, 2) is False + assert dif_pres_exch_handler.is_len_applicable(tmp_req_c, 6) is False + + @pytest.mark.asyncio + def test_create_vcrecord(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_cred_dict = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + "id": "http://example.edu/credentials/3732", + "type": ["VerifiableCredential", "UniversityDegreeCredential"], + "issuer": {"id": "https://example.edu/issuers/14"}, + "issuanceDate": "2010-01-01T19:23:24Z", + "credentialSubject": { + "id": "did:example:b34ca6cd37bbf23", + "degree": { + "type": "BachelorDegree", + "name": "Bachelor of Science and Arts", + }, + }, + "credentialSchema": { + "id": "https://example.org/examples/degree.json", + "type": "JsonSchemaValidator2018", + }, + } + test_vcrecord = dif_pres_exch_handler.create_vcrecord(test_cred_dict) + assert isinstance(test_vcrecord, VCRecord) + + @pytest.mark.asyncio + async def test_reveal_doc_d(self, profile): + dif_pres_exch_handler = DIFPresExchHandler( + profile, pres_signing_did="did:example:b34ca6cd37bbf23" + ) + test_constraint = { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.accounts[*].accountnumber"], + "filter": {"type": "string", "const": "test"}, + } + ], + } + test_cred_dict = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:example:489398593", + "identifier": "83627465", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2019-12-03T12:19:52Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:example:b34ca6cd37bbf23", + "accounts": [ + {"accountnumber": "test"}, + {"accountnumber": "test"}, + {"accountnumber": "test"}, + ], + }, + "proof": { + "type": "BbsBlsSignature2020", + "created": "2020-10-16T23:59:31Z", + "proofPurpose": 
"assertionMethod", + "proofValue": "kAkloZSlK79ARnlx54tPqmQyy6G7/36xU/LZgrdVmCqqI9M0muKLxkaHNsgVDBBvYp85VT3uouLFSXPMr7Stjgq62+OCunba7bNdGfhM/FUsx9zpfRtw7jeE182CN1cZakOoSVsQz61c16zQikXM3w==", + "verificationMethod": "did:example:489398593#test", + }, + } + test_constraint = Constraints.deserialize(test_constraint) + tmp_reveal_doc = dif_pres_exch_handler.reveal_doc( + credential_dict=test_cred_dict, constraints=test_constraint + ) + assert tmp_reveal_doc + + @pytest.mark.asyncio + async def test_credential_subject_as_list(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + with async_mock.patch.object( + dif_pres_exch_handler, "new_credential_builder", autospec=True + ) as mock_cred_builder: + mock_cred_builder.return_value = {} + dif_pres_exch_handler.reveal_doc( + {"credentialSubject": []}, Constraints(_fields=[]) + ) + + def test_invalid_number_filter(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + assert not dif_pres_exch_handler.process_numeric_val(val=2, _filter=Filter()) + + def test_invalid_string_filter(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + assert not dif_pres_exch_handler.process_string_val( + val="test", _filter=Filter() + ) + + def test_cred_schema_match_b(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_cred_dict = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + "id": "http://example.edu/credentials/3732", + "type": ["VerifiableCredential", "UniversityDegreeCredential"], + "issuer": {"id": "https://example.edu/issuers/14"}, + "issuanceDate": "2010-01-01T19:23:24Z", + "credentialSubject": { + "id": "did:example:b34ca6cd37bbf23", + "degree": { + "type": "BachelorDegree", + "name": "Bachelor of Science and Arts", + }, + }, + "credentialSchema": { + "id": "https://example.org/examples/degree.json", + "type": "JsonSchemaValidator2018", + }, + } + test_cred = dif_pres_exch_handler.create_vcrecord(test_cred_dict) + assert dif_pres_exch_handler.credential_match_schema( + test_cred, "https://example.org/examples/degree.json" + ) + + def test_verification_method(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + assert ( + dif_pres_exch_handler._get_verification_method( + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + ) + == DIDKey.from_did( + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + ).key_id + ) + with pytest.raises(DIFPresExchError): + dif_pres_exch_handler._get_verification_method("did:test:test") + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_sign_pres_no_cred_subject_id(self, profile, setup_tuple): + dif_pres_exch_handler = DIFPresExchHandler( + profile, pres_signing_did="did:sov:WgWxqztrNooG92RXvxSTWv" + ) + cred_list, pd_list = setup_tuple + tmp_pd = pd_list[3] + tmp_creds = [] + for cred in deepcopy(cred_list): + cred.subject_ids = [] + tmp_creds.append(cred) + + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=tmp_creds, + pd=tmp_pd[0], + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_sign_pres_bbsbls(self, profile, setup_tuple): + dif_pres_exch_handler = DIFPresExchHandler( + profile, proof_type=BbsBlsSignature2020.signature_type + ) + cred_list, pd_list = setup_tuple + tmp_pd = pd_list[3] + tmp_creds = [] + for cred in deepcopy(cred_list): + cred.subject_ids = [] + 
tmp_creds.append(cred) + + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=tmp_creds, + pd=tmp_pd[0], + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + def test_create_vc_record_with_graph_struct(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_credential_dict_a = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "@graph": [ + { + "id": "did:key:zUC79Dfc18UM9HWQKmqonstuwcP4Fu3V9Zy7aNrcFU6K34WzkBesnm9LhaVxMtrqy2qrgkRyKVoFXsE1BJFAgrzhavrYBQ69AWTcgmBFQ1VauGGCJJKvDaaWfRqgtM3DQzx1TpM", + "type": "PermanentResident", + "https://www.w3.org/2018/credentials#credentialSubject": "", + "https://www.w3.org/2018/credentials#issuanceDate": "", + "https://www.w3.org/2018/credentials#issuer": "", + }, + { + "id": "urn:bnid:_:c14n0", + "type": ["PermanentResident", "VerifiableCredential"], + "credentialSubject": { + "id": "did:key:zUC79Dfc18UM9HWQKmqonstuwcP4Fu3V9Zy7aNrcFU6K34WzkBesnm9LhaVxMtrqy2qrgkRyKVoFXsE1BJFAgrzhavrYBQ69AWTcgmBFQ1VauGGCJJKvDaaWfRqgtM3DQzx1TpM", + "type": "PermanentResident", + "familyName": "SMITH", + }, + "issuanceDate": "2020-01-01T12:00:00Z", + "issuer": "did:key:zUC7GLSYyPCryDnWzBgrSu4x44NH7bqEvY8dVPdWii1zdi3GTT9wsmTavEUfgd6VZ6wuz6yx7EDvT23DcxRT5oPBjEt3LYYAi1ph63NWxoGLCjwcP8XAHWRTCR1TKyVak4eLsjD", + }, + ], + "proof": { + "type": "BbsBlsSignatureProof2020", + "nonce": "4xEz2oZQdiyDI5WE5snXQnvQQSfBZMyrmc9PtjDRTdnzV0GdT9tDgEJhFhseP5fJTeY=", + "proofValue": "AA99L7TriEaSJ3iTDZJhrwHz/pkPLYPFFtJQuiUy/IHLIuhcdkSPhKtXGDfe6pmng+nE9pc969b6qghh/T1RjEBF7B+J9uBaWyEz0C57OV22ts+ejB/Cn4/I/jyqryzmpSzF8IvwhhtaRl31JVxnBd7bmfLZAX6FW+ZxopSepH/2sxZKpfV2Ntafx0qNfMRmt8sMrwAAAHSW5bC0H37sdQRCvwhuSwKm9xAq81saAPnUR393bspYzkC0OUNaRzsN4W/oF7db250AAAACXRj8cmq12t5N0iklCp7s2ujTP5Yemp0qERGsDaeNb0Fh6tGzhP5QJHbiNY8i/scBIMN4bN0nX2HM2grkRKMxErOO0sirH6MMz90XFOs2pxJxh33MZ0Qp0CTff6YT/Cjd0FO4SBs4ZuUzdeoRI6FSoAAAAAZIlySGrOeIFVheqXONHu9WcwNnGi48KjL/EjLcDqJgCymARUsEW5XjNJSysUqFiibm221yYMaAskdQDHdoh0q5CtV8UeDCunycGMiphhIhcP9xtWW1+WY0gif0qxRMxNs4IpcJ7TtXse7zOysQrU0iXMLwA96yzGk722QZKnXXPEZMSduj+YPfMJnDR67uxJYUGx+ci8dqmEmFfNEzeq/DTKpJwbbNbeLVnd4GKHQB2WZwRfYwNYt2U+c/xCuxvew=", + "verificationMethod": "did:key:zUC7GLSYyPCryDnWzBgrSu4x44NH7bqEvY8dVPdWii1zdi3GTT9wsmTavEUfgd6VZ6wuz6yx7EDvT23DcxRT5oPBjEt3LYYAi1ph63NWxoGLCjwcP8XAHWRTCR1TKyVak4eLsjD#zUC7GLSYyPCryDnWzBgrSu4x44NH7bqEvY8dVPdWii1zdi3GTT9wsmTavEUfgd6VZ6wuz6yx7EDvT23DcxRT5oPBjEt3LYYAi1ph63NWxoGLCjwcP8XAHWRTCR1TKyVak4eLsjD", + "proofPurpose": "assertionMethod", + "created": "2021-06-01T08:32:11.935336", + }, + } + test_credential_dict_b = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "@graph": [ + { + "id": "urn:bnid:_:c14n1", + "type": "PermanentResident", + "https://www.w3.org/2018/credentials#credentialSubject": "", + "https://www.w3.org/2018/credentials#issuanceDate": "", + "https://www.w3.org/2018/credentials#issuer": "", + }, + { + "id": "urn:bnid:_:c14n0", + "type": ["PermanentResident", "VerifiableCredential"], + "credentialSubject": None, + "issuanceDate": "2020-01-01T12:00:00Z", + "issuer": "did:key:zUC7GLSYyPCryDnWzBgrSu4x44NH7bqEvY8dVPdWii1zdi3GTT9wsmTavEUfgd6VZ6wuz6yx7EDvT23DcxRT5oPBjEt3LYYAi1ph63NWxoGLCjwcP8XAHWRTCR1TKyVak4eLsjD", + }, + ], + "proof": { + "type": "BbsBlsSignatureProof2020", + "nonce": 
"4xEz2oZQdiyDI5WE5snXQnvQQSfBZMyrmc9PtjDRTdnzV0GdT9tDgEJhFhseP5fJTeY=", + "proofValue": "AA99L7TriEaSJ3iTDZJhrwHz/pkPLYPFFtJQuiUy/IHLIuhcdkSPhKtXGDfe6pmng+nE9pc969b6qghh/T1RjEBF7B+J9uBaWyEz0C57OV22ts+ejB/Cn4/I/jyqryzmpSzF8IvwhhtaRl31JVxnBd7bmfLZAX6FW+ZxopSepH/2sxZKpfV2Ntafx0qNfMRmt8sMrwAAAHSW5bC0H37sdQRCvwhuSwKm9xAq81saAPnUR393bspYzkC0OUNaRzsN4W/oF7db250AAAACXRj8cmq12t5N0iklCp7s2ujTP5Yemp0qERGsDaeNb0Fh6tGzhP5QJHbiNY8i/scBIMN4bN0nX2HM2grkRKMxErOO0sirH6MMz90XFOs2pxJxh33MZ0Qp0CTff6YT/Cjd0FO4SBs4ZuUzdeoRI6FSoAAAAAZIlySGrOeIFVheqXONHu9WcwNnGi48KjL/EjLcDqJgCymARUsEW5XjNJSysUqFiibm221yYMaAskdQDHdoh0q5CtV8UeDCunycGMiphhIhcP9xtWW1+WY0gif0qxRMxNs4IpcJ7TtXse7zOysQrU0iXMLwA96yzGk722QZKnXXPEZMSduj+YPfMJnDR67uxJYUGx+ci8dqmEmFfNEzeq/DTKpJwbbNbeLVnd4GKHQB2WZwRfYwNYt2U+c/xCuxvew=", + "verificationMethod": "did:key:zUC7GLSYyPCryDnWzBgrSu4x44NH7bqEvY8dVPdWii1zdi3GTT9wsmTavEUfgd6VZ6wuz6yx7EDvT23DcxRT5oPBjEt3LYYAi1ph63NWxoGLCjwcP8XAHWRTCR1TKyVak4eLsjD#zUC7GLSYyPCryDnWzBgrSu4x44NH7bqEvY8dVPdWii1zdi3GTT9wsmTavEUfgd6VZ6wuz6yx7EDvT23DcxRT5oPBjEt3LYYAi1ph63NWxoGLCjwcP8XAHWRTCR1TKyVak4eLsjD", + "proofPurpose": "assertionMethod", + "created": "2021-06-01T08:32:11.935336", + }, + } + assert isinstance( + dif_pres_exch_handler.create_vcrecord(test_credential_dict_a), VCRecord + ) + assert isinstance( + dif_pres_exch_handler.create_vcrecord(test_credential_dict_b), VCRecord + ) + + @pytest.mark.asyncio + async def test_get_did_info_for_did(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_did_key = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + with pytest.raises(WalletNotFoundError): + await dif_pres_exch_handler._did_info_for_did(test_did_key) + + @pytest.mark.asyncio + async def test_get_sign_key_credential_subject_id(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + + VC_RECORDS = [ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=["did:sov:LjgpST2rjsoxYegQDRm7EL"], + proof_types=["Ed25519Signature2018"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:example:ebfeb1f712ebc6f1c276e12ec31", + "did:sov:LjgpST2rjsoxYegQDRm7EL", + ], + proof_types=["Ed25519Signature2018"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + ] + with async_mock.patch.object( + DIFPresExchHandler, + "_did_info_for_did", + async_mock.CoroutineMock(), + ) as mock_did_info: + did_info = DIDInfo( + did="did:sov:LjgpST2rjsoxYegQDRm7EL", + verkey="verkey", + metadata={}, + method=DIDMethod.SOV, + key_type=KeyType.ED25519, + ) + mock_did_info.return_value = did_info + ( + issuer_id, + filtered_creds, + ) = await dif_pres_exch_handler.get_sign_key_credential_subject_id( + VC_RECORDS + ) + assert issuer_id == 
"did:sov:LjgpST2rjsoxYegQDRm7EL" + assert len(filtered_creds) == 2 + + @pytest.mark.asyncio + async def test_get_sign_key_credential_subject_id_error(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + + VC_RECORDS = [ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=["did:sov:LjgpST2rjsoxYegQDRm7EL"], + proof_types=["Ed25519Signature2018"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:example:ebfeb1f712ebc6f1c276e12ec31", + "did:example:ebfeb1f712ebc6f1c276e12ec21", + ], + proof_types=["Ed25519Signature2018"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + ] + with async_mock.patch.object( + DIFPresExchHandler, + "_did_info_for_did", + async_mock.CoroutineMock(), + ) as mock_did_info: + did_info = DIDInfo( + did="did:sov:LjgpST2rjsoxYegQDRm7EL", + verkey="verkey", + metadata={}, + method=DIDMethod.SOV, + key_type=KeyType.ED25519, + ) + mock_did_info.return_value = did_info + with pytest.raises(DIFPresExchError): + ( + issuer_id, + filtered_creds, + ) = await dif_pres_exch_handler.get_sign_key_credential_subject_id( + VC_RECORDS + ) + + @pytest.mark.asyncio + async def test_get_sign_key_credential_subject_id_bbsbls(self, profile): + dif_pres_exch_handler = DIFPresExchHandler( + profile, proof_type="BbsBlsSignature2020" + ) + + VC_RECORDS = [ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + ] + with async_mock.patch.object( + DIFPresExchHandler, + "_did_info_for_did", + async_mock.CoroutineMock(), 
+ ) as mock_did_info: + did_info = DIDInfo( + did="did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + verkey="verkey", + metadata={}, + method=DIDMethod.KEY, + key_type=KeyType.BLS12381G2, + ) + mock_did_info.return_value = did_info + ( + issuer_id, + filtered_creds, + ) = await dif_pres_exch_handler.get_sign_key_credential_subject_id( + VC_RECORDS + ) + assert ( + issuer_id == "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + ) + assert len(filtered_creds) == 2 + + @pytest.mark.asyncio + async def test_create_vp_no_issuer(self, profile, setup_tuple): + dif_pres_exch_handler = DIFPresExchHandler(profile) + cred_list, pd_list = setup_tuple + VC_RECORDS = [ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + ), + ] + with async_mock.patch.object( + DIFPresExchHandler, + "_did_info_for_did", + async_mock.CoroutineMock(), + ) as mock_did_info, async_mock.patch.object( + DIFPresExchHandler, + "make_requirement", + async_mock.CoroutineMock(), + ) as mock_make_req, async_mock.patch.object( + DIFPresExchHandler, + "apply_requirements", + async_mock.CoroutineMock(), + ) as mock_apply_req, async_mock.patch.object( + DIFPresExchHandler, + "merge", + async_mock.CoroutineMock(), + ) as mock_merge, async_mock.patch.object( + DIFPresExchHandler, + "check_sign_pres", + async_mock.CoroutineMock(), + ) as mock_check_sign_pres, async_mock.patch.object( + test_module, + "create_presentation", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_make_req.return_value = async_mock.MagicMock() + mock_apply_req.return_value = async_mock.MagicMock() + mock_merge.return_value = (VC_RECORDS, {}) + mock_check_sign_pres.return_value = True + mock_create_vp.return_value = {"test": "1"} + did_info = DIDInfo( + did="did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + verkey="verkey", + metadata={}, + method=DIDMethod.KEY, + key_type=KeyType.BLS12381G2, + ) + mock_did_info.return_value = did_info + vp = await dif_pres_exch_handler.create_vp( + VC_RECORDS, + pd=pd_list[0][0], + challenge="3fa85f64-5717-4562-b3fc-2c963f66afa7", + ) + assert vp["test"] == "1" + assert ( + vp["presentation_submission"]["definition_id"] + == "32f54163-7166-48f1-93d8-ff217bdb0653" + ) + + @pytest.mark.asyncio + async def test_create_vp_with_bbs_suite(self, profile, setup_tuple): + dif_pres_exch_handler = 
DIFPresExchHandler( + profile, proof_type=BbsBlsSignature2020.signature_type + ) + cred_list, pd_list = setup_tuple + with async_mock.patch.object( + DIFPresExchHandler, + "_did_info_for_did", + async_mock.CoroutineMock(), + ) as mock_did_info, async_mock.patch.object( + DIFPresExchHandler, + "make_requirement", + async_mock.CoroutineMock(), + ) as mock_make_req, async_mock.patch.object( + DIFPresExchHandler, + "apply_requirements", + async_mock.CoroutineMock(), + ) as mock_apply_req, async_mock.patch.object( + DIFPresExchHandler, + "merge", + async_mock.CoroutineMock(), + ) as mock_merge, async_mock.patch.object( + DIFPresExchHandler, + "check_sign_pres", + async_mock.CoroutineMock(), + ) as mock_check_sign_pres, async_mock.patch.object( + test_module, + "create_presentation", + async_mock.CoroutineMock(), + ) as mock_create_vp, async_mock.patch.object( + test_module, + "sign_presentation", + async_mock.CoroutineMock(), + ) as mock_sign_vp: + mock_make_req.return_value = async_mock.MagicMock() + mock_apply_req.return_value = async_mock.MagicMock() + mock_merge.return_value = (cred_list, {}) + mock_check_sign_pres.return_value = True + mock_create_vp.return_value = {"test": "1", "@context": ["test"]} + mock_sign_vp.return_value = { + "test": "1", + "@context": ["test", SECURITY_CONTEXT_BBS_URL], + } + did_info = DIDInfo( + did="did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + verkey="verkey", + metadata={}, + method=DIDMethod.KEY, + key_type=KeyType.BLS12381G2, + ) + mock_did_info.return_value = did_info + vp = await dif_pres_exch_handler.create_vp( + cred_list, + pd=pd_list[0][0], + challenge="3fa85f64-5717-4562-b3fc-2c963f66afa7", + ) + assert vp["test"] == "1" + assert SECURITY_CONTEXT_BBS_URL in vp["@context"] + + @pytest.mark.asyncio + async def test_create_vp_no_issuer_with_bbs_suite(self, profile, setup_tuple): + dif_pres_exch_handler = DIFPresExchHandler( + profile, proof_type=BbsBlsSignature2020.signature_type + ) + cred_list, pd_list = setup_tuple + with async_mock.patch.object( + DIFPresExchHandler, + "_did_info_for_did", + async_mock.CoroutineMock(), + ) as mock_did_info, async_mock.patch.object( + DIFPresExchHandler, + "make_requirement", + async_mock.CoroutineMock(), + ) as mock_make_req, async_mock.patch.object( + DIFPresExchHandler, + "apply_requirements", + async_mock.CoroutineMock(), + ) as mock_apply_req, async_mock.patch.object( + DIFPresExchHandler, + "merge", + async_mock.CoroutineMock(), + ) as mock_merge, async_mock.patch.object( + DIFPresExchHandler, + "check_sign_pres", + async_mock.CoroutineMock(), + ) as mock_check_sign_pres, async_mock.patch.object( + test_module, + "create_presentation", + async_mock.CoroutineMock(), + ) as mock_create_vp, async_mock.patch.object( + DIFPresExchHandler, + "get_sign_key_credential_subject_id", + async_mock.CoroutineMock(), + ) as mock_sign_key_cred_subject: + mock_make_req.return_value = async_mock.MagicMock() + mock_apply_req.return_value = async_mock.MagicMock() + mock_merge.return_value = (cred_list, {}) + mock_check_sign_pres.return_value = True + mock_create_vp.return_value = {"test": "1", "@context": ["test"]} + mock_sign_key_cred_subject.return_value = (None, []) + did_info = DIDInfo( + did="did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + verkey="verkey", + metadata={}, + method=DIDMethod.KEY, + key_type=KeyType.BLS12381G2, + ) + mock_did_info.return_value = did_info + vp = await dif_pres_exch_handler.create_vp( + cred_list, + pd=pd_list[0][0], + challenge="3fa85f64-5717-4562-b3fc-2c963f66afa7", + 
) + assert vp["test"] == "1" + assert SECURITY_CONTEXT_BBS_URL in vp["@context"] + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_no_filter(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd_no_filter = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ] + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize(test_pd_no_filter) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_filter_with_only_string_type(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd_filter_with_only_string_type = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string" + } + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize( + test_pd_filter_with_only_string_type + ) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_filter_with_only_num_type(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd_filter_with_only_num_type = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "pick", + "min": 1, + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.credentialSubject.test", + "$.vc.credentialSubject.test", + "$.test" + ], + "filter":{ + "type": "number" + } + } + ] + } + } + ] + } + """ + + tmp_pd = PresentationDefinition.deserialize(test_pd_filter_with_only_num_type) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_bls_number_filter_creds, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 3 + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_filter_with_only_string_type_with_format(self, setup_tuple, profile): + cred_list, pd_list = setup_tuple + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd_filter_with_only_string_type_with_format = 
""" + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements":[ + { + "name": "European Union Citizenship Proofs", + "rule": "all", + "from": "A" + } + ], + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "group":[ + "A" + ], + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints":{ + "fields":[ + { + "path":[ + "$.issuanceDate", + "$.vc.issuanceDate" + ], + "filter":{ + "type":"string", + "format":"date" + } + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize( + test_pd_filter_with_only_string_type_with_format + ) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=cred_list, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp.get("verifiableCredential")) == 6 + + def test_validate_patch_catch_errors(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + _filter = Filter(_type="string", fmt="date") + _to_check = "test123" + assert not dif_pres_exch_handler.validate_patch( + to_check=_to_check, _filter=_filter + ) + _to_check = 123 + assert not dif_pres_exch_handler.validate_patch( + to_check=_to_check, _filter=_filter + ) + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_derive_cred_missing_credsubjectid(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0654", + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "limit_disclosure": "required", + "fields":[ + { + "path":[ + "$.credentialSubject.familyName" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "const": "SMITH" + } + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize(test_pd) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_signed_cred_no_credsubjectid, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp["verifiableCredential"]) == 2 + for tmp_vc in tmp_vp.get("verifiableCredential"): + assert tmp_vc.get("credentialSubject").get("id").startswith("urn:") + assert tmp_vc.get("credentialSubject").get("familyName") == "SMITH" + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_derive_cred_credsubjectid(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0654", + "input_descriptors":[ + { + "id":"citizenship_input_1", + "name":"EU Driver's License", + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://w3id.org/citizenship#PermanentResidentCard" + } + ], + "constraints":{ + "limit_disclosure": "required", + "fields":[ + { + "path":[ + "$.credentialSubject.familyName" + ], + "purpose":"The claim must be from one of the specified issuers", + "filter":{ + "const": "SMITH" + } + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize(test_pd) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_signed_cred_credsubjectid, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp["verifiableCredential"]) == 1 + assert "givenName" not in tmp_vp.get("verifiableCredential")[0].get( + 
"credentialSubject" + ) + assert ( + tmp_vp.get("verifiableCredential")[0].get("credentialSubject").get("id") + == "did:sov:WgWxqztrNooG92RXvxSTWv" + ) + assert ( + tmp_vp.get("verifiableCredential")[0] + .get("credentialSubject") + .get("familyName") + == "SMITH" + ) + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_derive_nested_cred_missing_credsubjectid_a(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0654", + "input_descriptors":[ + { + "id":"degree_input_1", + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://example.org/examples#UniversityDegreeCredential" + } + ], + "constraints":{ + "limit_disclosure": "required", + "fields":[ + { + "path":[ + "$.credentialSubject.degree.name" + ], + "filter":{ + "const": "Bachelor of Science and Arts" + } + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize(test_pd) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_signed_cred_no_credsubjectid, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp["verifiableCredential"]) == 1 + assert ( + tmp_vp.get("verifiableCredential")[0] + .get("credentialSubject") + .get("id") + .startswith("urn:") + ) + assert ( + tmp_vp.get("verifiableCredential")[0] + .get("credentialSubject") + .get("degree") + .get("name") + == "Bachelor of Science and Arts" + ) + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_derive_nested_cred_missing_credsubjectid_b(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0654", + "input_descriptors":[ + { + "id":"degree_input_1", + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://example.org/examples#UniversityDegreeCredential" + } + ], + "constraints":{ + "limit_disclosure": "required", + "fields":[ + { + "path":[ + "$.credentialSubject.college" + ], + "filter":{ + "const": "Contoso University" + } + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize(test_pd) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_signed_cred_no_credsubjectid, + pd=tmp_pd, + challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp["verifiableCredential"]) == 1 + assert ( + tmp_vp.get("verifiableCredential")[0] + .get("credentialSubject") + .get("id") + .startswith("urn:") + ) + assert ( + tmp_vp.get("verifiableCredential")[0] + .get("credentialSubject") + .get("college") + == "Contoso University" + ) + + @pytest.mark.asyncio + @pytest.mark.ursa_bbs_signatures + async def test_derive_nested_cred_credsubjectid(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + test_pd = """ + { + "id":"32f54163-7166-48f1-93d8-ff217bdb0654", + "input_descriptors":[ + { + "id":"degree_input_1", + "schema":[ + { + "uri":"https://www.w3.org/2018/credentials#VerifiableCredential" + }, + { + "uri":"https://example.org/examples#UniversityDegreeCredential" + } + ], + "constraints":{ + "limit_disclosure": "required", + "fields":[ + { + "path":[ + "$.credentialSubject.degree.name" + ], + "filter":{ + "const": "Bachelor of Science and Arts" + } + } + ] + } + } + ] + } + """ + tmp_pd = PresentationDefinition.deserialize(test_pd) + tmp_vp = await dif_pres_exch_handler.create_vp( + credentials=bbs_signed_cred_credsubjectid, + pd=tmp_pd, + 
challenge="1f44d55f-f161-4938-a659-f8026467f126", + ) + assert len(tmp_vp["verifiableCredential"]) == 1 + assert ( + tmp_vp.get("verifiableCredential")[0].get("credentialSubject").get("id") + == "did:sov:WgWxqztrNooG92RXvxSTWv" + ) + assert ( + tmp_vp.get("verifiableCredential")[0] + .get("credentialSubject") + .get("degree") + .get("name") + == "Bachelor of Science and Arts" + ) + + @pytest.mark.asyncio + async def test_filter_by_field_path_match_on_proof(self, profile): + dif_pres_exch_handler = DIFPresExchHandler(profile) + field = DIFField(paths=["$.proof.proofPurpose"]) + cred = VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + record_id="test1", + ) + with pytest.raises(DIFPresExchError): + await dif_pres_exch_handler.filter_by_field(field, cred) diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_request.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_request.py new file mode 100644 index 0000000000..e37d2232b3 --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_request.py @@ -0,0 +1,108 @@ +from unittest import TestCase + +from ..pres_request_schema import DIFProofRequestSchema + + +class TestPresRequestSchema(TestCase): + """DIF Presentation Request Test""" + + def test_limit_disclosure(self): + test_pd_a = { + "options": { + "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7", + "domain": "4jt78h47fh47", + }, + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A", + } + ], + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + { + "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints": { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": ["JOHN", "CAI"], + }, + } + ], + }, + } + ], + }, + } + test_pd_b = { + "options": { + "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7", + "domain": "4jt78h47fh47", + }, + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A", + } + ], + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + { + "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints": { + "limit_disclosure": "preferred", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": ["JOHN", "CAI"], + }, + } + ], + }, + } + ], + }, + } + + pres_request_a = 
DIFProofRequestSchema().load(test_pd_a)
+        test_limit_disclosure_a = (
+            pres_request_a.get("presentation_definition")
+            .input_descriptors[0]
+            .constraint.limit_disclosure
+        )
+        assert test_limit_disclosure_a == "required"
+        pres_request_b = DIFProofRequestSchema().load(test_pd_b)
+        test_limit_disclosure_b = (
+            pres_request_b.get("presentation_definition")
+            .input_descriptors[0]
+            .constraint.limit_disclosure
+        )
+        assert test_limit_disclosure_b == "preferred"
diff --git a/aries_cloudagent/protocols/present_proof/indy/__init__.py b/aries_cloudagent/protocols/present_proof/indy/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/aries_cloudagent/protocols/present_proof/indy/pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/indy/pres_exch_handler.py
new file mode 100644
index 0000000000..2e6b8fdc69
--- /dev/null
+++ b/aries_cloudagent/protocols/present_proof/indy/pres_exch_handler.py
@@ -0,0 +1,250 @@
+"""Utilities for dif presentation exchange attachment."""
+import json
+import logging
+import time
+
+from typing import Union, Tuple
+
+from ....core.error import BaseError
+from ....core.profile import Profile
+from ....indy.holder import IndyHolder, IndyHolderError
+from ....indy.models.xform import indy_proof_req2non_revoc_intervals
+from ....ledger.base import BaseLedger
+from ....revocation.models.revocation_registry import RevocationRegistry
+
+from ..v1_0.models.presentation_exchange import V10PresentationExchange
+from ..v2_0.messages.pres_format import V20PresFormat
+from ..v2_0.models.pres_exchange import V20PresExRecord
+
+LOGGER = logging.getLogger(__name__)
+
+
+class IndyPresExchHandlerError(BaseError):
+    """Base class for Indy Presentation Exchange related errors."""
+
+
+class IndyPresExchHandler:
+    """Base Presentation Exchange Handler."""
+
+    def __init__(
+        self,
+        profile: Profile,
+    ):
+        """Initialize PresExchange Handler."""
+        super().__init__()
+        self._profile = profile
+
+    async def return_presentation(
+        self,
+        pres_ex_record: Union[V10PresentationExchange, V20PresExRecord],
+        requested_credentials: dict = {},
+    ) -> dict:
+        """Return Indy proof request as dict."""
+        # Get all credentials for this presentation
+        holder = self._profile.inject(IndyHolder)
+        credentials = {}
+
+        # extract credential ids and non_revoked
+        requested_referents = {}
+        if isinstance(pres_ex_record, V20PresExRecord):
+            proof_request = pres_ex_record.pres_request.attachment(
+                V20PresFormat.Format.INDY
+            )
+        elif isinstance(pres_ex_record, V10PresentationExchange):
+            proof_request = pres_ex_record._presentation_request.ser
+        non_revoc_intervals = indy_proof_req2non_revoc_intervals(proof_request)
+        attr_creds = requested_credentials.get("requested_attributes", {})
+        req_attrs = proof_request.get("requested_attributes", {})
+        for reft in attr_creds:
+            requested_referents[reft] = {"cred_id": attr_creds[reft]["cred_id"]}
+            if reft in req_attrs and reft in non_revoc_intervals:
+                requested_referents[reft]["non_revoked"] = non_revoc_intervals[reft]
+        pred_creds = requested_credentials.get("requested_predicates", {})
+        req_preds = proof_request.get("requested_predicates", {})
+        for reft in pred_creds:
+            requested_referents[reft] = {"cred_id": pred_creds[reft]["cred_id"]}
+            if reft in req_preds and reft in non_revoc_intervals:
+                requested_referents[reft]["non_revoked"] = non_revoc_intervals[reft]
+        # extract mapping of presentation referents to credential ids
+        for reft in requested_referents:
+            credential_id = requested_referents[reft]["cred_id"]
+            if credential_id not in credentials:
+                credentials[credential_id] = json.loads(
+                    await holder.get_credential(credential_id)
+                )
+        # remove any timestamps that cannot correspond to non-revoc intervals
+        for r in ("requested_attributes", "requested_predicates"):
+            for reft, req_item in requested_credentials.get(r, {}).items():
+                if not credentials[req_item["cred_id"]].get(
+                    "rev_reg_id"
+                ) and req_item.pop("timestamp", None):
+                    LOGGER.info(
+                        f"Removed superfluous timestamp from requested_credentials {r} "
+                        f"{reft} for non-revocable credential {req_item['cred_id']}"
+                    )
+        # Get all schemas, credential definitions, and revocation registries in use
+        ledger = self._profile.inject(BaseLedger)
+        schemas = {}
+        cred_defs = {}
+        revocation_registries = {}
+        async with ledger:
+            for credential in credentials.values():
+                schema_id = credential["schema_id"]
+                if schema_id not in schemas:
+                    schemas[schema_id] = await ledger.get_schema(schema_id)
+                cred_def_id = credential["cred_def_id"]
+                if cred_def_id not in cred_defs:
+                    cred_defs[cred_def_id] = await ledger.get_credential_definition(
+                        cred_def_id
+                    )
+                if credential.get("rev_reg_id"):
+                    revocation_registry_id = credential["rev_reg_id"]
+                    if revocation_registry_id not in revocation_registries:
+                        revocation_registries[
+                            revocation_registry_id
+                        ] = RevocationRegistry.from_definition(
+                            await ledger.get_revoc_reg_def(revocation_registry_id), True
+                        )
+        # Get delta with non-revocation interval defined in "non_revoked"
+        # of the presentation request or attributes
+        epoch_now = int(time.time())
+        revoc_reg_deltas = {}
+        async with ledger:
+            for precis in requested_referents.values():  # cred_id, non-revoc interval
+                credential_id = precis["cred_id"]
+                if not credentials[credential_id].get("rev_reg_id"):
+                    continue
+                if "timestamp" in precis:
+                    continue
+                rev_reg_id = credentials[credential_id]["rev_reg_id"]
+                reft_non_revoc_interval = precis.get("non_revoked")
+                if reft_non_revoc_interval:
+                    key = (
+                        f"{rev_reg_id}_"
+                        f"{reft_non_revoc_interval.get('from', 0)}_"
+                        f"{reft_non_revoc_interval.get('to', epoch_now)}"
+                    )
+                    if key not in revoc_reg_deltas:
+                        (delta, delta_timestamp) = await ledger.get_revoc_reg_delta(
+                            rev_reg_id,
+                            reft_non_revoc_interval.get("from", 0),
+                            reft_non_revoc_interval.get("to", epoch_now),
+                        )
+                        revoc_reg_deltas[key] = (
+                            rev_reg_id,
+                            credential_id,
+                            delta,
+                            delta_timestamp,
+                        )
+                    for stamp_me in requested_referents.values():
+                        # often one cred satisfies many requested attrs/preds
+                        if stamp_me["cred_id"] == credential_id:
+                            stamp_me["timestamp"] = revoc_reg_deltas[key][3]
+        # Get revocation states to prove non-revoked
+        revocation_states = {}
+        for (
+            rev_reg_id,
+            credential_id,
+            delta,
+            delta_timestamp,
+        ) in revoc_reg_deltas.values():
+            if rev_reg_id not in revocation_states:
+                revocation_states[rev_reg_id] = {}
+            rev_reg = revocation_registries[rev_reg_id]
+            tails_local_path = await rev_reg.get_or_fetch_local_tails_path()
+            try:
+                revocation_states[rev_reg_id][delta_timestamp] = json.loads(
+                    await holder.create_revocation_state(
+                        credentials[credential_id]["cred_rev_id"],
+                        rev_reg.reg_def,
+                        delta,
+                        delta_timestamp,
+                        tails_local_path,
+                    )
+                )
+            except IndyHolderError as e:
+                LOGGER.error(
+                    f"Failed to create revocation state: {e.error_code}, {e.message}"
+                )
+                raise e
+        for (referent, precis) in requested_referents.items():
+            if "timestamp" not in precis:
+                continue
+            if referent in requested_credentials["requested_attributes"]:
+                requested_credentials["requested_attributes"][referent][
+                    "timestamp"
+                ]
= precis["timestamp"] + if referent in requested_credentials["requested_predicates"]: + requested_credentials["requested_predicates"][referent][ + "timestamp" + ] = precis["timestamp"] + indy_proof_json = await holder.create_presentation( + proof_request, + requested_credentials, + schemas, + cred_defs, + revocation_states, + ) + indy_proof = json.loads(indy_proof_json) + return indy_proof + + async def process_pres_identifiers( + self, + identifiers: list, + ) -> Tuple[dict, dict, dict, dict]: + """Return schemas, cred_defs, rev_reg_defs, rev_reg_entries.""" + schema_ids = [] + cred_def_ids = [] + + schemas = {} + cred_defs = {} + rev_reg_defs = {} + rev_reg_entries = {} + + ledger = self._profile.inject(BaseLedger) + async with ledger: + for identifier in identifiers: + schema_ids.append(identifier["schema_id"]) + cred_def_ids.append(identifier["cred_def_id"]) + + # Build schemas for anoncreds + if identifier["schema_id"] not in schemas: + schemas[identifier["schema_id"]] = await ledger.get_schema( + identifier["schema_id"] + ) + + if identifier["cred_def_id"] not in cred_defs: + cred_defs[ + identifier["cred_def_id"] + ] = await ledger.get_credential_definition( + identifier["cred_def_id"] + ) + + if identifier.get("rev_reg_id"): + if identifier["rev_reg_id"] not in rev_reg_defs: + rev_reg_defs[ + identifier["rev_reg_id"] + ] = await ledger.get_revoc_reg_def(identifier["rev_reg_id"]) + + if identifier.get("timestamp"): + rev_reg_entries.setdefault(identifier["rev_reg_id"], {}) + + if ( + identifier["timestamp"] + not in rev_reg_entries[identifier["rev_reg_id"]] + ): + ( + found_rev_reg_entry, + _found_timestamp, + ) = await ledger.get_revoc_reg_entry( + identifier["rev_reg_id"], identifier["timestamp"] + ) + rev_reg_entries[identifier["rev_reg_id"]][ + identifier["timestamp"] + ] = found_rev_reg_entry + return ( + schemas, + cred_defs, + rev_reg_defs, + rev_reg_entries, + ) diff --git a/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py b/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py index bb22c22827..231e2c5a31 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py @@ -1,7 +1,7 @@ """Presentation request message handler.""" from .....indy.holder import IndyHolder, IndyHolderError -from .....indy.sdk.models.xform import indy_proof_req_preview2indy_requested_creds +from .....indy.models.xform import indy_proof_req_preview2indy_requested_creds from .....ledger.error import LedgerError from .....messaging.base_handler import BaseHandler, HandlerException from .....messaging.models.base import BaseModelError diff --git a/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py b/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py index ca3947d7cb..e8cefb0371 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py @@ -1,6 +1,6 @@ from asynctest import mock as async_mock, TestCase as AsyncTestCase -from ......indy.sdk.models.pres_preview import ( +from ......indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPredSpec, IndyPresPreview, diff --git a/aries_cloudagent/protocols/present_proof/v1_0/manager.py 
b/aries_cloudagent/protocols/present_proof/v1_0/manager.py index ae051f5c2b..6c19ad356d 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/manager.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/manager.py @@ -2,21 +2,18 @@ import json import logging -import time from ....connections.models.conn_record import ConnRecord from ....core.error import BaseError from ....core.profile import Profile -from ....indy.holder import IndyHolder, IndyHolderError -from ....indy.sdk.models.xform import indy_proof_req2non_revoc_intervals from ....indy.verifier import IndyVerifier from ....ledger.base import BaseLedger from ....messaging.decorators.attach_decorator import AttachDecorator from ....messaging.responder import BaseResponder -from ....revocation.models.revocation_registry import RevocationRegistry from ....storage.error import StorageNotFoundError -from .models.presentation_exchange import V10PresentationExchange +from ..indy.pres_exch_handler import IndyPresExchHandler + from .messages.presentation_ack import PresentationAck from .messages.presentation_problem_report import ( PresentationProblemReport, @@ -26,6 +23,7 @@ from .messages.presentation_request import PresentationRequest from .messages.presentation import Presentation from .message_types import ATTACH_DECO_IDS, PRESENTATION, PRESENTATION_REQUEST +from .models.presentation_exchange import V10PresentationExchange LOGGER = logging.getLogger(__name__) @@ -264,163 +262,11 @@ async def create_presentation( A tuple (updated presentation exchange record, presentation message) """ - - # Get all credentials for this presentation - holder = self._profile.inject(IndyHolder) - credentials = {} - - # extract credential ids and non_revoked - requested_referents = {} - presentation_request = presentation_exchange_record._presentation_request.ser - non_revoc_intervals = indy_proof_req2non_revoc_intervals(presentation_request) - attr_creds = requested_credentials.get("requested_attributes", {}) - req_attrs = presentation_request.get("requested_attributes", {}) - - for reft in attr_creds: - requested_referents[reft] = {"cred_id": attr_creds[reft]["cred_id"]} - if reft in req_attrs and reft in non_revoc_intervals: - requested_referents[reft]["non_revoked"] = non_revoc_intervals[reft] - - pred_creds = requested_credentials.get("requested_predicates", {}) - req_preds = presentation_request.get("requested_predicates", {}) - for reft in pred_creds: - requested_referents[reft] = {"cred_id": pred_creds[reft]["cred_id"]} - if reft in req_preds and reft in non_revoc_intervals: - requested_referents[reft]["non_revoked"] = non_revoc_intervals[reft] - - # extract mapping of presentation referents to credential ids - for reft in requested_referents: - credential_id = requested_referents[reft]["cred_id"] - if credential_id not in credentials: - credentials[credential_id] = json.loads( - await holder.get_credential(credential_id) - ) - - # remove any timestamps that cannot correspond to non-revoc intervals - for r in ("requested_attributes", "requested_predicates"): - for reft, req_item in requested_credentials.get(r, {}).items(): - if not credentials[req_item["cred_id"]].get( - "rev_reg_id" - ) and req_item.pop("timestamp", None): - LOGGER.info( - f"Removed superfluous timestamp from requested_credentials {r} " - f"{reft} for non-revocable credential {req_item['cred_id']}" - ) - - # Get all schemas, credential definitions, and revocation registries in use - ledger = self._profile.inject(BaseLedger) - schemas = {} - credential_definitions = {} - 
revocation_registries = {} - - async with ledger: - for credential in credentials.values(): - schema_id = credential["schema_id"] - if schema_id not in schemas: - schemas[schema_id] = await ledger.get_schema(schema_id) - - credential_definition_id = credential["cred_def_id"] - if credential_definition_id not in credential_definitions: - credential_definitions[ - credential_definition_id - ] = await ledger.get_credential_definition(credential_definition_id) - - if credential.get("rev_reg_id"): - revocation_registry_id = credential["rev_reg_id"] - if revocation_registry_id not in revocation_registries: - revocation_registries[ - revocation_registry_id - ] = RevocationRegistry.from_definition( - await ledger.get_revoc_reg_def(revocation_registry_id), True - ) - - # Get delta with non-revocation interval defined in "non_revoked" - # of the presentation request or attributes - epoch_now = int(time.time()) - - revoc_reg_deltas = {} - async with ledger: - for precis in requested_referents.values(): # cred_id, non-revoc interval - credential_id = precis["cred_id"] - if not credentials[credential_id].get("rev_reg_id"): - continue - if "timestamp" in precis: - continue - rev_reg_id = credentials[credential_id]["rev_reg_id"] - reft_non_revoc_interval = precis.get("non_revoked") - if reft_non_revoc_interval: - key = ( - f"{rev_reg_id}_" - f"{reft_non_revoc_interval.get('from', 0)}_" - f"{reft_non_revoc_interval.get('to', epoch_now)}" - ) - if key not in revoc_reg_deltas: - (delta, delta_timestamp) = await ledger.get_revoc_reg_delta( - rev_reg_id, - reft_non_revoc_interval.get("from", 0), - reft_non_revoc_interval.get("to", epoch_now), - ) - revoc_reg_deltas[key] = ( - rev_reg_id, - credential_id, - delta, - delta_timestamp, - ) - for stamp_me in requested_referents.values(): - # often one cred satisfies many requested attrs/preds - if stamp_me["cred_id"] == credential_id: - stamp_me["timestamp"] = revoc_reg_deltas[key][3] - - # Get revocation states to prove non-revoked - revocation_states = {} - for ( - rev_reg_id, - credential_id, - delta, - delta_timestamp, - ) in revoc_reg_deltas.values(): - if rev_reg_id not in revocation_states: - revocation_states[rev_reg_id] = {} - - rev_reg = revocation_registries[rev_reg_id] - tails_local_path = await rev_reg.get_or_fetch_local_tails_path() - - try: - revocation_states[rev_reg_id][delta_timestamp] = json.loads( - await holder.create_revocation_state( - credentials[credential_id]["cred_rev_id"], - rev_reg.reg_def, - delta, - delta_timestamp, - tails_local_path, - ) - ) - except IndyHolderError as e: - LOGGER.error( - f"Failed to create revocation state: {e.error_code}, {e.message}" - ) - raise e - - for (referent, precis) in requested_referents.items(): - if "timestamp" not in precis: - continue - if referent in requested_credentials["requested_attributes"]: - requested_credentials["requested_attributes"][referent][ - "timestamp" - ] = precis["timestamp"] - if referent in requested_credentials["requested_predicates"]: - requested_credentials["requested_predicates"][referent][ - "timestamp" - ] = precis["timestamp"] - - indy_proof_json = await holder.create_presentation( - presentation_exchange_record._presentation_request.ser, - requested_credentials, - schemas, - credential_definitions, - revocation_states, + indy_handler = IndyPresExchHandler(self._profile) + indy_proof = await indy_handler.return_presentation( + pres_ex_record=presentation_exchange_record, + requested_credentials=requested_credentials, ) - indy_proof = json.loads(indy_proof_json) 
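The removed block above is the inline Indy proof construction that now lives in IndyPresExchHandler: create_presentation delegates to return_presentation, and verify_presentation (further down in this file) delegates to process_pres_identifiers. Below is a minimal sketch, not part of the change set, of how that shared handler is driven, assuming a profile with IndyHolder, BaseLedger and IndyVerifier already bound and a requested_credentials mapping of the shape indy_proof_req_preview2indy_requested_creds produces; the build_and_verify wrapper is illustrative only, and in practice the two halves run on different agents (prover and verifier).

from aries_cloudagent.indy.verifier import IndyVerifier
from aries_cloudagent.protocols.present_proof.indy.pres_exch_handler import (
    IndyPresExchHandler,
)


async def build_and_verify(profile, pres_ex_record, requested_credentials):
    """Illustrative wrapper only: exercise both halves of IndyPresExchHandler."""
    handler = IndyPresExchHandler(profile)

    # Prover side: resolve credentials, revocation deltas/states and timestamps,
    # then build the Indy proof for the attached proof request.
    indy_proof = await handler.return_presentation(
        pres_ex_record=pres_ex_record,
        requested_credentials=requested_credentials,
    )

    # Verifier side: gather the ledger artifacts referenced by the proof
    # identifiers and hand everything to the injected IndyVerifier.
    (
        schemas,
        cred_defs,
        rev_reg_defs,
        rev_reg_entries,
    ) = await handler.process_pres_identifiers(indy_proof["identifiers"])
    verifier = profile.inject(IndyVerifier)
    return await verifier.verify_presentation(
        pres_ex_record._presentation_request.ser,  # v1.0 record's proof request
        indy_proof,
        schemas,
        cred_defs,
        rev_reg_defs,
        rev_reg_entries,
    )

The handler accepts either a V10PresentationExchange or a V20PresExRecord, so the v2.0 Indy presentation flow can reuse the same credential, timestamp and revocation-state handling instead of duplicating it here.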
presentation_message = Presentation( comment=comment, @@ -541,57 +387,13 @@ async def verify_presentation( """ indy_proof_request = presentation_exchange_record._presentation_request.ser indy_proof = presentation_exchange_record._presentation.ser - - schema_ids = [] - credential_definition_ids = [] - - schemas = {} - credential_definitions = {} - rev_reg_defs = {} - rev_reg_entries = {} - - identifiers = indy_proof["identifiers"] - ledger = self._profile.inject(BaseLedger) - async with ledger: - for identifier in identifiers: - schema_ids.append(identifier["schema_id"]) - credential_definition_ids.append(identifier["cred_def_id"]) - - # Build schemas for anoncreds - if identifier["schema_id"] not in schemas: - schemas[identifier["schema_id"]] = await ledger.get_schema( - identifier["schema_id"] - ) - - if identifier["cred_def_id"] not in credential_definitions: - credential_definitions[ - identifier["cred_def_id"] - ] = await ledger.get_credential_definition( - identifier["cred_def_id"] - ) - - if identifier.get("rev_reg_id"): - if identifier["rev_reg_id"] not in rev_reg_defs: - rev_reg_defs[ - identifier["rev_reg_id"] - ] = await ledger.get_revoc_reg_def(identifier["rev_reg_id"]) - - if identifier.get("timestamp"): - rev_reg_entries.setdefault(identifier["rev_reg_id"], {}) - - if ( - identifier["timestamp"] - not in rev_reg_entries[identifier["rev_reg_id"]] - ): - ( - found_rev_reg_entry, - _found_timestamp, - ) = await ledger.get_revoc_reg_entry( - identifier["rev_reg_id"], identifier["timestamp"] - ) - rev_reg_entries[identifier["rev_reg_id"]][ - identifier["timestamp"] - ] = found_rev_reg_entry + indy_handler = IndyPresExchHandler(self._profile) + ( + schemas, + cred_defs, + rev_reg_defs, + rev_reg_entries, + ) = await indy_handler.process_pres_identifiers(indy_proof["identifiers"]) verifier = self._profile.inject(IndyVerifier) presentation_exchange_record.verified = json.dumps( # tag: needs string value @@ -599,7 +401,7 @@ async def verify_presentation( indy_proof_request, indy_proof, schemas, - credential_definitions, + cred_defs, rev_reg_defs, rev_reg_entries, ) diff --git a/aries_cloudagent/protocols/present_proof/v1_0/messages/presentation_proposal.py b/aries_cloudagent/protocols/present_proof/v1_0/messages/presentation_proposal.py index 90c46433bb..872961d738 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/messages/presentation_proposal.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/messages/presentation_proposal.py @@ -2,7 +2,7 @@ from marshmallow import EXCLUDE, fields -from .....indy.sdk.models.pres_preview import IndyPresPreview, IndyPresPreviewSchema +from .....indy.models.pres_preview import IndyPresPreview, IndyPresPreviewSchema from .....messaging.agent_message import AgentMessage, AgentMessageSchema from ..message_types import PRESENTATION_PROPOSAL, PROTOCOL_PACKAGE diff --git a/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation.py b/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation.py index 085147f4bd..ae4a817153 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation.py @@ -2,7 +2,7 @@ from datetime import datetime, timezone from unittest import TestCase -from ......indy.sdk.models.pres_preview import PRESENTATION_PREVIEW +from ......indy.models.pres_preview import PRESENTATION_PREVIEW from ......messaging.decorators.attach_decorator import AttachDecorator from 
......messaging.util import str_to_datetime, str_to_epoch diff --git a/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_proposal.py b/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_proposal.py index 972a879922..2c43f97269 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_proposal.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_proposal.py @@ -1,6 +1,6 @@ from unittest import TestCase -from ......indy.sdk.models.pres_preview import ( +from ......indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPredSpec, IndyPresPreview, diff --git a/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_request.py b/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_request.py index c0b5b7adb4..c1e3ebcdaf 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_request.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/messages/tests/test_presentation_request.py @@ -2,7 +2,7 @@ from datetime import datetime, timezone from unittest import TestCase -from ......indy.sdk.models.pres_preview import PRESENTATION_PREVIEW +from ......indy.models.pres_preview import PRESENTATION_PREVIEW from ......messaging.decorators.attach_decorator import AttachDecorator from ......messaging.util import str_to_datetime, str_to_epoch diff --git a/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py b/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py index 95f74952fc..ef13ad5b89 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py @@ -7,8 +7,8 @@ from marshmallow import fields, validate from .....core.profile import ProfileSession -from .....indy.sdk.models.proof import IndyProof, IndyProofSchema -from .....indy.sdk.models.proof_request import IndyProofRequest, IndyProofRequestSchema +from .....indy.models.proof import IndyProof, IndyProofSchema +from .....indy.models.proof_request import IndyProofRequest, IndyProofRequestSchema from .....messaging.models.base_record import BaseExchangeRecord, BaseExchangeSchema from .....messaging.valid import UUIDFour from .....storage.base import StorageError diff --git a/aries_cloudagent/protocols/present_proof/v1_0/models/tests/test_record.py b/aries_cloudagent/protocols/present_proof/v1_0/models/tests/test_record.py index 9b82f80d10..62d5c64884 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/models/tests/test_record.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/models/tests/test_record.py @@ -1,7 +1,7 @@ from asynctest import mock as async_mock, TestCase as AsyncTestCase from ......core.in_memory import InMemoryProfile -from ......indy.sdk.models.pres_preview import ( +from ......indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPredSpec, IndyPresPreview, diff --git a/aries_cloudagent/protocols/present_proof/v1_0/routes.py b/aries_cloudagent/protocols/present_proof/v1_0/routes.py index 8e9fdbf4dc..cd1b68a134 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/routes.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/routes.py @@ -15,10 +15,10 @@ from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....indy.holder import IndyHolder, IndyHolderError -from 
....indy.sdk.models.cred_precis import IndyCredPrecisSchema -from ....indy.sdk.models.proof import IndyPresSpecSchema -from ....indy.sdk.models.proof_request import IndyProofRequestSchema -from ....indy.sdk.models.pres_preview import IndyPresPreview, IndyPresPreviewSchema +from ....indy.models.cred_precis import IndyCredPrecisSchema +from ....indy.models.proof import IndyPresSpecSchema +from ....indy.models.proof_request import IndyProofRequestSchema +from ....indy.models.pres_preview import IndyPresPreview, IndyPresPreviewSchema from ....indy.util import generate_pr_nonce from ....ledger.error import LedgerError from ....messaging.decorators.attach_decorator import AttachDecorator diff --git a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py index 314c346399..dd35121f47 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py @@ -5,11 +5,11 @@ from asynctest import mock as async_mock, TestCase as AsyncTestCase from .....core.in_memory import InMemoryProfile -from .....indy.holder import IndyHolder +from .....indy.holder import IndyHolder, IndyHolderError from .....indy.issuer import IndyIssuer from .....indy.sdk.holder import IndySdkHolder -from .....indy.sdk.models.xform import indy_proof_req_preview2indy_requested_creds -from .....indy.sdk.models.pres_preview import ( +from .....indy.models.xform import indy_proof_req_preview2indy_requested_creds +from .....indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPreview, IndyPresPredSpec, @@ -24,6 +24,8 @@ from ....didcomm_prefix import DIDCommPrefix +from ...indy import pres_exch_handler as test_indy_util_module + from .. 
import manager as test_module from ..manager import PresentationManager, PresentationManagerError from ..message_types import ATTACH_DECO_IDS, PRESENTATION, PRESENTATION_REQUEST @@ -448,7 +450,7 @@ async def test_create_presentation(self): ) as save_ex, async_mock.patch.object( test_module, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -491,7 +493,7 @@ async def test_create_presentation_proof_req_non_revoc_interval_none(self): ) as save_ex, async_mock.patch.object( test_module, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -552,7 +554,7 @@ async def test_create_presentation_self_asserted(self): ) as save_ex, async_mock.patch.object( test_module, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -627,7 +629,7 @@ async def test_create_presentation_no_revocation(self): ) as save_ex, async_mock.patch.object( test_module, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module.LOGGER, "info", async_mock.MagicMock() + test_indy_util_module.LOGGER, "info", async_mock.MagicMock() ) as mock_log_info: mock_attach_decorator.data_base64 = async_mock.MagicMock( return_value=mock_attach_decorator @@ -688,7 +690,7 @@ async def test_create_presentation_bad_revoc_state(self): ) self.holder.create_presentation = async_mock.CoroutineMock(return_value="{}") self.holder.create_revocation_state = async_mock.CoroutineMock( - side_effect=test_module.IndyHolderError("Problem", {"message": "Nope"}) + side_effect=IndyHolderError("Problem", {"message": "Nope"}) ) self.profile.context.injector.bind_instance(IndyHolder, self.holder) @@ -702,7 +704,7 @@ async def test_create_presentation_bad_revoc_state(self): ) as save_ex, async_mock.patch.object( test_module, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -714,7 +716,7 @@ async def test_create_presentation_bad_revoc_state(self): indy_proof_req, holder=self.holder ) - with self.assertRaises(test_module.IndyHolderError): + with self.assertRaises(IndyHolderError): await self.manager.create_presentation(exchange_in, req_creds) async def test_create_presentation_multi_matching_proposal_creds_names(self): @@ -789,7 +791,7 @@ async def test_create_presentation_multi_matching_proposal_creds_names(self): ) as save_ex, async_mock.patch.object( test_module, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) diff --git 
a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py index a9191ea04e..f8335341c4 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py @@ -6,7 +6,7 @@ from .....admin.request_context import AdminRequestContext from .....indy.holder import IndyHolder -from .....indy.sdk.models.proof_request import IndyProofReqAttrSpecSchema +from .....indy.models.proof_request import IndyProofReqAttrSpecSchema from .....indy.verifier import IndyVerifier from .....ledger.base import BaseLedger from .....storage.error import StorageNotFoundError @@ -331,7 +331,7 @@ async def test_presentation_exchange_send_proposal(self): "aries_cloudagent.protocols.present_proof.v1_0.manager.PresentationManager", autospec=True, ) as mock_presentation_manager, async_mock.patch( - "aries_cloudagent.indy.sdk.models.pres_preview.IndyPresPreview", + "aries_cloudagent.indy.models.pres_preview.IndyPresPreview", autospec=True, ) as mock_preview: @@ -379,7 +379,7 @@ async def test_presentation_exchange_send_proposal_not_ready(self): "aries_cloudagent.connections.models.conn_record.ConnRecord", autospec=True, ) as mock_connection_record, async_mock.patch( - "aries_cloudagent.indy.sdk.models.pres_preview.IndyPresPreview", + "aries_cloudagent.indy.models.pres_preview.IndyPresPreview", autospec=True, ) as mock_preview, async_mock.patch( ( @@ -408,7 +408,7 @@ async def test_presentation_exchange_send_proposal_x(self): "aries_cloudagent.protocols.present_proof.v1_0.manager.PresentationManager", autospec=True, ) as mock_presentation_manager, async_mock.patch( - "aries_cloudagent.indy.sdk.models.pres_preview.IndyPresPreview", + "aries_cloudagent.indy.models.pres_preview.IndyPresPreview", autospec=True, ) as mock_preview: @@ -439,7 +439,7 @@ async def test_presentation_exchange_create_request(self): "aries_cloudagent.protocols.present_proof.v1_0.manager.PresentationManager", autospec=True, ) as mock_presentation_manager, async_mock.patch( - "aries_cloudagent.indy.sdk.models.pres_preview.IndyPresPreview", + "aries_cloudagent.indy.models.pres_preview.IndyPresPreview", autospec=True, ) as mock_preview, async_mock.patch.object( test_module, "PresentationRequest", autospec=True @@ -493,7 +493,7 @@ async def test_presentation_exchange_create_request_x(self): "aries_cloudagent.protocols.present_proof.v1_0.manager.PresentationManager", autospec=True, ) as mock_presentation_manager, async_mock.patch( - "aries_cloudagent.indy.sdk.models.pres_preview.IndyPresPreview", + "aries_cloudagent.indy.models.pres_preview.IndyPresPreview", autospec=True, ) as mock_preview, async_mock.patch.object( test_module, "PresentationRequest", autospec=True @@ -548,7 +548,7 @@ async def test_presentation_exchange_send_free_request(self): "aries_cloudagent.indy.util.generate_pr_nonce", autospec=True, ) as mock_generate_nonce, async_mock.patch( - "aries_cloudagent.indy.sdk.models.pres_preview.IndyPresPreview", + "aries_cloudagent.indy.models.pres_preview.IndyPresPreview", autospec=True, ) as mock_preview, async_mock.patch.object( test_module, "PresentationRequest", autospec=True @@ -1236,7 +1236,7 @@ async def test_presentation_exchange_verify_presentation(self): "aries_cloudagent.indy.util.generate_pr_nonce", autospec=True, ) as mock_generate_nonce, async_mock.patch( - "aries_cloudagent.indy.sdk.models.pres_preview.IndyPresPreview", + 
"aries_cloudagent.indy.models.pres_preview.IndyPresPreview", autospec=True, ) as mock_preview, async_mock.patch.object( test_module, "PresentationRequest", autospec=True diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/__init__.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/__init__.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py new file mode 100644 index 0000000000..0d77064b3c --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py @@ -0,0 +1,332 @@ +"""V2.0 present-proof dif presentation-exchange format handler.""" + +import logging + +from marshmallow import RAISE +from typing import Mapping, Tuple, Sequence +from uuid import uuid4 + +from ......messaging.decorators.attach_decorator import AttachDecorator +from ......storage.error import StorageNotFoundError +from ......storage.vc_holder.base import VCHolder +from ......storage.vc_holder.vc_record import VCRecord +from ......vc.ld_proofs import ( + DocumentLoader, + Ed25519Signature2018, + BbsBlsSignature2020, + BbsBlsSignatureProof2020, + WalletKeyPair, +) +from ......vc.ld_proofs.constants import EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE +from ......vc.vc_ld.verify import verify_presentation +from ......wallet.base import BaseWallet +from ......wallet.key_type import KeyType + +from ....dif.pres_exch import PresentationDefinition +from ....dif.pres_exch_handler import DIFPresExchHandler +from ....dif.pres_proposal_schema import DIFProofProposalSchema +from ....dif.pres_request_schema import ( + DIFProofRequestSchema, + DIFPresSpecSchema, +) +from ....dif.pres_schema import DIFProofSchema + +from ...message_types import ( + ATTACHMENT_FORMAT, + PRES_20_REQUEST, + PRES_20, + PRES_20_PROPOSAL, +) +from ...messages.pres_format import V20PresFormat +from ...messages.pres import V20Pres +from ...models.pres_exchange import V20PresExRecord + +from ..handler import V20PresFormatHandler, V20PresFormatHandlerError + +LOGGER = logging.getLogger(__name__) + + +class DIFPresFormatHandler(V20PresFormatHandler): + """DIF presentation format handler.""" + + format = V20PresFormat.Format.DIF + + ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING = { + Ed25519Signature2018: KeyType.ED25519, + } + + if BbsBlsSignature2020.BBS_SUPPORTED: + ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING[BbsBlsSignature2020] = KeyType.BLS12381G2 + ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING[ + BbsBlsSignatureProof2020 + ] = KeyType.BLS12381G2 + + async def _get_all_suites(self, wallet: BaseWallet): + """Get all supported suites for verifying presentation.""" + suites = [] + for suite, key_type in self.ISSUE_SIGNATURE_SUITE_KEY_TYPE_MAPPING.items(): + suites.append( + suite( + key_pair=WalletKeyPair(wallet=wallet, key_type=key_type), + ) + ) + return suites + + @classmethod + def validate_fields(cls, message_type: str, attachment_data: Mapping): + """Validate attachment data for a specific message type. + + Uses marshmallow schemas to validate if format specific attachment data + is valid for the specified message type. Only does structural and type + checks, does not validate if .e.g. the issuer value is valid. 
+ + + Args: + message_type (str): The message type to validate the attachment data for. + Should be one of the message types as defined in message_types.py + attachment_data (Mapping): [description] + The attachment data to valide + + Raises: + Exception: When the data is not valid. + + """ + mapping = { + PRES_20_REQUEST: DIFProofRequestSchema, + PRES_20_PROPOSAL: DIFProofProposalSchema, + PRES_20: DIFProofSchema, + } + + # Get schema class + Schema = mapping[message_type] + + # Validate, throw if not valid + Schema(unknown=RAISE).load(attachment_data) + + def get_format_identifier(self, message_type: str) -> str: + """Get attachment format identifier for format and message combination. + + Args: + message_type (str): Message type for which to return the format identifier + + Returns: + str: Issue credential attachment format identifier + + """ + return ATTACHMENT_FORMAT[message_type][DIFPresFormatHandler.format.api] + + def get_format_data( + self, message_type: str, data: dict + ) -> Tuple[V20PresFormat, AttachDecorator]: + """Get presentation format and attach objects for use in pres_ex messages.""" + + return ( + V20PresFormat( + attach_id=DIFPresFormatHandler.format.api, + format_=self.get_format_identifier(message_type), + ), + AttachDecorator.data_json(data, ident=DIFPresFormatHandler.format.api), + ) + + async def create_bound_request( + self, + pres_ex_record: V20PresExRecord, + request_data: dict = None, + ) -> Tuple[V20PresFormat, AttachDecorator]: + """ + Create a presentation request bound to a proposal. + + Args: + pres_ex_record: Presentation exchange record for which + to create presentation request + name: name to use in presentation request (None for default) + version: version to use in presentation request (None for default) + nonce: nonce to use in presentation request (None to generate) + comment: Optional human-readable comment pertaining to request creation + + Returns: + A tuple (updated presentation exchange record, presentation request message) + + """ + dif_proof_request = pres_ex_record.pres_proposal.attachment( + DIFPresFormatHandler.format + ) + + return self.get_format_data(PRES_20_REQUEST, dif_proof_request) + + async def create_pres( + self, + pres_ex_record: V20PresExRecord, + request_data: dict = {}, + ) -> Tuple[V20PresFormat, AttachDecorator]: + """Create a presentation.""" + proof_request = pres_ex_record.pres_request.attachment( + DIFPresFormatHandler.format + ) + pres_definition = None + limit_record_ids = None + challenge = None + domain = None + if request_data != {} and DIFPresFormatHandler.format.api in request_data: + dif_spec = request_data.get(DIFPresFormatHandler.format.api) + pres_spec_payload = DIFPresSpecSchema().load(dif_spec) + # Overriding with prover provided pres_spec + pres_definition = pres_spec_payload.get("presentation_definition") + issuer_id = pres_spec_payload.get("issuer_id") + limit_record_ids = pres_spec_payload.get("record_ids") + if not pres_definition: + if "options" in proof_request: + challenge = proof_request.get("options").get("challenge") + domain = proof_request.get("options").get("domain") + pres_definition = PresentationDefinition.deserialize( + proof_request.get("presentation_definition") + ) + issuer_id = None + if not challenge: + challenge = str(uuid4()) + + input_descriptors = pres_definition.input_descriptors + try: + holder = self._profile.inject(VCHolder) + record_ids = set() + credentials_list = [] + if not limit_record_ids: + for input_descriptor in input_descriptors: + expanded_types = set() + 
schema_ids = set() + for schema in input_descriptor.schemas: + uri = schema.uri + required = schema.required or True + if required: + # JSONLD Expanded URLs + if "#" in uri: + expanded_types.add(uri) + else: + schema_ids.add(uri) + if len(schema_ids) == 0: + schema_ids_list = None + else: + schema_ids_list = list(schema_ids) + if len(expanded_types) == 0: + expanded_types_list = None + else: + expanded_types_list = list(expanded_types) + # Raise Exception if expanded type extracted from + # CREDENTIALS_CONTEXT_V1_URL and + # VERIFIABLE_CREDENTIAL_TYPE is the only schema.uri + # specified in the presentation_definition. + if len(expanded_types_list) == 1: + if expanded_types_list[0] in [ + EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE + ]: + raise V20PresFormatHandlerError( + "Only expanded type extracted from " + "CREDENTIALS_CONTEXT_V1_URL and " + "VERIFIABLE_CREDENTIAL_TYPE included " + "as the schema.uri" + ) + search = holder.search_credentials( + types=expanded_types_list, + schema_ids=schema_ids_list, + ) + # Defaults to page_size but would like to include all + # For now, setting to 1000 + max_results = 1000 + records = await search.fetch(max_results) + # Avoiding addition of duplicate records + ( + vcrecord_list, + vcrecord_ids_set, + ) = await self.process_vcrecords_return_list(records, record_ids) + record_ids = vcrecord_ids_set + credentials_list = credentials_list + vcrecord_list + else: + records = [] + for record_id in limit_record_ids: + records.append(await holder.retrieve_credential_by_id(record_id)) + # Avoiding addition of duplicate records + ( + vcrecord_list, + vcrecord_ids_set, + ) = await self.process_vcrecords_return_list(records, record_ids) + record_ids = vcrecord_ids_set + credentials_list = credentials_list + vcrecord_list + except StorageNotFoundError as err: + raise V20PresFormatHandlerError(err) + # Selecting suite from claim_format + claim_format = pres_definition.fmt + proof_type = None + if claim_format: + if claim_format.ldp_vp: + for proof_req in claim_format.ldp_vp.get("proof_type"): + if proof_req == Ed25519Signature2018.signature_type: + proof_type = Ed25519Signature2018.signature_type + break + elif proof_req == BbsBlsSignature2020.signature_type: + proof_type = BbsBlsSignature2020.signature_type + break + + dif_handler = DIFPresExchHandler( + self._profile, pres_signing_did=issuer_id, proof_type=proof_type + ) + + pres = await dif_handler.create_vp( + challenge=challenge, + domain=domain, + pd=pres_definition, + credentials=credentials_list, + ) + return self.get_format_data(PRES_20, pres) + + async def process_vcrecords_return_list( + self, vc_records: Sequence[VCRecord], record_ids: set + ) -> Tuple[Sequence[VCRecord], set]: + """Return list of non-duplicate VCRecords.""" + to_add = [] + for vc_record in vc_records: + if vc_record.record_id not in record_ids: + to_add.append(vc_record) + record_ids.add(vc_record.record_id) + return (to_add, record_ids) + + async def receive_pres( + self, message: V20Pres, pres_ex_record: V20PresExRecord + ) -> None: + """Receive a presentation, from message in context on manager creation.""" + + async def verify_pres(self, pres_ex_record: V20PresExRecord) -> V20PresExRecord: + """ + Verify a presentation. 
+ + Args: + pres_ex_record: presentation exchange record + with presentation request and presentation to verify + + Returns: + presentation exchange record, updated + + """ + async with self._profile.session() as session: + wallet = session.inject(BaseWallet) + dif_proof = pres_ex_record.pres.attachment(DIFPresFormatHandler.format) + pres_request = pres_ex_record.pres_request.attachment( + DIFPresFormatHandler.format + ) + if "options" in pres_request: + challenge = pres_request.get("options").get("challenge") + else: + raise V20PresFormatHandlerError( + "No options [challenge] set for the presentation request" + ) + if not challenge: + raise V20PresFormatHandlerError( + "No challenge is set for the presentation request" + ) + pres_ver_result = await verify_presentation( + presentation=dif_proof, + suites=await self._get_all_suites(wallet=wallet), + document_loader=self._profile.inject(DocumentLoader), + challenge=challenge, + ) + pres_ex_record.verified = pres_ver_result.verified + return pres_ex_record diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/__init__.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/test_handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/test_handler.py new file mode 100644 index 0000000000..b3b7de9f6f --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/test_handler.py @@ -0,0 +1,902 @@ +from copy import deepcopy +from asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock +from marshmallow import ValidationError + +from .......core.in_memory import InMemoryProfile +from .......messaging.decorators.attach_decorator import AttachDecorator +from .......storage.vc_holder.base import VCHolder +from .......storage.vc_holder.vc_record import VCRecord +from .......vc.ld_proofs import ( + DocumentLoader, + Ed25519Signature2018, + BbsBlsSignature2020, + BbsBlsSignatureProof2020, +) +from .......vc.tests.document_loader import custom_document_loader +from .......vc.vc_ld.validation_result import PresentationVerificationResult +from .......wallet.base import BaseWallet + +from .....dif.pres_exch_handler import DIFPresExchHandler + +from ....message_types import ( + ATTACHMENT_FORMAT, + PRES_20_REQUEST, + PRES_20, + PRES_20_PROPOSAL, +) +from ....messages.pres import V20Pres +from ....messages.pres_proposal import V20PresProposal +from ....messages.pres_request import V20PresRequest +from ....messages.pres_format import V20PresFormat +from ....models.pres_exchange import V20PresExRecord + +from ...handler import V20PresFormatHandlerError + +from .. 
import handler as test_module +from ..handler import DIFPresFormatHandler + +TEST_DID_SOV = "did:sov:LjgpST2rjsoxYegQDRm7EL" +TEST_DID_KEY = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + +DIF_PRES_REQUEST_A = { + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 2, + "from": "A", + } + ], + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [{"uri": "https://example.org/examples/degree.json"}], + "constraints": { + "fields": [ + { + "path": ["$.issuer.id", "$.vc.issuer.id", "$.issuer"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": [ + "did:example:489398593", + "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "did:sov:2wJPyULfLLnYTEFYzByfUR", + ], + }, + } + ] + }, + } + ], + } +} + +DIF_PRES_REQUEST_B = { + "options": { + "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7", + "domain": "4jt78h47fh47", + }, + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A", + } + ], + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"}, + {"uri": "https://w3id.org/citizenship#PermanentResidentCard"}, + ], + "constraints": { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": ["JOHN", "CAI"], + }, + } + ], + }, + } + ], + }, +} + +DIF_PRES_PROPOSAL = { + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"}, + {"uri": "https://w3id.org/citizenship#PermanentResidentCard"}, + ], + "constraints": { + "fields": [ + { + "path": ["$.issuer.id", "$.vc.issuer.id", "$.issuer"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": [ + "did:example:489398593", + "did:key:zUC72Q7XD4PE4CrMiDVXuvZng3sBvMmaGgNeTUJuzavH2BS7ThbHL9FhsZM9QYY5fqAQ4MB8M9oudz3tfuaX36Ajr97QRW7LBt6WWmrtESe6Bs5NYzFtLWEmeVtvRYVAgjFcJSa", + "did:sov:2wJPyULfLLnYTEFYzByfUR", + ], + }, + } + ] + }, + } + ] +} + +DIF_PRES = { + "@context": ["https://www.w3.org/2018/credentials/v1"], + "type": ["VerifiablePresentation"], + "verifiableCredential": [ + { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["PermanentResidentCard", "VerifiableCredential"], + "credentialSubject": { + "id": "did:example:b34ca6cd37bbf23", + "type": ["Person", "PermanentResident"], + "givenName": "JOHN", + }, + "issuanceDate": "2010-01-01T19:53:24Z", + "issuer": "did:key:zUC74bgefTdc43KS1psXgXf4jLaHyaj2qCQqQTXrtmSYGf1PxiJhrH6LGpaBMyj6tqAKmjGyMaS4RfNo2an77vT1HfzJUNPk4H7TCuJvSp4vet4Cu67kn2JSegoQNFSA1tbwU8v", + "proof": { + "type": "BbsBlsSignatureProof2020", + "nonce": 
"3AuruhJQrXtEgiagiJ+FwVf2S0SnzUDJvnO61YecQsJ7ImR1mPcoVjJJ0HOhfkFpoYI=", + "proofValue": "ABkBuAaPlP5A7JWY78Xf69oBnsMLcD1RXbIFYhcLoXPXW12CG9glnnqnPLsGri5xsA3LcP0kg74X+sAjKXGRGy3uvp412Dm0FuohYNboQcLne5KOAa5AxU4bjmwQsxdfduVqhriro1N+YTkuB4SMmO/5ooL0N3OHsYdExg7nSzWqmZoqgp+3CwIxF0a/oyKTcxJORuIqAAAAdInlL9teSIX49NJGEZfBO7IrdjT2iggH/G0AlPWoEvrWIbuCRQ69K83n5o7oJVjqhAAAAAIaVmlAD6+FEKA4eg0OaWOKPrd5Kq8rv0vIwjJ71egxll0Fqq4zDWQ/+yl3Pteh0Wyuyvpm19/sj6tiCWj4PkA+rpxtR2bXpnrCTKUffFFNBjVvVziXDS0KWkGUB7XU9mjUa4USC7Iub3bZZCnFjQA5AAAADzkGwGD837r33e7OTrGEti8eAkvFDcyCgA4ck/X+5HJjAJclHWbl4SNQR8CiNZyzJpvxW+jbNBcwmEvocYArddk3F78Ki0Qnp6aU9eDgfOOx1iW2BXLUjrhq5I2hP5/WQF3CEDYRjczGjzM9T8/coeC36YAp0zJunIXUKb8SPDSOISafibYRYFB4xhlWKXWloDelafyujOBST8KZNM8FmF4DSbXrO8vmZbjuR/8ntUcUK7X2rNbuZ3M5eWZDF8pL+SA9gQitKfPHEocoYAdhgEAM7ZNAJ+TgOcx9gtZIhDWKDNnFxIeoOAylbD1xZd9xbWtq3Bk3R79xqsKxFRJRNxk/9b6fJruP292+qM5lxcZ1jUz/dJUYFI93hH4Mso75CjGRN78MAY9SNifl6H8qcxTpBn4332LlFhRznLbtnc4YSWA/fvVqaN9h2zCH/6AdbLKXGffV34EF7DadwJsi9jsc+YlSMn6qaIUIDTdGLwh4KKpSH5bVbg/mVCcXPTJplFgYwRsOdiQbZY/740dJyo1lPjQ0Lvdio8W2M8c73ujeJU70CNLkgjJAMUPGrCFtGxBH2eeLBQ0P95qRZAIcJ7U0MibZLaRjoUOuTla5BIt2038PJ6XhcY6BEJaLyJOPEQ==", + "verificationMethod": "did:key:zUC74bgefTdc43KS1psXgXf4jLaHyaj2qCQqQTXrtmSYGf1PxiJhrH6LGpaBMyj6tqAKmjGyMaS4RfNo2an77vT1HfzJUNPk4H7TCuJvSp4vet4Cu67kn2JSegoQNFSA1tbwU8v#zUC74bgefTdc43KS1psXgXf4jLaHyaj2qCQqQTXrtmSYGf1PxiJhrH6LGpaBMyj6tqAKmjGyMaS4RfNo2an77vT1HfzJUNPk4H7TCuJvSp4vet4Cu67kn2JSegoQNFSA1tbwU8v", + "proofPurpose": "assertionMethod", + "created": "2021-05-05T15:22:30.523465", + }, + } + ], + "presentation_submission": { + "id": "a5fcfe44-2c30-497d-af02-98e539da9a0f", + "definition_id": "32f54163-7166-48f1-93d8-ff217bdb0653", + "descriptor_map": [ + { + "id": "citizenship_input_1", + "format": "ldp_vp", + "path": "$.verifiableCredential[0]", + } + ], + }, + "proof": { + "type": "Ed25519Signature2018", + "verificationMethod": "did:sov:4QxzWk3ajdnEA37NdNU5Kt#key-1", + "created": "2021-05-05T15:23:03.023971", + "proofPurpose": "authentication", + "challenge": "40429d49-5e8f-4ffc-baf8-e332412f1247", + "jws": "eyJhbGciOiAiRWREU0EiLCAiYjY0IjogZmFsc2UsICJjcml0IjogWyJiNjQiXX0..2uBYmg7muE9ZPVeAGo_ibVfLkCjf2hGshr2o5i8pAwFyNBM-kDHXofuq1MzJgb19wzb01VIu91hY_ajjt9KFAA", + }, +} + +TEST_CRED = { + "@context": [ + "https://www.w3.org/2018/credentials/v1", + "https://w3id.org/citizenship/v1", + "https://w3id.org/security/bbs/v1", + ], + "id": "https://issuer.oidp.uscis.gov/credentials/83627465", + "type": ["VerifiableCredential", "PermanentResidentCard"], + "issuer": "did:key:zUC76eX863NT1BEFYEfSMRY4CSVwRmRdKBtv3cwTwytDXNEAvJxFr3GBjkpKRs3xg9FznUhxXkwDLLu7UjDnetSQNvGiT1ivHdDFByZdXoWLYpDDRph5eZDXTGNweuq3Z8uAo3o", + "identifier": "83627465", + "name": "Permanent Resident Card", + "description": "Government of Example Permanent Resident Card.", + "issuanceDate": "2010-01-01T19:53:24Z", + "expirationDate": "2029-12-03T12:19:52Z", + "credentialSubject": { + "id": "did:sov:4QxzWk3ajdnEA37NdNU5Kt", + "type": ["PermanentResident", "Person"], + "givenName": "JOHN", + "familyName": "SMITH", + "gender": "Male", + "image": "data:image/png;base64,iVBORw0KGgokJggg==", + "residentSince": "2015-01-01", + "lprCategory": "C09", + "lprNumber": "999-999-999", + "commuterClassification": "C1", + "birthCountry": "Bahamas", + "birthDate": "1958-07-17", + }, +} + + +class TestDIFFormatHandler(AsyncTestCase): + async def setUp(self): + self.holder = async_mock.MagicMock() + self.wallet = async_mock.MagicMock(BaseWallet, autospec=True) + + self.session = InMemoryProfile.test_session( 
+ bind={VCHolder: self.holder, BaseWallet: self.wallet} + ) + self.profile = self.session.profile + self.context = self.profile.context + setattr( + self.profile, "session", async_mock.MagicMock(return_value=self.session) + ) + + # Set custom document loader + self.context.injector.bind_instance(DocumentLoader, custom_document_loader) + + self.handler = DIFPresFormatHandler(self.profile) + assert self.handler.profile + + def test_validate_fields(self): + self.handler.validate_fields(PRES_20, DIF_PRES) + self.handler.validate_fields(PRES_20_PROPOSAL, DIF_PRES_PROPOSAL) + self.handler.validate_fields(PRES_20_REQUEST, DIF_PRES_REQUEST_A) + self.handler.validate_fields(PRES_20_REQUEST, DIF_PRES_REQUEST_B) + + with self.assertRaises(ValidationError): + incorrect_pres = DIF_PRES.copy() + incorrect_pres.pop("@context") + self.handler.validate_fields(PRES_20, incorrect_pres) + + async def test_get_all_suites(self): + suites = await self.handler._get_all_suites(self.wallet) + assert len(suites) == 3 + types = [Ed25519Signature2018, BbsBlsSignature2020, BbsBlsSignatureProof2020] + for suite in suites: + assert type(suite) in types + + async def test_create_bound_request(self): + dif_proposal_dict = { + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + { + "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" + } + ], + "constraints": { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "purpose": "The claim must be from one of the specified issuers", + "filter": {"type": "string", "enum": ["JOHN", "CAI"]}, + } + ], + }, + } + ] + } + dif_pres_proposal = V20PresProposal( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_PROPOSAL][ + V20PresFormat.Format.DIF.api + ], + ) + ], + proposals_attach=[ + AttachDecorator.data_json(dif_proposal_dict, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_proposal=dif_pres_proposal, + verified="false", + auto_present=True, + error_msg="error", + ) + output = await self.handler.create_bound_request(pres_ex_record=record) + assert isinstance(output[0], V20PresFormat) and isinstance( + output[1], AttachDecorator + ) + + async def test_create_pres(self): + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(DIF_PRES_REQUEST_B, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + + with async_mock.patch.object( + DIFPresExchHandler, + "create_vp", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_create_vp.return_value = DIF_PRES + output = await self.handler.create_pres(record, {}) + assert isinstance(output[0], V20PresFormat) and isinstance( + output[1], AttachDecorator + ) + assert output[1].data.json_ == DIF_PRES + + async def test_create_pres_pd_schema_uri(self): + dif_pres_req = deepcopy(DIF_PRES_REQUEST_B) + dif_pres_req["presentation_definition"]["input_descriptors"][0]["schema"][0][ + "uri" + ] = "test.json" + dif_pres_request = V20PresRequest( + formats=[ + 
V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(dif_pres_req, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + request_data = {} + with async_mock.patch.object( + DIFPresExchHandler, + "create_vp", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_create_vp.return_value = DIF_PRES + output = await self.handler.create_pres(record, request_data) + assert isinstance(output[0], V20PresFormat) and isinstance( + output[1], AttachDecorator + ) + assert output[1].data.json_ == DIF_PRES + + async def test_create_pres_prover_proof_spec(self): + dif_pres_spec = deepcopy(DIF_PRES_REQUEST_A) + dif_pres_spec["issuer_id"] = "test123" + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(DIF_PRES_REQUEST_B, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + request_data = {} + request_data["dif"] = dif_pres_spec + with async_mock.patch.object( + DIFPresExchHandler, + "create_vp", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_create_vp.return_value = DIF_PRES + output = await self.handler.create_pres(record, request_data) + assert isinstance(output[0], V20PresFormat) and isinstance( + output[1], AttachDecorator + ) + assert output[1].data.json_ == DIF_PRES + + async def test_create_pres_prover_proof_spec_with_record_ids(self): + dif_pres_spec = deepcopy(DIF_PRES_REQUEST_A) + dif_pres_spec["issuer_id"] = "test123" + dif_pres_spec["record_ids"] = ["test1"] + cred = VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + record_id="test1", + ) + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(DIF_PRES_REQUEST_B, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + request_data = {} + request_data["dif"] = dif_pres_spec + + self.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock( + 
retrieve_credential_by_id=async_mock.CoroutineMock(return_value=cred) + ), + ) + + with async_mock.patch.object( + DIFPresExchHandler, + "create_vp", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_create_vp.return_value = DIF_PRES + output = await self.handler.create_pres(record, request_data) + + async def test_create_pres_no_challenge(self): + dif_pres_req = deepcopy(DIF_PRES_REQUEST_B) + del dif_pres_req["options"]["challenge"] + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(dif_pres_req, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + request_data = {} + with async_mock.patch.object( + DIFPresExchHandler, + "create_vp", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_create_vp.return_value = DIF_PRES + output = await self.handler.create_pres(record, request_data) + assert isinstance(output[0], V20PresFormat) and isinstance( + output[1], AttachDecorator + ) + assert output[1].data.json_ == DIF_PRES + + async def test_create_pres_storage_not_found(self): + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(DIF_PRES_REQUEST_B, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + + self.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock( + search_credentials=async_mock.MagicMock( + return_value=async_mock.MagicMock( + fetch=async_mock.CoroutineMock( + side_effect=test_module.StorageNotFoundError() + ) + ) + ) + ), + ) + with self.assertRaises(V20PresFormatHandlerError): + await self.handler.create_pres(record) + + async def test_create_pres_pd_claim_format_ed255(self): + test_pd = deepcopy(DIF_PRES_REQUEST_B) + test_pd["presentation_definition"]["format"] = { + "ldp_vp": {"proof_type": ["Ed25519Signature2018"]} + } + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(test_pd, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + + with async_mock.patch.object( + DIFPresExchHandler, + "create_vp", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_create_vp.return_value = DIF_PRES + output = await self.handler.create_pres(record, {}) + assert isinstance(output[0], V20PresFormat) and isinstance( + output[1], AttachDecorator + ) + assert output[1].data.json_ == DIF_PRES + + async def test_create_pres_pd_claim_format_bls12381g2(self): + test_pd = deepcopy(DIF_PRES_REQUEST_B) + test_pd["presentation_definition"]["format"] = { + "ldp_vp": {"proof_type": 
["BbsBlsSignature2020"]} + } + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(test_pd, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + + with async_mock.patch.object( + DIFPresExchHandler, + "create_vp", + async_mock.CoroutineMock(), + ) as mock_create_vp: + mock_create_vp.return_value = DIF_PRES + output = await self.handler.create_pres(record, {}) + assert isinstance(output[0], V20PresFormat) and isinstance( + output[1], AttachDecorator + ) + assert output[1].data.json_ == DIF_PRES + + async def test_verify_pres(self): + dif_pres = V20Pres( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.DIF.api], + ) + ], + presentations_attach=[AttachDecorator.data_json(DIF_PRES, ident="dif")], + ) + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(DIF_PRES_REQUEST_B, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + pres=dif_pres, + verified="false", + auto_present=True, + error_msg="error", + ) + + with async_mock.patch.object( + test_module, + "verify_presentation", + async_mock.CoroutineMock( + return_value=PresentationVerificationResult(verified=True) + ), + ) as mock_vr: + output = await self.handler.verify_pres(record) + assert output.verified + + async def test_verify_pres_no_challenge(self): + test_pd = deepcopy(DIF_PRES_REQUEST_B) + del test_pd["options"]["challenge"] + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(test_pd, ident="dif") + ], + ) + dif_pres = V20Pres( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.DIF.api], + ) + ], + presentations_attach=[AttachDecorator.data_json(DIF_PRES, ident="dif")], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + pres=dif_pres, + verified="false", + auto_present=True, + error_msg="error", + ) + + with self.assertRaises(V20PresFormatHandlerError): + await self.handler.verify_pres(record) + + async def test_verify_pres_invalid_challenge(self): + test_pd = deepcopy(DIF_PRES_REQUEST_B) + del test_pd["options"] + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(test_pd, ident="dif") + ], + ) + dif_pres = V20Pres( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.DIF.api], + ) + ], + presentations_attach=[AttachDecorator.data_json(DIF_PRES, 
ident="dif")], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + pres=dif_pres, + verified="false", + auto_present=True, + error_msg="error", + ) + + with self.assertRaises(V20PresFormatHandlerError): + await self.handler.verify_pres(record) + + async def test_create_pres_cred_v1_context_schema_uri(self): + test_pd = deepcopy(DIF_PRES_REQUEST_B) + test_pd["presentation_definition"]["input_descriptors"][0]["schema"].pop(1) + dif_pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.DIF.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_json(test_pd, ident="dif") + ], + ) + record = V20PresExRecord( + pres_ex_id="pxid", + thread_id="thid", + connection_id="conn_id", + initiator="init", + role="role", + state="state", + pres_request=dif_pres_request, + verified="false", + auto_present=True, + error_msg="error", + ) + + with self.assertRaises(V20PresFormatHandlerError): + await self.handler.create_pres(record) + + async def test_process_vcrecords_return_list(self): + cred_list = [ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + record_id="test1", + ), + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + record_id="test2", + ), + ] + record_ids = {"test1"} + ( + returned_cred_list, + returned_record_ids, + ) = await self.handler.process_vcrecords_return_list(cred_list, record_ids) + assert len(returned_cred_list) == 1 + assert len(returned_record_ids) == 2 + assert returned_cred_list[0].record_id == "test2" diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/handler.py new file mode 100644 index 0000000000..e70dd34c9c --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/handler.py @@ -0,0 +1,89 @@ +"""present-proof-v2 format handler - supports DIF and INDY.""" +from abc import ABC, abstractclassmethod, abstractmethod +import logging + +from typing import Tuple + +from .....core.error import BaseError +from .....core.profile import Profile +from .....messaging.decorators.attach_decorator import AttachDecorator + +from ..messages.pres import V20Pres +from 
..messages.pres_format import V20PresFormat +from ..models.pres_exchange import V20PresExRecord + +LOGGER = logging.getLogger(__name__) + +PresFormatAttachment = Tuple[V20PresFormat, AttachDecorator] + + +class V20PresFormatHandlerError(BaseError): + """Presentation exchange format error under present-proof protocol v2.0.""" + + +class V20PresFormatHandler(ABC): + """Base Presentation Exchange Handler.""" + + format: V20PresFormat.Format = None + + def __init__(self, profile: Profile): + """Initialize PresExchange Handler.""" + super().__init__() + self._profile = profile + + @property + def profile(self) -> Profile: + """ + Accessor for the current profile instance. + + Returns: + The profile instance for this presentation exchange format + + """ + return self._profile + + @abstractmethod + def get_format_identifier(self, message_type: str) -> str: + """Get attachment format identifier for format and message combination. + + Args: + message_type (str): Message type for which to return the format identifier + + Returns: + str: Presentation attachment format identifier + + """ + + @abstractmethod + def get_format_data(self, message_type: str, data: dict) -> PresFormatAttachment: + """Get presentation format and attach objects for use in pres_ex messages.""" + + @abstractclassmethod + def validate_fields(cls, message_type: str, attachment_data: dict) -> None: + """Validate attachment data for specific message type and format.""" + + @abstractmethod + async def create_bound_request( + self, + pres_ex_record: V20PresExRecord, + request_data: dict = None, + ) -> PresFormatAttachment: + """Create a presentation request bound to a proposal.""" + + @abstractmethod + async def create_pres( + self, + pres_ex_record: V20PresExRecord, + request_data: dict = None, + ) -> PresFormatAttachment: + """Create a presentation.""" + + @abstractmethod + async def receive_pres( + self, message: V20Pres, pres_ex_record: V20PresExRecord + ) -> None: + """Receive a presentation from the message in context on manager creation.""" + + @abstractmethod + async def verify_pres(self, pres_ex_record: V20PresExRecord) -> V20PresExRecord: + """Verify a presentation.""" diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/__init__.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py new file mode 100644 index 0000000000..229ed472d4 --- /dev/null +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py @@ -0,0 +1,330 @@ +"""V2.0 present-proof indy presentation-exchange format handler.""" + +import json +import logging + +from marshmallow import RAISE +from typing import Mapping, Tuple + +from ......indy.holder import IndyHolder +from ......indy.models.predicate import Predicate +from ......indy.models.proof import IndyProofSchema +from ......indy.models.proof_request import IndyProofRequestSchema +from ......indy.models.xform import indy_proof_req_preview2indy_requested_creds +from ......indy.util import generate_pr_nonce +from ......indy.verifier import IndyVerifier +from ......messaging.decorators.attach_decorator import AttachDecorator +from ......messaging.util import canon + +from ....indy.pres_exch_handler import IndyPresExchHandler + +from ...message_types import ( + ATTACHMENT_FORMAT, + PRES_20_REQUEST, + PRES_20, + PRES_20_PROPOSAL, +) +from 
...messages.pres import V20Pres +from ...messages.pres_format import V20PresFormat +from ...models.pres_exchange import V20PresExRecord + +from ..handler import V20PresFormatHandler, V20PresFormatHandlerError + +LOGGER = logging.getLogger(__name__) + + +class IndyPresExchangeHandler(V20PresFormatHandler): + """Indy presentation format handler.""" + + format = V20PresFormat.Format.INDY + + @classmethod + def validate_fields(cls, message_type: str, attachment_data: Mapping): + """Validate attachment data for a specific message type. + + Uses marshmallow schemas to validate whether format-specific attachment data + is valid for the specified message type. Only does structural and type + checks; it does not validate whether, e.g., the issuer value is valid. + + Args: + message_type (str): The message type to validate the attachment data for. + Should be one of the message types as defined in message_types.py + attachment_data (Mapping): The attachment data to validate + + Raises: + ValidationError: When the data is not valid. + + """ + mapping = { + PRES_20_REQUEST: IndyProofRequestSchema, + PRES_20_PROPOSAL: IndyProofRequestSchema, + PRES_20: IndyProofSchema, + } + + # Get schema class + Schema = mapping[message_type] + + # Validate, throw if not valid + Schema(unknown=RAISE).load(attachment_data) + + def get_format_identifier(self, message_type: str) -> str: + """Get attachment format identifier for format and message combination. + + Args: + message_type (str): Message type for which to return the format identifier + + Returns: + str: Presentation attachment format identifier + + """ + return ATTACHMENT_FORMAT[message_type][IndyPresExchangeHandler.format.api] + + def get_format_data( + self, message_type: str, data: dict + ) -> Tuple[V20PresFormat, AttachDecorator]: + """Get presentation format and attach objects for use in pres_ex messages.""" + + return ( + V20PresFormat( + attach_id=IndyPresExchangeHandler.format.api, + format_=self.get_format_identifier(message_type), + ), + AttachDecorator.data_base64(data, ident=IndyPresExchangeHandler.format.api), + ) + + async def create_bound_request( + self, + pres_ex_record: V20PresExRecord, + request_data: dict = None, + ) -> Tuple[V20PresFormat, AttachDecorator]: + """ + Create a presentation request bound to a proposal. 
+ + Args: + pres_ex_record: Presentation exchange record for which + to create presentation request + request_data: Dict with optional name, version, and nonce + overrides for the proof request + + Returns: + A tuple (presentation format, attach decorator) for the bound + presentation request + + """ + indy_proof_request = pres_ex_record.pres_proposal.attachment( + IndyPresExchangeHandler.format + ) + indy_proof_request["name"] = request_data.get("name") or "proof-request" + indy_proof_request["version"] = request_data.get("version") or "1.0" + indy_proof_request["nonce"] = ( + request_data.get("nonce") or await generate_pr_nonce() + ) + return self.get_format_data(PRES_20_REQUEST, indy_proof_request) + + async def create_pres( + self, + pres_ex_record: V20PresExRecord, + request_data: dict = {}, + ) -> Tuple[V20PresFormat, AttachDecorator]: + """Create a presentation.""" + requested_credentials = {} + if request_data == {}: + try: + proof_request = pres_ex_record.pres_request + indy_proof_request = proof_request.attachment( + IndyPresExchangeHandler.format + ) + requested_credentials = ( + await indy_proof_req_preview2indy_requested_creds( + indy_proof_request, + preview=None, + holder=self._profile.inject(IndyHolder), + ) + ) + except ValueError as err: + LOGGER.warning(f"{err}") + raise V20PresFormatHandlerError( + f"No matching Indy credentials found: {err}" + ) + else: + if IndyPresExchangeHandler.format.api in request_data: + indy_spec = request_data.get(IndyPresExchangeHandler.format.api) + requested_credentials = { + "self_attested_attributes": indy_spec["self_attested_attributes"], + "requested_attributes": indy_spec["requested_attributes"], + "requested_predicates": indy_spec["requested_predicates"], + } + indy_handler = IndyPresExchHandler(self._profile) + indy_proof = await indy_handler.return_presentation( + pres_ex_record=pres_ex_record, + requested_credentials=requested_credentials, + ) + return self.get_format_data(PRES_20, indy_proof) + + async def receive_pres( + self, message: V20Pres, pres_ex_record: V20PresExRecord + ) -> None: + """Receive a presentation and check for presented values vs. proposal request.""" + + def _check_proof_vs_proposal(): + """Check for bait and switch in presented values vs. 
proposal request.""" + proof_req = pres_ex_record.pres_request.attachment( + IndyPresExchangeHandler.format + ) + + # revealed attrs + for reft, attr_spec in proof["requested_proof"]["revealed_attrs"].items(): + proof_req_attr_spec = proof_req["requested_attributes"].get(reft) + if not proof_req_attr_spec: + raise V20PresFormatHandlerError( + f"Presentation referent {reft} not in proposal request" + ) + req_restrictions = proof_req_attr_spec.get("restrictions", {}) + + name = proof_req_attr_spec["name"] + proof_value = attr_spec["raw"] + sub_proof_index = attr_spec["sub_proof_index"] + schema_id = proof["identifiers"][sub_proof_index]["schema_id"] + cred_def_id = proof["identifiers"][sub_proof_index]["cred_def_id"] + criteria = { + "schema_id": schema_id, + "schema_issuer_did": schema_id.split(":")[-4], + "schema_name": schema_id.split(":")[-2], + "schema_version": schema_id.split(":")[-1], + "cred_def_id": cred_def_id, + "issuer_did": cred_def_id.split(":")[-5], + f"attr::{name}::value": proof_value, + } + + if not any(r.items() <= criteria.items() for r in req_restrictions): + raise V20PresFormatHandlerError( + f"Presented attribute {reft} does not satisfy proof request " + f"restrictions {req_restrictions}" + ) + + # revealed attr groups + for reft, attr_spec in ( + proof["requested_proof"].get("revealed_attr_groups", {}).items() + ): + proof_req_attr_spec = proof_req["requested_attributes"].get(reft) + if not proof_req_attr_spec: + raise V20PresFormatHandlerError( + f"Presentation referent {reft} not in proposal request" + ) + req_restrictions = proof_req_attr_spec.get("restrictions", {}) + proof_values = { + name: values["raw"] for name, values in attr_spec["values"].items() + } + sub_proof_index = attr_spec["sub_proof_index"] + schema_id = proof["identifiers"][sub_proof_index]["schema_id"] + cred_def_id = proof["identifiers"][sub_proof_index]["cred_def_id"] + criteria = { + "schema_id": schema_id, + "schema_issuer_did": schema_id.split(":")[-4], + "schema_name": schema_id.split(":")[-2], + "schema_version": schema_id.split(":")[-1], + "cred_def_id": cred_def_id, + "issuer_did": cred_def_id.split(":")[-5], + **{ + f"attr::{name}::value": value + for name, value in proof_values.items() + }, + } + + if not any(r.items() <= criteria.items() for r in req_restrictions): + raise V20PresFormatHandlerError( + f"Presented attr group {reft} does not satisfy proof request " + f"restrictions {req_restrictions}" + ) + + # predicate bounds + for reft, pred_spec in proof["requested_proof"]["predicates"].items(): + proof_req_pred_spec = proof_req["requested_predicates"].get(reft) + if not proof_req_pred_spec: + raise V20PresFormatHandlerError( + f"Presentation referent {reft} not in proposal request" + ) + req_name = proof_req_pred_spec["name"] + req_pred = Predicate.get(proof_req_pred_spec["p_type"]) + req_value = proof_req_pred_spec["p_value"] + req_restrictions = proof_req_pred_spec.get("restrictions", {}) + for req_restriction in req_restrictions: + for k in [k for k in req_restriction]: # cannot modify en passant + if k.startswith("attr::"): + req_restriction.pop(k) # let indy-sdk reject mismatch here + sub_proof_index = pred_spec["sub_proof_index"] + for ge_proof in proof["proof"]["proofs"][sub_proof_index][ + "primary_proof" + ]["ge_proofs"]: + proof_pred_spec = ge_proof["predicate"] + if proof_pred_spec["attr_name"] != canon(req_name): + continue + if not ( + Predicate.get(proof_pred_spec["p_type"]) is req_pred + and proof_pred_spec["value"] == req_value + ): + raise 
V20PresFormatHandlerError( + f"Presentation predicate on {req_name} " + "mismatches proposal request" + ) + break + else: + raise V20PresFormatHandlerError( + f"Proposed request predicate on {req_name} not in presentation" + ) + + schema_id = proof["identifiers"][sub_proof_index]["schema_id"] + cred_def_id = proof["identifiers"][sub_proof_index]["cred_def_id"] + criteria = { + "schema_id": schema_id, + "schema_issuer_did": schema_id.split(":")[-4], + "schema_name": schema_id.split(":")[-2], + "schema_version": schema_id.split(":")[-1], + "cred_def_id": cred_def_id, + "issuer_did": cred_def_id.split(":")[-5], + } + + if not any(r.items() <= criteria.items() for r in req_restrictions): + raise V20PresFormatHandlerError( + f"Presented predicate {reft} does not satisfy proof request " + f"restrictions {req_restrictions}" + ) + + proof = message.attachment(IndyPresExchangeHandler.format) + _check_proof_vs_proposal() + + async def verify_pres(self, pres_ex_record: V20PresExRecord) -> V20PresExRecord: + """ + Verify a presentation. + + Args: + pres_ex_record: presentation exchange record + with presentation request and presentation to verify + + Returns: + presentation exchange record, updated + + """ + pres_request_msg = pres_ex_record.pres_request + indy_proof_request = pres_request_msg.attachment(IndyPresExchangeHandler.format) + indy_proof = pres_ex_record.pres.attachment(IndyPresExchangeHandler.format) + indy_handler = IndyPresExchHandler(self._profile) + ( + schemas, + cred_defs, + rev_reg_defs, + rev_reg_entries, + ) = await indy_handler.process_pres_identifiers(indy_proof["identifiers"]) + + verifier = self._profile.inject(IndyVerifier) + pres_ex_record.verified = json.dumps( # tag: needs string value + await verifier.verify_presentation( + indy_proof_request, + indy_proof, + schemas, + cred_defs, + rev_reg_defs, + rev_reg_entries, + ) + ) + return pres_ex_record diff --git a/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py b/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py index 58e1be255e..cde83f185a 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py @@ -1,7 +1,6 @@ """Presentation request message handler.""" -from .....indy.holder import IndyHolder, IndyHolderError -from .....indy.sdk.models.xform import indy_proof_req_preview2indy_requested_creds +from .....indy.holder import IndyHolderError from .....ledger.error import LedgerError from .....messaging.base_handler import BaseHandler, HandlerException from .....messaging.models.base import BaseModelError @@ -12,8 +11,8 @@ from .....wallet.error import WalletNotFoundError from .. 
import problem_report_for_record +from ..formats.handler import V20PresFormatHandlerError from ..manager import V20PresManager -from ..messages.pres_format import V20PresFormat from ..messages.pres_request import V20PresRequest from ..messages.pres_problem_report import ProblemReportReason from ..models.pres_exchange import V20PresExRecord @@ -54,14 +53,15 @@ async def handle(self, context: RequestContext, responder: BaseResponder): {"thread_id": context.message._thread_id}, {"connection_id": context.connection_record.connection_id}, ) # holder initiated via proposal - pres_ex_record.pres_request = context.message.serialize() - except StorageNotFoundError: # verifier sent this request free of any proposal + pres_ex_record.pres_request = context.message + except StorageNotFoundError: + # verifier sent this request free of any proposal pres_ex_record = V20PresExRecord( connection_id=context.connection_record.connection_id, thread_id=context.message._thread_id, initiator=V20PresExRecord.INITIATOR_EXTERNAL, role=V20PresExRecord.ROLE_PROVER, - pres_request=context.message.serialize(), + pres_request=context.message, auto_present=context.settings.get( "debug.auto_respond_presentation_request" ), @@ -81,26 +81,13 @@ async def handle(self, context: RequestContext, responder: BaseResponder): # If auto_present is enabled, respond immediately with presentation if pres_ex_record.auto_present: - indy_proof_request = context.message.attachment(V20PresFormat.Format.INDY) - - try: - req_creds = await indy_proof_req_preview2indy_requested_creds( - indy_proof_request, - preview=None, - holder=context.inject(IndyHolder), - ) - except ValueError as err: - self._logger.warning(f"{err}") - return # not a protocol error: prover could still build proof manually - pres_message = None try: (pres_ex_record, pres_message) = await pres_manager.create_pres( pres_ex_record=pres_ex_record, - requested_credentials=req_creds, comment=( - "auto-presented for proof request nonce " - f"{indy_proof_request['nonce']}" + f"auto-presented for proof requests" + f", pres_ex_record: {pres_ex_record.pres_ex_id}" ), ) await responder.send_reply(pres_message) @@ -110,6 +97,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): LedgerError, StorageError, WalletNotFoundError, + V20PresFormatHandlerError, ) as err: self._logger.exception(err) if pres_ex_record: @@ -124,7 +112,6 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ProblemReportReason.ABANDONED.value, # them: be vague ) ) - trace_event( context.settings, pres_message, diff --git a/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py b/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py index ff7873ae71..12fac3c9bc 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py @@ -1,13 +1,13 @@ from asynctest import mock as async_mock, TestCase as AsyncTestCase +from copy import deepcopy -from ......indy.sdk.models.pres_preview import IndyPresAttrSpec, IndyPresPreview from ......messaging.decorators.attach_decorator import AttachDecorator from ......messaging.request_context import RequestContext from ......messaging.responder import MockResponder from ......storage.error import StorageNotFoundError from ......transport.inbound.receipt import MessageReceipt -from .....didcomm_prefix import DIDCommPrefix +from 
...formats.indy import handler as test_indy_handler from ...messages.pres_format import V20PresFormat from ...messages.pres_proposal import V20PresProposal @@ -59,6 +59,123 @@ } }, } +DIF_PROOF_REQ = { + "options": { + "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "domain": "4jt78h47fh47", + }, + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0653", + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "count": 1, + "from_nested": [ + { + "name": "United States Citizenship Proofs", + "purpose": "We need you to prove you are a US citizen.", + "rule": "all", + "from": "A", + }, + { + "name": "European Union Citizenship Proofs", + "purpose": "We need you to prove you are a citizen of a EU country.", + "rule": "all", + "from": "B", + }, + ], + } + ], + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"} + ], + "constraints": { + "fields": [ + { + "path": ["$.issuer.id", "$.issuer", "$.vc.issuer.id"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": ["did:sov:4cLztgZYocjqTdAZM93t27"], + }, + } + ] + }, + }, + { + "id": "citizenship_input_2", + "name": "US Passport", + "group": ["B"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"} + ], + "constraints": { + "fields": [ + { + "path": ["$.issuanceDate", "$.vc.issuanceDate"], + "filter": { + "type": "string", + "format": "date", + "maximum": "2009-5-16", + }, + } + ] + }, + }, + ], + }, +} + +DIF_PROP_REQ = { + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"} + ], + "constraints": { + "fields": [ + { + "path": ["$.issuer.id", "$.issuer", "$.vc.issuer.id"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": ["did:sov:4cLztgZYocjqTdAZM93t27"], + }, + } + ] + }, + }, + { + "id": "citizenship_input_2", + "name": "US Passport", + "group": ["B"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"} + ], + "constraints": { + "fields": [ + { + "path": ["$.issuanceDate", "$.vc.issuanceDate"], + "filter": { + "type": "string", + "format": "date", + "maximum": "2009-5-16", + }, + } + ] + }, + }, + ] +} class TestPresRequestHandler(AsyncTestCase): @@ -163,7 +280,7 @@ async def test_called_not_found(self): ) assert not responder.messages - async def test_called_auto_present(self): + async def test_called_auto_present_x(self): request_context = RequestContext.test_context() request_context.connection_record = async_mock.MagicMock() request_context.connection_record.connection_id = "dummy" @@ -184,9 +301,10 @@ async def test_called_auto_present(self): AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") ], ) - px_rec_instance = test_module.V20PresExRecord( + mock_px_rec = async_mock.MagicMock( pres_proposal=pres_proposal.serialize(), auto_present=True, + save_error_state=async_mock.CoroutineMock(), ) with async_mock.patch.object( @@ -194,7 +312,7 @@ async def test_called_auto_present(self): ) as mock_pres_mgr, async_mock.patch.object( test_module, "V20PresExRecord", autospec=True ) as mock_pres_ex_rec_cls, async_mock.patch.object( - test_module, "IndyHolder", autospec=True + test_indy_handler, "IndyHolder", autospec=True ) 
as mock_holder: mock_holder.get_credentials_for_presentation_request_by_referent = ( @@ -204,33 +322,29 @@ async def test_called_auto_present(self): ) request_context.inject = async_mock.MagicMock(return_value=mock_holder) - mock_pres_ex_rec_cls.return_value = px_rec_instance + mock_pres_ex_rec_cls.return_value = mock_px_rec mock_pres_ex_rec_cls.retrieve_by_tag_filter = async_mock.CoroutineMock( - return_value=px_rec_instance + return_value=mock_px_rec ) mock_pres_mgr.return_value.receive_pres_request = async_mock.CoroutineMock( - return_value=px_rec_instance + return_value=mock_px_rec ) mock_pres_mgr.return_value.create_pres = async_mock.CoroutineMock( - return_value=(px_rec_instance, "pres message") + side_effect=test_module.IndyHolderError() ) + request_context.connection_ready = True handler = test_module.V20PresRequestHandler() responder = MockResponder() - await handler.handle(request_context, responder) - mock_pres_mgr.return_value.create_pres.assert_called_once() - mock_pres_mgr.return_value.receive_pres_request.assert_called_once_with( - px_rec_instance - ) - messages = responder.messages - assert len(messages) == 1 - (result, target) = messages[0] - assert result == "pres message" - assert target == {} + with async_mock.patch.object( + handler._logger, "exception", async_mock.MagicMock() + ) as mock_log_exc: + await handler.handle(request_context, responder) + mock_log_exc.assert_called_once() - async def test_called_auto_present_x(self): + async def test_called_auto_present_indy(self): request_context = RequestContext.test_context() request_context.connection_record = async_mock.MagicMock() request_context.connection_record.connection_id = "dummy" @@ -251,10 +365,9 @@ async def test_called_auto_present_x(self): AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") ], ) - mock_px_rec = async_mock.MagicMock( + mock_px_rec = test_module.V20PresExRecord( pres_proposal=pres_proposal.serialize(), auto_present=True, - save_error_state=async_mock.CoroutineMock(), ) with async_mock.patch.object( @@ -262,7 +375,7 @@ async def test_called_auto_present_x(self): ) as mock_pres_mgr, async_mock.patch.object( test_module, "V20PresExRecord", autospec=True ) as mock_pres_ex_rec_cls, async_mock.patch.object( - test_module, "IndyHolder", autospec=True + test_indy_handler, "IndyHolder", autospec=True ) as mock_holder: mock_holder.get_credentials_for_presentation_request_by_referent = ( @@ -281,24 +394,99 @@ async def test_called_auto_present_x(self): ) mock_pres_mgr.return_value.create_pres = async_mock.CoroutineMock( - side_effect=test_module.IndyHolderError() + return_value=(mock_px_rec, "pres message") ) request_context.connection_ready = True handler = test_module.V20PresRequestHandler() responder = MockResponder() - with async_mock.patch.object( - handler._logger, "exception", async_mock.MagicMock() - ) as mock_log_exc: - await handler.handle(request_context, responder) - mock_log_exc.assert_called_once() + await handler.handle(request_context, responder) + mock_pres_mgr.return_value.create_pres.assert_called_once() + + mock_pres_mgr.return_value.receive_pres_request.assert_called_once_with( + mock_px_rec + ) + messages = responder.messages + assert len(messages) == 1 + (result, target) = messages[0] + assert result == "pres message" + assert target == {} + + async def test_called_auto_present_dif(self): + request_context = RequestContext.test_context() + request_context.connection_record = async_mock.MagicMock() + request_context.connection_record.connection_id = "dummy" + 
request_context.message = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="dif", + format_=V20PresFormat.Format.DIF.aries, + ) + ] + ) + request_context.message.attachment = async_mock.MagicMock( + return_value=DIF_PROOF_REQ + ) + request_context.message_receipt = MessageReceipt() + pres_proposal = V20PresProposal( + formats=[ + V20PresFormat( + attach_id="dif", + format_=V20PresFormat.Format.DIF.aries, + ) + ], + proposals_attach=[AttachDecorator.data_json(DIF_PROP_REQ, ident="dif")], + ) + + px_rec_instance = test_module.V20PresExRecord( + pres_proposal=pres_proposal, + auto_present=True, + ) + with async_mock.patch.object( + test_module, "V20PresManager", autospec=True + ) as mock_pres_mgr, async_mock.patch.object( + test_module, "V20PresExRecord", autospec=True + ) as mock_pres_ex_rec_cls: + + mock_pres_ex_rec_cls.return_value = px_rec_instance + mock_pres_ex_rec_cls.retrieve_by_tag_filter = async_mock.CoroutineMock( + return_value=px_rec_instance + ) + mock_pres_mgr.return_value.receive_pres_request = async_mock.CoroutineMock( + return_value=px_rec_instance + ) + + mock_pres_mgr.return_value.create_pres = async_mock.CoroutineMock( + return_value=(px_rec_instance, "pres message") + ) + request_context.connection_ready = True + handler_inst = test_module.V20PresRequestHandler() + responder = MockResponder() + await handler_inst.handle(request_context, responder) + mock_pres_mgr.return_value.create_pres.assert_called_once() + + mock_pres_mgr.return_value.receive_pres_request.assert_called_once_with( + px_rec_instance + ) + messages = responder.messages + assert len(messages) == 1 + (result, target) = messages[0] + assert result == "pres message" + assert target == {} async def test_called_auto_present_no_preview(self): request_context = RequestContext.test_context() request_context.connection_record = async_mock.MagicMock() request_context.connection_record.connection_id = "dummy" - request_context.message = V20PresRequest() + request_context.message = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="indy", + format_=V20PresFormat.Format.INDY.aries, + ) + ] + ) request_context.message.attachment = async_mock.MagicMock( return_value=INDY_PROOF_REQ ) @@ -310,7 +498,7 @@ async def test_called_auto_present_no_preview(self): ) as mock_pres_mgr, async_mock.patch.object( test_module, "V20PresExRecord", autospec=True ) as mock_pres_ex_rec_cls, async_mock.patch.object( - test_module, "IndyHolder", autospec=True + test_indy_handler, "IndyHolder", autospec=True ) as mock_holder: mock_holder.get_credentials_for_presentation_request_by_referent = ( @@ -355,17 +543,32 @@ async def test_called_auto_present_pred_no_match(self): request_context.connection_record.connection_id = "dummy" request_context.message = V20PresRequest() request_context.message.attachment = async_mock.MagicMock( - return_value=INDY_PROOF_REQ_PRED + return_value=INDY_PROOF_REQ ) request_context.message_receipt = MessageReceipt() - px_rec_instance = test_module.V20PresExRecord(auto_present=True) + pres_proposal = V20PresProposal( + formats=[ + V20PresFormat( + attach_id="indy", + format_=V20PresFormat.Format.INDY.aries, + ) + ], + proposals_attach=[ + AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") + ], + ) + mock_px_rec = async_mock.MagicMock( + pres_proposal=pres_proposal.serialize(), + auto_present=True, + save_error_state=async_mock.CoroutineMock(), + ) with async_mock.patch.object( test_module, "V20PresManager", autospec=True ) as mock_pres_mgr, async_mock.patch.object( test_module, 
"V20PresExRecord", autospec=True ) as mock_pres_ex_rec_cls, async_mock.patch.object( - test_module, "IndyHolder", autospec=True + test_indy_handler, "IndyHolder", autospec=True ) as mock_holder: mock_holder.get_credentials_for_presentation_request_by_referent = ( @@ -373,33 +576,40 @@ async def test_called_auto_present_pred_no_match(self): ) request_context.inject = async_mock.MagicMock(return_value=mock_holder) - mock_pres_ex_rec_cls.return_value = px_rec_instance + mock_pres_ex_rec_cls.return_value = mock_px_rec mock_pres_ex_rec_cls.retrieve_by_tag_filter = async_mock.CoroutineMock( - return_value=px_rec_instance + return_value=mock_px_rec ) mock_pres_mgr.return_value.receive_pres_request = async_mock.CoroutineMock( - return_value=px_rec_instance + return_value=mock_px_rec ) mock_pres_mgr.return_value.create_pres = async_mock.CoroutineMock( - return_value=(px_rec_instance, "pres message") + side_effect=test_indy_handler.V20PresFormatHandlerError ) request_context.connection_ready = True handler = test_module.V20PresRequestHandler() responder = MockResponder() + await handler.handle(request_context, responder) - mock_pres_mgr.return_value.create_pres.assert_not_called() + mock_px_rec.save_error_state.assert_called_once() mock_pres_mgr.return_value.receive_pres_request.assert_called_once_with( - px_rec_instance + mock_px_rec ) - assert not responder.messages async def test_called_auto_present_pred_single_match(self): request_context = RequestContext.test_context() request_context.connection_record = async_mock.MagicMock() request_context.connection_record.connection_id = "dummy" - request_context.message = V20PresRequest() + request_context.message = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="indy", + format_=V20PresFormat.Format.INDY.aries, + ) + ] + ) request_context.message.attachment = async_mock.MagicMock( return_value=INDY_PROOF_REQ_PRED ) @@ -411,7 +621,7 @@ async def test_called_auto_present_pred_single_match(self): ) as mock_pres_mgr, async_mock.patch.object( test_module, "V20PresExRecord", autospec=True ) as mock_pres_ex_rec_cls, async_mock.patch.object( - test_module, "IndyHolder", autospec=True + test_indy_handler, "IndyHolder", autospec=True ) as mock_holder: mock_holder.get_credentials_for_presentation_request_by_referent = ( @@ -451,7 +661,14 @@ async def test_called_auto_present_pred_multi_match(self): request_context = RequestContext.test_context() request_context.connection_record = async_mock.MagicMock() request_context.connection_record.connection_id = "dummy" - request_context.message = V20PresRequest() + request_context.message = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="indy", + format_=V20PresFormat.Format.INDY.aries, + ) + ] + ) request_context.message.attachment = async_mock.MagicMock( return_value=INDY_PROOF_REQ_PRED ) @@ -463,7 +680,7 @@ async def test_called_auto_present_pred_multi_match(self): ) as mock_pres_mgr, async_mock.patch.object( test_module, "V20PresExRecord", autospec=True ) as mock_pres_ex_rec_cls, async_mock.patch.object( - test_module, "IndyHolder", autospec=True + test_indy_handler, "IndyHolder", autospec=True ) as mock_holder: mock_holder.get_credentials_for_presentation_request_by_referent = ( @@ -506,7 +723,14 @@ async def test_called_auto_present_multi_cred_match_reft(self): request_context = RequestContext.test_context() request_context.connection_record = async_mock.MagicMock() request_context.connection_record.connection_id = "dummy" - request_context.message = V20PresRequest() + request_context.message = 
V20PresRequest( + formats=[ + V20PresFormat( + attach_id="indy", + format_=V20PresFormat.Format.INDY.aries, + ) + ] + ) request_context.message.attachment = async_mock.MagicMock( return_value=INDY_PROOF_REQ ) @@ -532,7 +756,7 @@ async def test_called_auto_present_multi_cred_match_reft(self): ) as mock_pres_mgr, async_mock.patch.object( test_module, "V20PresExRecord", autospec=True ) as mock_pres_ex_rec_cls, async_mock.patch.object( - test_module, "IndyHolder", autospec=True + test_indy_handler, "IndyHolder", autospec=True ) as mock_holder: mock_holder.get_credentials_for_presentation_request_by_referent = ( diff --git a/aries_cloudagent/protocols/present_proof/v2_0/manager.py b/aries_cloudagent/protocols/present_proof/v2_0/manager.py index d35da669d2..69bd99b296 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/manager.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/manager.py @@ -1,34 +1,23 @@ """Classes to manage presentations.""" -import json import logging -import time from typing import Tuple from ....connections.models.conn_record import ConnRecord from ....core.error import BaseError from ....core.profile import Profile -from ....indy.holder import IndyHolder, IndyHolderError -from ....indy.sdk.models.predicate import Predicate -from ....indy.sdk.models.xform import indy_proof_req2non_revoc_intervals -from ....indy.util import generate_pr_nonce -from ....indy.verifier import IndyVerifier -from ....ledger.base import BaseLedger -from ....messaging.decorators.attach_decorator import AttachDecorator from ....messaging.responder import BaseResponder -from ....messaging.util import canon -from ....revocation.models.revocation_registry import RevocationRegistry from ....storage.error import StorageNotFoundError -from .models.pres_exchange import V20PresExRecord -from .message_types import ATTACHMENT_FORMAT, PRES_20_REQUEST, PRES_20 from .messages.pres import V20Pres from .messages.pres_ack import V20PresAck from .messages.pres_format import V20PresFormat from .messages.pres_problem_report import V20PresProblemReport, ProblemReportReason from .messages.pres_proposal import V20PresProposal from .messages.pres_request import V20PresRequest +from .models.pres_exchange import V20PresExRecord + LOGGER = logging.getLogger(__name__) @@ -80,6 +69,7 @@ async def create_exchange_for_proposal( auto_present=auto_present, trace=(pres_proposal_message._trace is not None), ) + async with self._profile.session() as session: await pres_ex_record.save( session, reason="create v2.0 presentation proposal" @@ -106,6 +96,7 @@ async def receive_pres_proposal( pres_proposal=message, trace=(message._trace is not None), ) + async with self._profile.session() as session: await pres_ex_record.save( session, reason="receive v2.0 presentation request" @@ -116,9 +107,7 @@ async def receive_pres_proposal( async def create_bound_request( self, pres_ex_record: V20PresExRecord, - name: str = None, - version: str = None, - nonce: str = None, + request_data: dict = None, comment: str = None, ): """ @@ -127,40 +116,34 @@ async def create_bound_request( Args: pres_ex_record: Presentation exchange record for which to create presentation request - name: name to use in presentation request (None for default) - version: version to use in presentation request (None for default) - nonce: nonce to use in presentation request (None to generate) comment: Optional human-readable comment pertaining to request creation Returns: A tuple (updated presentation exchange record, presentation request message) """ - 
indy_proof_request = pres_ex_record.pres_proposal.attachment( - V20PresFormat.Format.INDY - ) # will change for DIF - - indy_proof_request["name"] = name or "proof-request" - indy_proof_request["version"] = version or "1.0" - indy_proof_request["nonce"] = nonce or await generate_pr_nonce() - + proof_proposal = pres_ex_record.pres_proposal + input_formats = proof_proposal.formats + request_formats = [] + for format in input_formats: + pres_exch_format = V20PresFormat.Format.get(format.format) + + if pres_exch_format: + request_formats.append( + await pres_exch_format.handler(self._profile).create_bound_request( + pres_ex_record, + request_data, + ) + ) + if len(request_formats) == 0: + raise V20PresManagerError( + "Unable to create presentation request. No supported formats" + ) pres_request_message = V20PresRequest( comment=comment, will_confirm=True, - formats=[ - V20PresFormat( - attach_id="indy", - format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ - V20PresFormat.Format.INDY.api - ], - ) - ], - request_presentations_attach=[ - AttachDecorator.data_base64( - mapping=indy_proof_request, - ident="indy", - ) - ], + formats=[format for (format, _) in request_formats], + request_presentations_attach=[attach for (_, attach) in request_formats], ) pres_request_message._thread = {"thid": pres_ex_record.thread_id} pres_request_message.assign_trace_decorator( @@ -230,10 +213,9 @@ async def receive_pres_request(self, pres_ex_record: V20PresExRecord): async def create_pres( self, pres_ex_record: V20PresExRecord, - requested_credentials: dict, - comment: str = None, + request_data: dict = {}, *, - format_: V20PresFormat.Format = None, + comment: str = None, ) -> Tuple[V20PresExRecord, V20Pres]: """ Create a presentation. @@ -270,175 +252,27 @@ async def create_pres( A tuple (updated presentation exchange record, presentation message) """ - assert format_ in (None, V20PresFormat.Format.INDY) # until DIF support - - # Get all credentials for this presentation - holder = self._profile.inject(IndyHolder) - credentials = {} - - # extract credential ids and non_revoked - requested_referents = {} - proof_request = pres_ex_record.pres_request.attachment(format_) - non_revoc_intervals = indy_proof_req2non_revoc_intervals(proof_request) - attr_creds = requested_credentials.get("requested_attributes", {}) - req_attrs = proof_request.get("requested_attributes", {}) - for reft in attr_creds: - requested_referents[reft] = {"cred_id": attr_creds[reft]["cred_id"]} - if reft in req_attrs and reft in non_revoc_intervals: - requested_referents[reft]["non_revoked"] = non_revoc_intervals[reft] - - pred_creds = requested_credentials.get("requested_predicates", {}) - req_preds = proof_request.get("requested_predicates", {}) - for reft in pred_creds: - requested_referents[reft] = {"cred_id": pred_creds[reft]["cred_id"]} - if reft in req_preds and reft in non_revoc_intervals: - requested_referents[reft]["non_revoked"] = non_revoc_intervals[reft] - - # extract mapping of presentation referents to credential ids - for reft in requested_referents: - credential_id = requested_referents[reft]["cred_id"] - if credential_id not in credentials: - credentials[credential_id] = json.loads( - await holder.get_credential(credential_id) - ) - - # remove any timestamps that cannot correspond to non-revoc intervals - for r in ("requested_attributes", "requested_predicates"): - for reft, req_item in requested_credentials.get(r, {}).items(): - if not credentials[req_item["cred_id"]].get( - "rev_reg_id" - ) and req_item.pop("timestamp", None): - 
LOGGER.info( - f"Removed superfluous timestamp from requested_credentials {r} " - f"{reft} for non-revocable credential {req_item['cred_id']}" - ) - - # Get all schemas, credential definitions, and revocation registries in use - ledger = self._profile.inject(BaseLedger) - schemas = {} - cred_defs = {} - revocation_registries = {} - - async with ledger: - for credential in credentials.values(): - schema_id = credential["schema_id"] - if schema_id not in schemas: - schemas[schema_id] = await ledger.get_schema(schema_id) - - cred_def_id = credential["cred_def_id"] - if cred_def_id not in cred_defs: - cred_defs[cred_def_id] = await ledger.get_credential_definition( - cred_def_id - ) - - if credential.get("rev_reg_id"): - revocation_registry_id = credential["rev_reg_id"] - if revocation_registry_id not in revocation_registries: - revocation_registries[ - revocation_registry_id - ] = RevocationRegistry.from_definition( - await ledger.get_revoc_reg_def(revocation_registry_id), True - ) - - # Get delta with non-revocation interval defined in "non_revoked" - # of the presentation request or attributes - epoch_now = int(time.time()) - - revoc_reg_deltas = {} - async with ledger: - for precis in requested_referents.values(): # cred_id, non-revoc interval - credential_id = precis["cred_id"] - if not credentials[credential_id].get("rev_reg_id"): - continue - if "timestamp" in precis: - continue - rev_reg_id = credentials[credential_id]["rev_reg_id"] - reft_non_revoc_interval = precis.get("non_revoked") - if reft_non_revoc_interval: - key = ( - f"{rev_reg_id}_" - f"{reft_non_revoc_interval.get('from', 0)}_" - f"{reft_non_revoc_interval.get('to', epoch_now)}" - ) - if key not in revoc_reg_deltas: - (delta, delta_timestamp) = await ledger.get_revoc_reg_delta( - rev_reg_id, - reft_non_revoc_interval.get("from", 0), - reft_non_revoc_interval.get("to", epoch_now), - ) - revoc_reg_deltas[key] = ( - rev_reg_id, - credential_id, - delta, - delta_timestamp, - ) - for stamp_me in requested_referents.values(): - # often one cred satisfies many requested attrs/preds - if stamp_me["cred_id"] == credential_id: - stamp_me["timestamp"] = revoc_reg_deltas[key][3] - - # Get revocation states to prove non-revoked - revocation_states = {} - for ( - rev_reg_id, - credential_id, - delta, - delta_timestamp, - ) in revoc_reg_deltas.values(): - if rev_reg_id not in revocation_states: - revocation_states[rev_reg_id] = {} - - rev_reg = revocation_registries[rev_reg_id] - tails_local_path = await rev_reg.get_or_fetch_local_tails_path() - - try: - revocation_states[rev_reg_id][delta_timestamp] = json.loads( - await holder.create_revocation_state( - credentials[credential_id]["cred_rev_id"], - rev_reg.reg_def, - delta, - delta_timestamp, - tails_local_path, + proof_request = pres_ex_record.pres_request + input_formats = proof_request.formats + pres_formats = [] + for format in input_formats: + pres_exch_format = V20PresFormat.Format.get(format.format) + + if pres_exch_format: + pres_formats.append( + await pres_exch_format.handler(self._profile).create_pres( + pres_ex_record, + request_data, ) ) - except IndyHolderError as e: - LOGGER.error( - f"Failed to create revocation state: {e.error_code}, {e.message}" - ) - raise e - - for (referent, precis) in requested_referents.items(): - if "timestamp" not in precis: - continue - if referent in requested_credentials["requested_attributes"]: - requested_credentials["requested_attributes"][referent][ - "timestamp" - ] = precis["timestamp"] - if referent in 
requested_credentials["requested_predicates"]: - requested_credentials["requested_predicates"][referent][ - "timestamp" - ] = precis["timestamp"] - - indy_proof_json = await holder.create_presentation( - proof_request, - requested_credentials, - schemas, - cred_defs, - revocation_states, - ) - indy_proof = json.loads(indy_proof_json) - + if len(pres_formats) == 0: + raise V20PresManagerError( + "Unable to create presentation. No supported formats" + ) pres_message = V20Pres( comment=comment, - formats=[ - V20PresFormat( - attach_id="indy", - format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], - ) - ], - presentations_attach=[ - AttachDecorator.data_base64(mapping=indy_proof, ident="indy") - ], + formats=[format for (format, _) in pres_formats], + presentations_attach=[attach for (_, attach) in pres_formats], ) pres_message._thread = {"thid": pres_ex_record.thread_id} @@ -449,22 +283,11 @@ async def create_pres( # save presentation exchange state pres_ex_record.state = V20PresExRecord.STATE_PRESENTATION_SENT pres_ex_record.pres = V20Pres( - formats=[ - V20PresFormat( - attach_id="indy", - format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], - ), - ], - presentations_attach=[ - AttachDecorator.data_base64( - mapping=indy_proof, - ident="indy", - ) - ], + formats=[format for (format, _) in pres_formats], + presentations_attach=[attach for (_, attach) in pres_formats], ) async with self._profile.session() as session: await pres_ex_record.save(session, reason="create v2.0 presentation") - return pres_ex_record, pres_message async def receive_pres(self, message: V20Pres, conn_record: ConnRecord): @@ -476,133 +299,6 @@ async def receive_pres(self, message: V20Pres, conn_record: ConnRecord): """ - def _check_proof_vs_proposal(): - """Check for bait and switch in presented values vs. 
proposal request.""" - proof_req = pres_ex_record.pres_request.attachment( - V20PresFormat.Format.INDY - ) # will change for DIF - - # revealed attrs - for reft, attr_spec in proof["requested_proof"]["revealed_attrs"].items(): - proof_req_attr_spec = proof_req["requested_attributes"].get(reft) - if not proof_req_attr_spec: - raise V20PresManagerError( - f"Presentation referent {reft} not in proposal request" - ) - req_restrictions = proof_req_attr_spec.get("restrictions", {}) - - name = proof_req_attr_spec["name"] - proof_value = attr_spec["raw"] - sub_proof_index = attr_spec["sub_proof_index"] - schema_id = proof["identifiers"][sub_proof_index]["schema_id"] - cred_def_id = proof["identifiers"][sub_proof_index]["cred_def_id"] - criteria = { - "schema_id": schema_id, - "schema_issuer_did": schema_id.split(":")[-4], - "schema_name": schema_id.split(":")[-2], - "schema_version": schema_id.split(":")[-1], - "cred_def_id": cred_def_id, - "issuer_did": cred_def_id.split(":")[-5], - f"attr::{name}::value": proof_value, - } - - if not any(r.items() <= criteria.items() for r in req_restrictions): - raise V20PresManagerError( - f"Presented attribute {reft} does not satisfy proof request " - f"restrictions {req_restrictions}" - ) - - # revealed attr groups - for reft, attr_spec in ( - proof["requested_proof"].get("revealed_attr_groups", {}).items() - ): - proof_req_attr_spec = proof_req["requested_attributes"].get(reft) - if not proof_req_attr_spec: - raise V20PresManagerError( - f"Presentation referent {reft} not in proposal request" - ) - req_restrictions = proof_req_attr_spec.get("restrictions", {}) - proof_values = { - name: values["raw"] for name, values in attr_spec["values"].items() - } - sub_proof_index = attr_spec["sub_proof_index"] - schema_id = proof["identifiers"][sub_proof_index]["schema_id"] - cred_def_id = proof["identifiers"][sub_proof_index]["cred_def_id"] - criteria = { - "schema_id": schema_id, - "schema_issuer_did": schema_id.split(":")[-4], - "schema_name": schema_id.split(":")[-2], - "schema_version": schema_id.split(":")[-1], - "cred_def_id": cred_def_id, - "issuer_did": cred_def_id.split(":")[-5], - **{ - f"attr::{name}::value": value - for name, value in proof_values.items() - }, - } - - if not any(r.items() <= criteria.items() for r in req_restrictions): - raise V20PresManagerError( - f"Presented attr group {reft} does not satisfy proof request " - f"restrictions {req_restrictions}" - ) - - # predicate bounds - for reft, pred_spec in proof["requested_proof"]["predicates"].items(): - proof_req_pred_spec = proof_req["requested_predicates"].get(reft) - if not proof_req_pred_spec: - raise V20PresManagerError( - f"Presentation referent {reft} not in proposal request" - ) - req_name = proof_req_pred_spec["name"] - req_pred = Predicate.get(proof_req_pred_spec["p_type"]) - req_value = proof_req_pred_spec["p_value"] - req_restrictions = proof_req_pred_spec.get("restrictions", {}) - for req_restriction in req_restrictions: - for k in [k for k in req_restriction]: # cannot modify en passant - if k.startswith("attr::"): - req_restriction.pop(k) # let indy-sdk reject mismatch here - - sub_proof_index = pred_spec["sub_proof_index"] - for ge_proof in proof["proof"]["proofs"][sub_proof_index][ - "primary_proof" - ]["ge_proofs"]: - proof_pred_spec = ge_proof["predicate"] - if proof_pred_spec["attr_name"] != canon(req_name): - continue - if not ( - Predicate.get(proof_pred_spec["p_type"]) is req_pred - and proof_pred_spec["value"] == req_value - ): - raise V20PresManagerError( - 
f"Presentation predicate on {req_name} " - "mismatches proposal request" - ) - break - else: - raise V20PresManagerError( - f"Proposed request predicate on {req_name} not in presentation" - ) - - schema_id = proof["identifiers"][sub_proof_index]["schema_id"] - cred_def_id = proof["identifiers"][sub_proof_index]["cred_def_id"] - criteria = { - "schema_id": schema_id, - "schema_issuer_did": schema_id.split(":")[-4], - "schema_name": schema_id.split(":")[-2], - "schema_version": schema_id.split(":")[-1], - "cred_def_id": cred_def_id, - "issuer_did": cred_def_id.split(":")[-5], - } - - if not any(r.items() <= criteria.items() for r in req_restrictions): - raise V20PresManagerError( - f"Presented predicate {reft} does not satisfy proof request " - f"restrictions {req_restrictions}" - ) - - proof = message.attachment(V20PresFormat.Format.INDY) - thread_id = message._thread_id conn_id_filter = ( None @@ -620,8 +316,16 @@ def _check_proof_vs_proposal(): session, {"thread_id": thread_id}, None ) - _check_proof_vs_proposal() + input_formats = message.formats + + for format in input_formats: + pres_format = V20PresFormat.Format.get(format.format) + if pres_format: + await pres_format.handler(self._profile).receive_pres( + message, + pres_ex_record, + ) pres_ex_record.pres = message pres_ex_record.state = V20PresExRecord.STATE_PRESENTATION_RECEIVED @@ -643,73 +347,17 @@ async def verify_pres(self, pres_ex_record: V20PresExRecord): """ pres_request_msg = pres_ex_record.pres_request - indy_proof_request = pres_request_msg.attachment(V20PresFormat.Format.INDY) - indy_proof = pres_ex_record.pres.attachment( - V20PresFormat.Format.INDY - ) # will change for DIF - - schema_ids = [] - cred_def_ids = [] - - schemas = {} - cred_defs = {} - rev_reg_defs = {} - rev_reg_entries = {} - - identifiers = indy_proof["identifiers"] - ledger = self._profile.inject(BaseLedger) - async with ledger: - for identifier in identifiers: - schema_ids.append(identifier["schema_id"]) - cred_def_ids.append(identifier["cred_def_id"]) - - # Build schemas for anoncreds - if identifier["schema_id"] not in schemas: - schemas[identifier["schema_id"]] = await ledger.get_schema( - identifier["schema_id"] - ) - - if identifier["cred_def_id"] not in cred_defs: - cred_defs[ - identifier["cred_def_id"] - ] = await ledger.get_credential_definition( - identifier["cred_def_id"] - ) + input_formats = pres_request_msg.formats + for format in input_formats: + pres_exch_format = V20PresFormat.Format.get(format.format) + + if pres_exch_format: + pres_ex_record = await pres_exch_format.handler( + self._profile + ).verify_pres( + pres_ex_record, + ) - if identifier.get("rev_reg_id"): - if identifier["rev_reg_id"] not in rev_reg_defs: - rev_reg_defs[ - identifier["rev_reg_id"] - ] = await ledger.get_revoc_reg_def(identifier["rev_reg_id"]) - - if identifier.get("timestamp"): - rev_reg_entries.setdefault(identifier["rev_reg_id"], {}) - - if ( - identifier["timestamp"] - not in rev_reg_entries[identifier["rev_reg_id"]] - ): - ( - found_rev_reg_entry, - _found_timestamp, - ) = await ledger.get_revoc_reg_entry( - identifier["rev_reg_id"], identifier["timestamp"] - ) - rev_reg_entries[identifier["rev_reg_id"]][ - identifier["timestamp"] - ] = found_rev_reg_entry - - verifier = self._profile.inject(IndyVerifier) - pres_ex_record.verified = json.dumps( # tag: needs string value - await verifier.verify_presentation( - indy_proof_request, - indy_proof, - schemas, - cred_defs, - rev_reg_defs, - rev_reg_entries, - ) - ) pres_ex_record.state = 
V20PresExRecord.STATE_DONE async with self._profile.session() as session: diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres.py index f60e95c20f..d6b7861d9e 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres.py @@ -1,10 +1,8 @@ """A (proof) presentation content message.""" +from marshmallow import EXCLUDE, fields, validates_schema, ValidationError from typing import Sequence -from marshmallow import EXCLUDE, fields, RAISE, validates_schema, ValidationError - -from .....indy.sdk.models.proof import IndyProofSchema from .....messaging.agent_message import AgentMessage, AgentMessageSchema from .....messaging.decorators.attach_decorator import ( AttachDecorator, @@ -118,5 +116,6 @@ def get_attach_by_id(attach_id): for fmt in formats: atch = get_attach_by_id(fmt.attach_id) - if V20PresFormat.Format.get(fmt.format) is V20PresFormat.Format.INDY: - IndyProofSchema(unknown=RAISE).load(atch.content) + pres_format = V20PresFormat.Format.get(fmt.format) + if pres_format: + pres_format.validate_fields(PRES_20, atch.content) diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py index c492a2fd5c..e9c818a8f1 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py @@ -2,18 +2,20 @@ from collections import namedtuple from enum import Enum -from typing import Sequence, Union -from uuid import uuid4 - from marshmallow import EXCLUDE, fields +from typing import Mapping, Sequence, Type, Union, TYPE_CHECKING +from uuid import uuid4 from .....messaging.decorators.attach_decorator import AttachDecorator from .....messaging.models.base import BaseModel, BaseModelSchema from .....messaging.valid import UUIDFour +from .....utils.classloader import DeferLoad +if TYPE_CHECKING: + from ..formats.handler import V20PresFormatHandler # aries prefix -FormatSpec = namedtuple("FormatSpec", "aries") +FormatSpec = namedtuple("FormatSpec", "aries handler") class V20PresFormat(BaseModel): @@ -27,8 +29,20 @@ class Meta: class Format(Enum): """Attachment format.""" - INDY = FormatSpec("hlindy/") - DIF = FormatSpec("dif/") + INDY = FormatSpec( + "hlindy/", + DeferLoad( + "aries_cloudagent.protocols.present_proof.v2_0" + ".formats.indy.handler.IndyPresExchangeHandler" + ), + ) + DIF = FormatSpec( + "dif/", + DeferLoad( + "aries_cloudagent.protocols.present_proof.v2_0" + ".formats.dif.handler.DIFPresFormatHandler" + ), + ) @classmethod def get(cls, label: Union[str, "V20PresFormat.Format"]): @@ -52,6 +66,15 @@ def aries(self) -> str: """Accessor for aries identifier.""" return self.value.aries + @property + def handler(self) -> Type["V20PresFormatHandler"]: + """Accessor for presentation exchange format handler.""" + return self.value.handler.resolved + + def validate_fields(self, message_type: str, attachment_data: Mapping): + """Raise ValidationError for invalid attachment formats.""" + self.handler.validate_fields(message_type, attachment_data) + def get_attachment_data( self, formats: Sequence["V20PresFormat"], diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_proposal.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_proposal.py index b3b9d6c6d3..4f1dea6c1b 100644 --- 
a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_proposal.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_proposal.py @@ -1,10 +1,8 @@ """A presentation proposal content message.""" +from marshmallow import EXCLUDE, fields, validates_schema, ValidationError from typing import Sequence -from marshmallow import EXCLUDE, fields, RAISE, validates_schema, ValidationError - -from .....indy.sdk.models.proof_request import IndyProofRequestSchema from .....messaging.agent_message import AgentMessage, AgentMessageSchema from .....messaging.decorators.attach_decorator import ( AttachDecorator, @@ -120,5 +118,7 @@ def get_attach_by_id(attach_id): for fmt in formats: atch = get_attach_by_id(fmt.attach_id) - if V20PresFormat.Format.get(fmt.format) is V20PresFormat.Format.INDY: - IndyProofRequestSchema(unknown=RAISE).load(atch.content) + pres_format = V20PresFormat.Format.get(fmt.format) + + if pres_format: + pres_format.validate_fields(PRES_20_PROPOSAL, atch.content) diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py index e75b2daec0..51296edd7a 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py @@ -1,10 +1,8 @@ """A presentation request content message.""" +from marshmallow import EXCLUDE, fields, validates_schema, ValidationError from typing import Sequence -from marshmallow import EXCLUDE, fields, RAISE, validates_schema, ValidationError - -from .....indy.sdk.models.proof_request import IndyProofRequestSchema from .....messaging.agent_message import AgentMessage, AgentMessageSchema from .....messaging.decorators.attach_decorator import ( AttachDecorator, @@ -130,5 +128,7 @@ def get_attach_by_id(attach_id): for fmt in formats: atch = get_attach_by_id(fmt.attach_id) - if V20PresFormat.Format.get(fmt.format) is V20PresFormat.Format.INDY: - IndyProofRequestSchema(unknown=RAISE).load(atch.content) + + pres_format = V20PresFormat.Format.get(fmt.format) + if pres_format: + pres_format.validate_fields(PRES_20_REQUEST, atch.content) diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_format.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_format.py index ee46533e0b..90a62f5465 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_format.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_format.py @@ -2,7 +2,7 @@ from marshmallow import ValidationError -from ......indy.sdk.models.pres_preview import ( +from ......indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPreview, IndyPresPredSpec, diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_proposal.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_proposal.py index 2eb0bc2038..a2815c2ce4 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_proposal.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_proposal.py @@ -2,7 +2,7 @@ from unittest import TestCase -from ......indy.sdk.models.pres_preview import ( +from ......indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPredSpec, IndyPresPreview, diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_request.py 
b/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_request.py index 3ed2b6962d..91b87bfa6e 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_request.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/tests/test_pres_request.py @@ -4,7 +4,7 @@ from datetime import datetime, timezone from unittest import TestCase -from ......indy.sdk.models.pres_preview import PRESENTATION_PREVIEW +from ......indy.models.pres_preview import PRESENTATION_PREVIEW from ......messaging.decorators.attach_decorator import AttachDecorator from ......messaging.models.base import BaseModelError from ......messaging.util import str_to_datetime, str_to_epoch diff --git a/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py b/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py index 4838f55f6b..3f1922cb8d 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py @@ -1,7 +1,7 @@ from asynctest import mock as async_mock, TestCase as AsyncTestCase from ......core.in_memory import InMemoryProfile -from ......indy.sdk.models.pres_preview import ( +from ......indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPredSpec, IndyPresPreview, diff --git a/aries_cloudagent/protocols/present_proof/v2_0/routes.py b/aries_cloudagent/protocols/present_proof/v2_0/routes.py index 8d093db066..681e506d9b 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/routes.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/routes.py @@ -2,7 +2,7 @@ import json -from typing import Mapping +from typing import Mapping, Sequence, Tuple from aiohttp import web from aiohttp_apispec import ( @@ -17,9 +17,9 @@ from ....admin.request_context import AdminRequestContext from ....connections.models.conn_record import ConnRecord from ....indy.holder import IndyHolder, IndyHolderError -from ....indy.sdk.models.cred_precis import IndyCredPrecisSchema -from ....indy.sdk.models.proof import IndyPresSpecSchema -from ....indy.sdk.models.proof_request import IndyProofRequestSchema +from ....indy.models.cred_precis import IndyCredPrecisSchema +from ....indy.models.proof import IndyPresSpecSchema +from ....indy.models.proof_request import IndyProofRequestSchema from ....indy.util import generate_pr_nonce from ....ledger.error import LedgerError from ....messaging.decorators.attach_decorator import AttachDecorator @@ -33,10 +33,21 @@ UUID4, ) from ....storage.error import StorageError, StorageNotFoundError +from ....storage.vc_holder.base import VCHolder +from ....storage.vc_holder.vc_record import VCRecord from ....utils.tracing import trace_event, get_timer, AdminAPIMessageTracingSchema +from ....vc.ld_proofs.constants import EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE from ....wallet.error import WalletNotFoundError +from ..dif.pres_exch import InputDescriptors +from ..dif.pres_proposal_schema import DIFProofProposalSchema +from ..dif.pres_request_schema import ( + DIFProofRequestSchema, + DIFPresSpecSchema, +) + from . 
import problem_report_for_record, report_problem +from .formats.handler import V20PresFormatHandlerError from .manager import V20PresManager from .message_types import ( ATTACHMENT_FORMAT, @@ -101,33 +112,6 @@ class V20PresExRecordListSchema(OpenAPISchema): ) -class DIFPresProposalSchema(OpenAPISchema): - """DIF presentation proposal schema placeholder.""" - - some_dif = fields.Str( - description="Placeholder for W3C/DIF/JSON-LD presentation proposal format", - required=False, - ) - - -class DIFPresRequestSchema(OpenAPISchema): - """DIF presentation request schema placeholder.""" - - some_dif = fields.Str( - description="Placeholder for W3C/DIF/JSON-LD presentation request format", - required=False, - ) - - -class DIFPresSpecSchema(OpenAPISchema): - """DIF presentation schema specification placeholder.""" - - some_dif = fields.Str( - description="Placeholder for W3C/DIF/JSON-LD presentation format", - required=False, - ) - - class V20PresProposalByFormatSchema(OpenAPISchema): """Schema for presentation proposal per format.""" @@ -137,7 +121,7 @@ class V20PresProposalByFormatSchema(OpenAPISchema): description="Presentation proposal for indy", ) dif = fields.Nested( - DIFPresProposalSchema, + DIFProofProposalSchema, required=False, description="Presentation proposal for DIF", ) @@ -197,7 +181,7 @@ class V20PresRequestByFormatSchema(OpenAPISchema): description="Presentation request for indy", ) dif = fields.Nested( - DIFPresRequestSchema, + DIFProofRequestSchema, required=False, description="Presentation request for DIF", ) @@ -251,7 +235,10 @@ class V20PresSpecByFormatRequestSchema(AdminAPIMessageTracingSchema): dif = fields.Nested( DIFPresSpecSchema, required=False, - description="Presentation specification for DIF", + description=( + "Optional Presentation specification for DIF, " + "overrides the PresentationExchange record's PresRequest" + ), ) @validates_schema @@ -322,7 +309,14 @@ async def _add_nonce(indy_proof_request: Mapping) -> Mapping: def _formats_attach(by_format: Mapping, msg_type: str, spec: str) -> Mapping: """Break out formats and proposals/requests/presentations for v2.0 messages.""" - + attach = [] + for (fmt_api, item_by_fmt) in by_format.items(): + if fmt_api == V20PresFormat.Format.INDY.api: + attach.append( + AttachDecorator.data_base64(mapping=item_by_fmt, ident=fmt_api) + ) + elif fmt_api == V20PresFormat.Format.DIF.api: + attach.append(AttachDecorator.data_json(mapping=item_by_fmt, ident=fmt_api)) return { "formats": [ V20PresFormat( @@ -331,10 +325,7 @@ def _formats_attach(by_format: Mapping, msg_type: str, spec: str) -> Mapping: ) for fmt_api in by_format ], - f"{spec}_attach": [ - AttachDecorator.data_base64(mapping=item_by_fmt, ident=fmt_api) - for (fmt_api, item_by_fmt) in by_format.items() - ], + f"{spec}_attach": attach, } @@ -464,19 +455,23 @@ async def present_proof_credentials_list(request: web.BaseRequest): start = int(start) if isinstance(start, str) else 0 count = int(count) if isinstance(count, str) else 10 - holder = context.profile.inject(IndyHolder) + indy_holder = context.profile.inject(IndyHolder) + indy_credentials = [] + # INDY try: - # TODO: allow for choice of format from those specified in pres req - pres_request = pres_ex_record.by_format["pres_request"].get( + indy_pres_request = pres_ex_record.by_format["pres_request"].get( V20PresFormat.Format.INDY.api ) - credentials = await holder.get_credentials_for_presentation_request_by_referent( - pres_request, - pres_referents, - start, - count, - extra_query, - ) + if indy_pres_request: + 
indy_credentials = ( + await indy_holder.get_credentials_for_presentation_request_by_referent( + indy_pres_request, + pres_referents, + start, + count, + extra_query, + ) + ) except IndyHolderError as err: if pres_ex_record: async with context.session() as session: @@ -489,19 +484,99 @@ async def present_proof_credentials_list(request: web.BaseRequest): outbound_handler, ) - pres_ex_record.log_state( - "Retrieved presentation credentials", - { - "presentation_exchange_id": pres_ex_id, - "referents": pres_referents, - "extra_query": extra_query, - "credentials": credentials, - }, - settings=context.settings, - ) + dif_holder = context.profile.inject(VCHolder) + dif_credentials = [] + dif_cred_value_list = [] + # DIF + try: + dif_pres_request = pres_ex_record.by_format["pres_request"].get( + V20PresFormat.Format.DIF.api + ) + if dif_pres_request: + input_descriptors_list = dif_pres_request.get( + "presentation_definition" + ).get("input_descriptors") + input_descriptors = [] + for input_desc_dict in input_descriptors_list: + input_descriptors.append(InputDescriptors.deserialize(input_desc_dict)) + record_ids = set() + for input_descriptor in input_descriptors: + expanded_types = set() + schema_ids = set() + for schema in input_descriptor.schemas: + uri = schema.uri + required = schema.required or True + if required: + # JSONLD Expanded URLs + if "#" in uri: + expanded_types.add(uri) + else: + schema_ids.add(uri) + if len(schema_ids) == 0: + schema_ids_list = None + else: + schema_ids_list = list(schema_ids) + if len(expanded_types) == 0: + expanded_types_list = None + else: + expanded_types_list = list(expanded_types) + # Raise Exception if expanded type extracted from + # CREDENTIALS_CONTEXT_V1_URL and + # VERIFIABLE_CREDENTIAL_TYPE is the only schema.uri + # specified in the presentation_definition. 
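+                # (that expanded type appears on every W3C verifiable credential, + # so a search keyed on it alone would match every stored credential)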
+                if expanded_types_list and len(expanded_types_list) == 1: + if expanded_types_list[0] in [ + EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE + ]: + raise V20PresFormatHandlerError( + "Only expanded type extracted from " + "CREDENTIALS_CONTEXT_V1_URL " + "and VERIFIABLE_CREDENTIAL_TYPE " + "included as the schema.uri" + ) + search = dif_holder.search_credentials( + types=expanded_types_list, + schema_ids=schema_ids_list, + ) + records = await search.fetch(count) + # Avoiding addition of duplicate records + vcrecord_list, vcrecord_ids_set = await process_vcrecords_return_list( + records, record_ids + ) + record_ids = vcrecord_ids_set + dif_credentials = dif_credentials + vcrecord_list + for dif_credential in dif_credentials: + dif_cred_value_list.append(dif_credential.cred_value) + except ( + StorageNotFoundError, + V20PresFormatHandlerError, + ) as err: + if pres_ex_record: + async with context.session() as session: + await pres_ex_record.save_error_state(session, reason=err.roll_up) + await report_problem( + err, + ProblemReportReason.ABANDONED.value, + web.HTTPBadRequest, + pres_ex_record, + outbound_handler, + ) + credentials = indy_credentials + dif_cred_value_list return web.json_response(credentials) +async def process_vcrecords_return_list( + vc_records: Sequence[VCRecord], record_ids: set +) -> Tuple[Sequence[VCRecord], set]: + """Return list of non-duplicate VCRecords.""" + to_add = [] + for vc_record in vc_records: + if vc_record.record_id not in record_ids: + to_add.append(vc_record) + record_ids.add(vc_record.record_id) + return (to_add, record_ids) + + @docs(tags=["present-proof v2.0"], summary="Sends a presentation proposal") @request_schema(V20PresProposalRequestSchema()) @response_schema(V20PresExRecordSchema(), 200, description="") @@ -834,8 +909,8 @@ async def present_proof_send_presentation(request: web.BaseRequest): outbound_handler = request["outbound_message_router"] pres_ex_id = request.match_info["pres_ex_id"] body = await request.json() - fmt = V20PresFormat.Format.get([f for f in body][0]) # "indy" xor "dif" - + fmt = V20PresFormat.Format.get([f for f in body][0]).api # "indy" xor "dif" + comment = body.get("comment") pres_ex_record = None async with context.session() as session: try: @@ -863,22 +938,18 @@ async def present_proof_send_presentation(request: web.BaseRequest): pres_manager = V20PresManager(context.profile) try: - indy_spec = body.get(V20PresFormat.Format.INDY.api) # TODO: accommodate DIF + request_data = {fmt: body.get(fmt)} pres_ex_record, pres_message = await pres_manager.create_pres( pres_ex_record, - { - "self_attested_attributes": indy_spec["self_attested_attributes"], - "requested_attributes": indy_spec["requested_attributes"], - "requested_predicates": indy_spec["requested_predicates"], - }, - comment=body.get("comment"), - format_=fmt, + request_data=request_data, + comment=comment, ) result = pres_ex_record.serialize() except ( BaseModelError, IndyHolderError, LedgerError, + V20PresFormatHandlerError, StorageError, WalletNotFoundError, ) as err: diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py index 61697388d2..045ab829d3 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py @@ -7,25 +7,23 @@ from .....core.in_memory import InMemoryProfile from .....indy.holder import IndyHolder -from .....indy.sdk.holder import IndySdkHolder -from .....indy.issuer import IndyIssuer 
-from .....indy.sdk.models.pres_preview import ( +from .....indy.models.xform import indy_proof_req_preview2indy_requested_creds +from .....indy.models.pres_preview import ( IndyPresAttrSpec, IndyPresPreview, IndyPresPredSpec, ) +from .....indy.verifier import IndyVerifier from .....ledger.base import BaseLedger from .....messaging.decorators.attach_decorator import AttachDecorator -from .....messaging.request_context import RequestContext from .....messaging.responder import BaseResponder, MockResponder from .....storage.error import StorageNotFoundError -from .....indy.sdk.models.xform import indy_proof_req_preview2indy_requested_creds -from .....indy.sdk.verifier import IndySdkVerifier -from .....indy.verifier import IndyVerifier -from ....didcomm_prefix import DIDCommPrefix +from ...indy import pres_exch_handler as test_indy_util_module from .. import manager as test_module +from ..formats.handler import V20PresFormatHandlerError +from ..formats.indy import handler as test_indy_handler from ..manager import V20PresManager, V20PresManagerError from ..message_types import ( ATTACHMENT_FORMAT, @@ -34,14 +32,12 @@ PRES_20, ) from ..messages.pres import V20Pres -from ..messages.pres_ack import V20PresAck from ..messages.pres_format import V20PresFormat from ..messages.pres_problem_report import V20PresProblemReport from ..messages.pres_proposal import V20PresProposal from ..messages.pres_request import V20PresRequest from ..models.pres_exchange import V20PresExRecord - CONN_ID = "connection_id" ISSUER_DID = "NcYxiDXkpYi6ov5FcYDi1e" S_ID = f"{ISSUER_DID}:2:vidya:1.0" @@ -502,7 +498,11 @@ async def test_record_eq(self): assert diff[i] == diff[j] if i == j else diff[i] != diff[j] async def test_create_exchange_for_proposal(self): - proposal = V20PresProposal() + proposal = V20PresProposal( + formats=[ + V20PresFormat(attach_id="indy", format_=V20PresFormat.Format.INDY.aries) + ] + ) with async_mock.patch.object( V20PresExRecord, "save", autospec=True @@ -521,8 +521,11 @@ async def test_create_exchange_for_proposal(self): async def test_receive_proposal(self): connection_record = async_mock.MagicMock(connection_id=CONN_ID) - proposal = V20PresProposal() - + proposal = V20PresProposal( + formats=[ + V20PresFormat(attach_id="indy", format_=V20PresFormat.Format.INDY.aries) + ] + ) with async_mock.patch.object(V20PresExRecord, "save", autospec=True) as save_ex: px_rec = await self.manager.receive_pres_proposal( proposal, @@ -553,16 +556,53 @@ async def test_create_bound_request(self): role=V20PresExRecord.ROLE_VERIFIER, ) px_rec.save = async_mock.CoroutineMock() + request_data = { + "name": PROOF_REQ_NAME, + "version": PROOF_REQ_VERSION, + "nonce": PROOF_REQ_NONCE, + } (ret_px_rec, pres_req_msg) = await self.manager.create_bound_request( pres_ex_record=px_rec, - name=PROOF_REQ_NAME, - version=PROOF_REQ_VERSION, - nonce=PROOF_REQ_NONCE, + request_data=request_data, comment=comment, ) assert ret_px_rec is px_rec px_rec.save.assert_called_once() + async def test_create_bound_request_no_format(self): + px_rec = V20PresExRecord( + pres_proposal=V20PresProposal( + formats=[], + proposals_attach=[], + ).serialize(), + role=V20PresExRecord.ROLE_VERIFIER, + ) + with self.assertRaises(V20PresManagerError) as context: + await self.manager.create_bound_request( + pres_ex_record=px_rec, + request_data={}, + comment="test", + ) + assert "No supported formats" in str(context.exception) + + async def test_create_pres_no_format(self): + px_rec = V20PresExRecord( + pres_proposal=V20PresProposal( + formats=[], + 
proposals_attach=[], + ).serialize(), + pres_request=V20PresRequest( + formats=[], request_presentations_attach=[] + ).serialize(), + ) + with self.assertRaises(V20PresManagerError) as context: + await self.manager.create_pres( + pres_ex_record=px_rec, + request_data={}, + comment="test", + ) + assert "No supported formats" in str(context.exception) + async def test_create_exchange_for_request(self): pres_req = V20PresRequest( comment="Test", @@ -599,7 +639,7 @@ async def test_receive_pres_request(self): assert px_rec_out.state == V20PresExRecord.STATE_REQUEST_RECEIVED - async def test_create_pres(self): + async def test_create_pres_indy(self): pres_request = V20PresRequest( formats=[ V20PresFormat( @@ -622,9 +662,9 @@ async def test_create_pres(self): with async_mock.patch.object( V20PresExRecord, "save", autospec=True ) as save_ex, async_mock.patch.object( - test_module, "AttachDecorator", autospec=True + test_indy_handler, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -635,12 +675,13 @@ async def test_create_pres(self): req_creds = await indy_proof_req_preview2indy_requested_creds( INDY_PROOF_REQ_NAME, preview=None, holder=self.holder ) + request_data = {"indy": req_creds} assert not req_creds["self_attested_attributes"] assert len(req_creds["requested_attributes"]) == 2 assert len(req_creds["requested_predicates"]) == 1 (px_rec_out, pres_msg) = await self.manager.create_pres( - px_rec_in, req_creds + px_rec_in, request_data ) save_ex.assert_called_once() assert px_rec_out.state == V20PresExRecord.STATE_PRESENTATION_SENT @@ -671,9 +712,9 @@ async def test_create_pres_proof_req_non_revoc_interval_none(self): with async_mock.patch.object( V20PresExRecord, "save", autospec=True ) as save_ex, async_mock.patch.object( - test_module, "AttachDecorator", autospec=True + test_indy_handler, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -684,12 +725,13 @@ async def test_create_pres_proof_req_non_revoc_interval_none(self): req_creds = await indy_proof_req_preview2indy_requested_creds( indy_proof_req_vcx, preview=None, holder=self.holder ) + request_data = {"indy": req_creds} assert not req_creds["self_attested_attributes"] assert len(req_creds["requested_attributes"]) == 2 assert len(req_creds["requested_predicates"]) == 1 (px_rec_out, pres_msg) = await self.manager.create_pres( - px_rec_in, req_creds + px_rec_in, request_data ) save_ex.assert_called_once() assert px_rec_out.state == V20PresExRecord.STATE_PRESENTATION_SENT @@ -718,9 +760,9 @@ async def test_create_pres_self_asserted(self): with async_mock.patch.object( V20PresExRecord, "save", autospec=True ) as save_ex, async_mock.patch.object( - test_module, "AttachDecorator", autospec=True + test_indy_handler, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -731,12 +773,14 @@ async def test_create_pres_self_asserted(self): 
req_creds = await indy_proof_req_preview2indy_requested_creds( INDY_PROOF_REQ_SELFIE, preview=None, holder=self.holder ) + request_data = {"indy": req_creds} + assert len(req_creds["self_attested_attributes"]) == 3 assert not req_creds["requested_attributes"] assert not req_creds["requested_predicates"] (px_rec_out, pres_msg) = await self.manager.create_pres( - px_rec_in, req_creds + px_rec_in, request_data ) save_ex.assert_called_once() assert px_rec_out.state == V20PresExRecord.STATE_PRESENTATION_SENT @@ -798,9 +842,9 @@ async def test_create_pres_no_revocation(self): with async_mock.patch.object( V20PresExRecord, "save", autospec=True ) as save_ex, async_mock.patch.object( - test_module, "AttachDecorator", autospec=True + test_indy_handler, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module.LOGGER, "info", async_mock.MagicMock() + test_indy_util_module.LOGGER, "info", async_mock.MagicMock() ) as mock_log_info: mock_attach_decorator.data_base64 = async_mock.MagicMock( return_value=mock_attach_decorator @@ -809,9 +853,16 @@ async def test_create_pres_no_revocation(self): req_creds = await indy_proof_req_preview2indy_requested_creds( INDY_PROOF_REQ_NAME, preview=None, holder=self.holder ) + request_data = { + "indy": { + "self_attested_attributes": req_creds["self_attested_attributes"], + "requested_attributes": req_creds["requested_attributes"], + "requested_predicates": req_creds["requested_predicates"], + } + } (px_rec_out, pres_msg) = await self.manager.create_pres( - px_rec_in, req_creds + px_rec_in, request_data ) save_ex.assert_called_once() assert px_rec_out.state == V20PresExRecord.STATE_PRESENTATION_SENT @@ -819,7 +870,14 @@ async def test_create_pres_no_revocation(self): # exercise superfluous timestamp removal for pred_reft_spec in req_creds["requested_predicates"].values(): pred_reft_spec["timestamp"] = 1234567890 - await self.manager.create_pres(px_rec_in, req_creds) + request_data = { + "indy": { + "self_attested_attributes": req_creds["self_attested_attributes"], + "requested_attributes": req_creds["requested_attributes"], + "requested_predicates": req_creds["requested_predicates"], + } + } + await self.manager.create_pres(px_rec_in, request_data) mock_log_info.assert_called_once() async def test_create_pres_bad_revoc_state(self): @@ -866,7 +924,9 @@ async def test_create_pres_bad_revoc_state(self): ) self.holder.create_presentation = async_mock.CoroutineMock(return_value="{}") self.holder.create_revocation_state = async_mock.CoroutineMock( - side_effect=test_module.IndyHolderError("Problem", {"message": "Nope"}) + side_effect=test_indy_util_module.IndyHolderError( + "Problem", {"message": "Nope"} + ) ) self.profile.context.injector.bind_instance(IndyHolder, self.holder) @@ -878,22 +938,20 @@ async def test_create_pres_bad_revoc_state(self): with async_mock.patch.object( V20PresExRecord, "save", autospec=True ) as save_ex, async_mock.patch.object( - test_module, "AttachDecorator", autospec=True + test_indy_handler, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True - ) as mock_rr: + test_indy_util_module, "RevocationRegistry", autospec=True + ) as mock_rr, async_mock.patch.object( + test_indy_util_module.LOGGER, "error", async_mock.MagicMock() + ) as mock_log_error: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) mock_attach_decorator.data_base64 = async_mock.MagicMock( return_value=mock_attach_decorator ) - - 
req_creds = await indy_proof_req_preview2indy_requested_creds( - INDY_PROOF_REQ_NAME, preview=None, holder=self.holder - ) - - with self.assertRaises(test_module.IndyHolderError): - await self.manager.create_pres(px_rec_in, req_creds) + request_data = {} + with self.assertRaises(test_indy_util_module.IndyHolderError): + await self.manager.create_pres(px_rec_in, request_data) async def test_create_pres_multi_matching_proposal_creds_names(self): pres_request = V20PresRequest( @@ -970,9 +1028,9 @@ async def test_create_pres_multi_matching_proposal_creds_names(self): with async_mock.patch.object( V20PresExRecord, "save", autospec=True ) as save_ex, async_mock.patch.object( - test_module, "AttachDecorator", autospec=True + test_indy_handler, "AttachDecorator", autospec=True ) as mock_attach_decorator, async_mock.patch.object( - test_module, "RevocationRegistry", autospec=True + test_indy_util_module, "RevocationRegistry", autospec=True ) as mock_rr: mock_rr.from_definition = async_mock.MagicMock(return_value=more_magic_rr) @@ -986,9 +1044,9 @@ async def test_create_pres_multi_matching_proposal_creds_names(self): assert not req_creds["self_attested_attributes"] assert len(req_creds["requested_attributes"]) == 1 assert len(req_creds["requested_predicates"]) == 1 - + request_data = {"indy": req_creds} (px_rec_out, pres_msg) = await self.manager.create_pres( - px_rec_in, req_creds + px_rec_in, request_data ) save_ex.assert_called_once() assert px_rec_out.state == V20PresExRecord.STATE_PRESENTATION_SENT @@ -1029,6 +1087,44 @@ async def test_no_matching_creds_for_proof_req(self): ) ) self.holder.get_credentials_for_presentation_request_by_referent = get_creds + await indy_proof_req_preview2indy_requested_creds( + INDY_PROOF_REQ_NAMES, preview=None, holder=self.holder + ) + + async def test_no_matching_creds_indy_handler(self): + pres_request = V20PresRequest( + formats=[ + V20PresFormat( + attach_id="indy", + format_=ATTACHMENT_FORMAT[PRES_20_REQUEST][ + V20PresFormat.Format.INDY.api + ], + ) + ], + request_presentations_attach=[ + AttachDecorator.data_base64(INDY_PROOF_REQ_NAMES, ident="indy") + ], + ) + px_rec_in = V20PresExRecord(pres_request=pres_request.serialize()) + get_creds = async_mock.CoroutineMock(return_value=()) + self.holder.get_credentials_for_presentation_request_by_referent = get_creds + + with async_mock.patch.object( + V20PresExRecord, "save", autospec=True + ) as save_ex, async_mock.patch.object( + test_indy_handler, "AttachDecorator", autospec=True + ) as mock_attach_decorator: + mock_attach_decorator.data_base64 = async_mock.MagicMock( + return_value=mock_attach_decorator + ) + request_data = {} + with self.assertRaises( + test_indy_handler.V20PresFormatHandlerError + ) as context: + (px_rec_out, pres_msg) = await self.manager.create_pres( + px_rec_in, request_data + ) + assert "No matching Indy" in str(context.exception) async def test_receive_pres(self): connection_record = async_mock.MagicMock(connection_id=CONN_ID) @@ -1228,7 +1324,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): V20PresExRecord, "retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "does not satisfy proof request restrictions" in str( context.exception @@ -1284,7 +1380,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): V20PresExRecord, 
"retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "Presentation referent" in str(context.exception) @@ -1343,7 +1439,7 @@ async def test_receive_pres_bait_and_switch_attr_names(self): V20PresExRecord, "retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "does not satisfy proof request restrictions " in str( context.exception @@ -1399,7 +1495,7 @@ async def test_receive_pres_bait_and_switch_attr_names(self): V20PresExRecord, "retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "Presentation referent" in str(context.exception) @@ -1456,7 +1552,7 @@ async def test_receive_pres_bait_and_switch_pred(self): V20PresExRecord, "retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "not in proposal request" in str(context.exception) @@ -1514,7 +1610,7 @@ async def test_receive_pres_bait_and_switch_pred(self): V20PresExRecord, "retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "shenanigans not in presentation" in str(context.exception) @@ -1572,7 +1668,7 @@ async def test_receive_pres_bait_and_switch_pred(self): V20PresExRecord, "retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "highScore mismatches proposal request" in str(context.exception) @@ -1630,7 +1726,7 @@ async def test_receive_pres_bait_and_switch_pred(self): V20PresExRecord, "retrieve_by_tag_filter", autospec=True ) as retrieve_ex: retrieve_ex.return_value = px_rec_dummy - with self.assertRaises(V20PresManagerError) as context: + with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record) assert "does not satisfy proof request restrictions " in str( context.exception @@ -1663,8 +1759,8 @@ async def test_verify_pres(self): ], ) px_rec_in = V20PresExRecord( - pres_request=pres_request.serialize(), - pres=pres.serialize(), + pres_request=pres_request, + pres=pres, ) with async_mock.patch.object(V20PresExRecord, "save", autospec=True) as save_ex: diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py index c6f8368273..9f43e3c573 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py +++ 
b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py @@ -1,20 +1,21 @@ from copy import deepcopy -from time import time - from asynctest import TestCase as AsyncTestCase from asynctest import mock as async_mock from marshmallow import ValidationError +from time import time from .....admin.request_context import AdminRequestContext -from .....core.in_memory import InMemoryProfile from .....indy.holder import IndyHolder -from .....indy.sdk.models.proof_request import IndyProofReqAttrSpecSchema +from .....indy.models.proof_request import IndyProofReqAttrSpecSchema from .....indy.verifier import IndyVerifier from .....ledger.base import BaseLedger from .....storage.error import StorageNotFoundError +from .....storage.vc_holder.base import VCHolder +from .....storage.vc_holder.vc_record import VCRecord from .. import routes as test_module from ..messages.pres_format import V20PresFormat +from ..models.pres_exchange import V20PresExRecord ISSUER_DID = "NcYxiDXkpYi6ov5FcYDi1e" S_ID = f"{ISSUER_DID}:2:vidya:1.0" @@ -52,6 +53,73 @@ }, } +DIF_PROOF_REQ = { + "options": { + "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7", + "domain": "4jt78h47fh47", + }, + "presentation_definition": { + "id": "32f54163-7166-48f1-93d8-ff217bdb0654", + "submission_requirements": [ + { + "name": "Citizenship Information", + "rule": "pick", + "min": 1, + "from": "A", + } + ], + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"}, + {"uri": "https://w3id.org/citizenship#PermanentResidentCard"}, + ], + "constraints": { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "purpose": "The claim must be from one of the specified issuers", + "filter": { + "type": "string", + "enum": ["JOHN", "CAI"], + }, + } + ], + }, + } + ], + }, +} + + +DIF_PRES_PROPOSAL = { + "input_descriptors": [ + { + "id": "citizenship_input_1", + "name": "EU Driver's License", + "group": ["A"], + "schema": [ + {"uri": "https://www.w3.org/2018/credentials#VerifiableCredential"}, + {"uri": "https://w3id.org/citizenship#PermanentResidentCard"}, + ], + "constraints": { + "limit_disclosure": "required", + "fields": [ + { + "path": ["$.credentialSubject.givenName"], + "purpose": "The claim must be from one of the specified issuers", + "filter": {"type": "string", "enum": ["JOHN", "CAI"]}, + } + ], + }, + } + ] +} + class TestPresentProofRoutes(AsyncTestCase): def setUp(self): @@ -315,6 +383,274 @@ async def test_present_proof_credentials_list_multiple_referents(self): await test_module.present_proof_credentials_list(self.request) mock_response.assert_called_once_with(returned_credentials) + async def test_present_proof_credentials_list_dif(self): + self.request.match_info = { + "pres_ex_id": "123-456-789", + } + self.request.query = {"extra_query": {}} + + returned_credentials = [ + async_mock.MagicMock(cred_value={"name": "Credential1"}), + async_mock.MagicMock(cred_value={"name": "Credential2"}), + ] + self.profile.context.injector.bind_instance( + IndyHolder, + async_mock.MagicMock( + get_credentials_for_presentation_request_by_referent=( + async_mock.CoroutineMock() + ) + ), + ) + self.profile.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock( + search_credentials=async_mock.MagicMock( + return_value=async_mock.MagicMock( + fetch=async_mock.CoroutineMock( + return_value=returned_credentials + ) + ) + ) + ), + ) + record = 
V20PresExRecord( + state="request-received", + role="prover", + pres_proposal=None, + pres_request={ + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/present-proof/2.0/request-presentation", + "@id": "6ae00c6c-87fa-495a-b546-5f5953817c92", + "comment": "string", + "formats": [ + { + "attach_id": "dif", + "format": "dif/presentation-exchange/definitions@v1.0", + } + ], + "request_presentations~attach": [ + { + "@id": "dif", + "mime-type": "application/json", + "data": {"json": DIF_PROOF_REQ}, + } + ], + "will_confirm": True, + }, + pres=None, + verified=None, + auto_present=False, + error_msg=None, + ) + + with async_mock.patch.object( + test_module, "V20PresExRecord", autospec=True + ) as mock_pres_ex_rec_cls, async_mock.patch.object( + test_module.web, "json_response", async_mock.MagicMock() + ) as mock_response: + mock_pres_ex_rec_cls.retrieve_by_id.return_value = record + + await test_module.present_proof_credentials_list(self.request) + mock_response.assert_called_once_with( + [{"name": "Credential1"}, {"name": "Credential2"}] + ) + + async def test_present_proof_credentials_list_cred_v1_context_schema_uri(self): + self.request.match_info = { + "pres_ex_id": "123-456-789", + } + self.request.query = {"extra_query": {}} + test_pd = deepcopy(DIF_PROOF_REQ) + test_pd["presentation_definition"]["input_descriptors"][0]["schema"].pop(1) + record = V20PresExRecord( + state="request-received", + role="prover", + pres_proposal=None, + pres_request={ + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/present-proof/2.0/request-presentation", + "@id": "6ae00c6c-87fa-495a-b546-5f5953817c92", + "comment": "string", + "formats": [ + { + "attach_id": "dif", + "format": "dif/presentation-exchange/definitions@v1.0", + } + ], + "request_presentations~attach": [ + { + "@id": "dif", + "mime-type": "application/json", + "data": {"json": test_pd}, + } + ], + "will_confirm": True, + }, + pres=None, + verified=None, + auto_present=False, + error_msg=None, + ) + + self.profile.context.injector.bind_instance( + IndyHolder, + async_mock.MagicMock( + get_credentials_for_presentation_request_by_referent=( + async_mock.CoroutineMock() + ) + ), + ) + self.profile.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock(search_credentials=async_mock.CoroutineMock()), + ) + + with async_mock.patch.object( + test_module, "V20PresExRecord", autospec=True + ) as mock_pres_ex_rec_cls, async_mock.patch.object( + test_module.web, "json_response", async_mock.MagicMock() + ) as mock_response: + mock_pres_ex_rec_cls.retrieve_by_id.return_value = record + with self.assertRaises(test_module.web.HTTPBadRequest): + await test_module.present_proof_credentials_list(self.request) + + async def test_present_proof_credentials_list_schema_uri(self): + self.request.match_info = { + "pres_ex_id": "123-456-789", + } + self.request.query = {"extra_query": {}} + test_pd = deepcopy(DIF_PROOF_REQ) + test_pd["presentation_definition"]["input_descriptors"][0]["schema"][0][ + "uri" + ] = "https://example.org/test.json" + test_pd["presentation_definition"]["input_descriptors"][0]["schema"].pop(1) + record = V20PresExRecord( + state="request-received", + role="prover", + pres_proposal=None, + pres_request={ + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/present-proof/2.0/request-presentation", + "@id": "6ae00c6c-87fa-495a-b546-5f5953817c92", + "comment": "string", + "formats": [ + { + "attach_id": "dif", + "format": "dif/presentation-exchange/definitions@v1.0", + } + ], + "request_presentations~attach": [ + { + "@id": "dif", + "mime-type": 
"application/json", + "data": {"json": test_pd}, + } + ], + "will_confirm": True, + }, + pres=None, + verified=None, + auto_present=False, + error_msg=None, + ) + + returned_credentials = [ + async_mock.MagicMock(cred_value={"name": "Credential1"}), + async_mock.MagicMock(cred_value={"name": "Credential2"}), + ] + self.profile.context.injector.bind_instance( + IndyHolder, + async_mock.MagicMock( + get_credentials_for_presentation_request_by_referent=( + async_mock.CoroutineMock() + ) + ), + ) + self.profile.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock( + search_credentials=async_mock.MagicMock( + return_value=async_mock.MagicMock( + fetch=async_mock.CoroutineMock( + return_value=returned_credentials + ) + ) + ) + ), + ) + + with async_mock.patch.object( + test_module, "V20PresExRecord", autospec=True + ) as mock_pres_ex_rec_cls, async_mock.patch.object( + test_module.web, "json_response", async_mock.MagicMock() + ) as mock_response: + mock_pres_ex_rec_cls.retrieve_by_id.return_value = record + await test_module.present_proof_credentials_list(self.request) + mock_response.assert_called_once_with( + [{"name": "Credential1"}, {"name": "Credential2"}] + ) + + async def test_present_proof_credentials_list_dif_error(self): + self.request.match_info = { + "pres_ex_id": "123-456-789", + } + self.request.query = {"extra_query": {}} + + self.profile.context.injector.bind_instance( + IndyHolder, + async_mock.MagicMock( + get_credentials_for_presentation_request_by_referent=( + async_mock.CoroutineMock() + ) + ), + ) + self.profile.context.injector.bind_instance( + VCHolder, + async_mock.MagicMock( + search_credentials=async_mock.MagicMock( + return_value=async_mock.MagicMock( + fetch=async_mock.CoroutineMock( + side_effect=test_module.StorageNotFoundError() + ) + ) + ) + ), + ) + record = V20PresExRecord( + state="request-received", + role="prover", + pres_proposal=None, + pres_request={ + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/present-proof/2.0/request-presentation", + "@id": "6ae00c6c-87fa-495a-b546-5f5953817c92", + "comment": "string", + "formats": [ + { + "attach_id": "dif", + "format": "dif/presentation-exchange/definitions@v1.0", + } + ], + "request_presentations~attach": [ + { + "@id": "dif", + "mime-type": "application/json", + "data": {"json": DIF_PROOF_REQ}, + } + ], + "will_confirm": True, + }, + pres=None, + verified=None, + auto_present=False, + error_msg=None, + ) + + with async_mock.patch.object( + test_module, "V20PresExRecord", autospec=True + ) as mock_pres_ex_rec_cls, async_mock.patch.object( + test_module.web, "json_response", async_mock.MagicMock() + ) as mock_response: + with self.assertRaises(test_module.web.HTTPBadRequest): + mock_pres_ex_rec_cls.retrieve_by_id.return_value = record + await test_module.present_proof_credentials_list(self.request) + async def test_present_proof_retrieve(self): self.request.match_info = {"pres_ex_id": "dummy"} @@ -924,6 +1260,131 @@ async def test_present_proof_send_presentation(self): mock_px_rec_inst.serialize.return_value ) + async def test_present_proof_send_presentation_dif(self): + proof_req = deepcopy(DIF_PROOF_REQ) + proof_req["issuer_id"] = "test123" + self.request.json = async_mock.CoroutineMock( + return_value={ + "dif": proof_req, + } + ) + self.request.match_info = { + "pres_ex_id": "dummy", + } + self.profile.context.injector.bind_instance( + IndyVerifier, + async_mock.MagicMock( + verify_presentation=async_mock.CoroutineMock(), + ), + ) + + with async_mock.patch.object( + test_module, "ConnRecord", 
autospec=True + ) as mock_conn_rec_cls, async_mock.patch.object( + test_module, "V20PresManager", autospec=True + ) as mock_pres_mgr_cls, async_mock.patch.object( + test_module, "V20PresExRecord", autospec=True + ) as mock_px_rec_cls, async_mock.patch.object( + test_module.web, "json_response" + ) as mock_response: + mock_px_rec_inst = async_mock.MagicMock( + connection_id="dummy", + state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, + serialize=async_mock.MagicMock( + return_value={"thread_id": "sample-thread-id"} + ), + ) + mock_px_rec_cls.retrieve_by_id = async_mock.CoroutineMock( + return_value=mock_px_rec_inst + ) + + mock_conn_rec_inst = async_mock.MagicMock(is_ready=True) + mock_conn_rec_cls.retrieve_by_id = async_mock.CoroutineMock( + return_value=mock_conn_rec_inst + ) + + mock_pres_mgr_inst = async_mock.MagicMock( + create_pres=async_mock.CoroutineMock( + return_value=(mock_px_rec_inst, async_mock.MagicMock()) + ) + ) + mock_pres_mgr_cls.return_value = mock_pres_mgr_inst + + await test_module.present_proof_send_presentation(self.request) + mock_response.assert_called_once_with( + mock_px_rec_inst.serialize.return_value + ) + + async def test_present_proof_send_presentation_dif_error(self): + self.request.json = async_mock.CoroutineMock( + return_value={"dif": DIF_PROOF_REQ} + ) + self.request.match_info = { + "pres_ex_id": "dummy", + } + px_rec_instance = V20PresExRecord( + state="request-received", + role="prover", + pres_proposal=None, + pres_request={ + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/present-proof/2.0/request-presentation", + "@id": "6ae00c6c-87fa-495a-b546-5f5953817c92", + "comment": "string", + "formats": [ + { + "attach_id": "dif", + "format": "dif/presentation-exchange/definitions@v1.0", + } + ], + "request_presentations~attach": [ + { + "@id": "dif", + "mime-type": "application/json", + "data": {"json": DIF_PROOF_REQ}, + } + ], + "will_confirm": True, + }, + pres=None, + verified=None, + auto_present=False, + error_msg=None, + ) + self.profile.context.injector.bind_instance( + IndyVerifier, + async_mock.MagicMock( + verify_presentation=async_mock.CoroutineMock(), + ), + ) + + with async_mock.patch.object( + test_module, "ConnRecord", autospec=True + ) as mock_conn_rec_cls, async_mock.patch.object( + test_module, "V20PresManager", autospec=True + ) as mock_pres_mgr_cls, async_mock.patch.object( + test_module, "V20PresExRecord", autospec=True + ) as mock_px_rec_cls, async_mock.patch.object( + test_module.web, "json_response" + ) as mock_response: + mock_px_rec_cls.retrieve_by_id = async_mock.CoroutineMock( + return_value=px_rec_instance + ) + + mock_conn_rec_inst = async_mock.MagicMock(is_ready=True) + mock_conn_rec_cls.retrieve_by_id = async_mock.CoroutineMock( + return_value=mock_conn_rec_inst + ) + + mock_pres_mgr_inst = async_mock.MagicMock( + create_pres=async_mock.CoroutineMock( + side_effect=test_module.LedgerError() + ) + ) + mock_pres_mgr_cls.return_value = mock_pres_mgr_inst + with self.assertRaises(test_module.web.HTTPBadRequest): + await test_module.present_proof_send_presentation(self.request) + mock_response.assert_called_once_with(px_rec_instance.serialize()) + async def test_present_proof_send_presentation_px_rec_not_found(self): self.request.json = async_mock.CoroutineMock( return_value={ @@ -1425,3 +1886,70 @@ async def test_post_process_routes(self): mock_app = async_mock.MagicMock(_state={"swagger_dict": {}}) test_module.post_process_routes(mock_app) assert "tags" in mock_app._state["swagger_dict"] + + def 
test_format_attach_dif(self): + req_dict = {"dif": DIF_PROOF_REQ} + pres_req_dict = test_module._formats_attach( + by_format=req_dict, + msg_type="present-proof/2.0/request-presentation", + spec="request_presentations", + ) + assert pres_req_dict.get("formats")[0].attach_id == "dif" + assert ( + pres_req_dict.get("request_presentations_attach")[0].data.json_ + == DIF_PROOF_REQ + ) + + async def test_process_vcrecords_return_list(self): + cred_list = [ + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + record_id="test1", + ), + VCRecord( + contexts=[ + "https://www.w3.org/2018/credentials/v1", + "https://www.w3.org/2018/credentials/examples/v1", + ], + expanded_types=[ + "https://www.w3.org/2018/credentials#VerifiableCredential", + "https://example.org/examples#UniversityDegreeCredential", + ], + issuer_id="https://example.edu/issuers/565049", + subject_ids=[ + "did:sov:LjgpST2rjsoxYegQDRm7EL", + "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL", + ], + proof_types=["BbsBlsSignature2020"], + schema_ids=["https://example.org/examples/degree.json"], + cred_value={"...": "..."}, + given_id="http://example.edu/credentials/3732", + cred_tags={"some": "tag"}, + record_id="test2", + ), + ] + record_ids = {"test1"} + ( + returned_cred_list, + returned_record_ids, + ) = await test_module.process_vcrecords_return_list(cred_list, record_ids) + assert len(returned_cred_list) == 1 + assert len(returned_record_ids) == 2 + assert returned_cred_list[0].record_id == "test2" diff --git a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py index 6c0e438fc9..cd66c3b412 100644 --- a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py +++ b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py @@ -16,7 +16,7 @@ from ...core.profile import Profile, ProfileSession from ...indy.util import indy_client_dir from ...indy.issuer import IndyIssuer, IndyIssuerError -from ...indy.sdk.models.revocation import ( +from ...indy.models.revocation import ( IndyRevRegDef, IndyRevRegDefSchema, IndyRevRegEntry, diff --git a/aries_cloudagent/revocation/models/tests/test_issuer_rev_reg_record.py b/aries_cloudagent/revocation/models/tests/test_issuer_rev_reg_record.py index d79864793f..5feeb8e21b 100644 --- a/aries_cloudagent/revocation/models/tests/test_issuer_rev_reg_record.py +++ b/aries_cloudagent/revocation/models/tests/test_issuer_rev_reg_record.py @@ -6,7 +6,7 @@ from ....core.in_memory import InMemoryProfile from ....indy.issuer import IndyIssuer, IndyIssuerError -from ....indy.sdk.models.revocation import IndyRevRegDef +from ....indy.models.revocation import IndyRevRegDef from ....indy.util import indy_client_dir from ....ledger.base import BaseLedger from ....tails.base import BaseTailsServer diff --git a/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py b/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py 
diff --git a/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py b/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py
index 685c3cfbe3..84eed350b8 100644
--- a/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py
+++ b/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py
@@ -11,7 +11,7 @@
 VC_CONTEXT = "https://www.w3.org/2018/credentials/v1"
-VC_TYPE = "https://www.w3.org/2018/credentials/v1/VerifiableCredential"
+VC_TYPE = "https://www.w3.org/2018/credentials#VerifiableCredential"
 VC_SUBJECT_ID = "did:example:ebfeb1f712ebc6f1c276e12ec21"
 VC_PROOF_TYPE = "Ed25519Signature2018"
 VC_ISSUER_ID = "https://example.edu/issuers/14"
@@ -31,9 +31,9 @@ def test_record() -> VCRecord:
            VC_CONTEXT,
            "https://www.w3.org/2018/credentials/examples/v1",
        ],
-        types=[
+        expanded_types=[
            VC_TYPE,
-            "https://www.w3.org/2018/credentials/examples/v1/UniversityDegreeCredential",
+            "https://example.org/examples#UniversityDegreeCredential",
        ],
        schema_ids=[VC_SCHEMA_ID],
        issuer_id=VC_ISSUER_ID,
diff --git a/aries_cloudagent/storage/vc_holder/tests/test_vc_record.py b/aries_cloudagent/storage/vc_holder/tests/test_vc_record.py
index 8756d8f7f9..3682689255 100644
--- a/aries_cloudagent/storage/vc_holder/tests/test_vc_record.py
+++ b/aries_cloudagent/storage/vc_holder/tests/test_vc_record.py
@@ -108,8 +108,8 @@
     "https://www.w3.org/2018/credentials/examples/v1",
 ]
 TYPES = [
-    "https://www.w3.org/2018/credentials/v1/VerifiableCredential",
-    "https://www.w3.org/2018/credentials/examples/v1/UniversityDegreeCredential",
+    "https://www.w3.org/2018/credentials#VerifiableCredential",
+    "https://example.org/examples#UniversityDegreeCredential",
 ]
 ISSUER_ID = "https://example.edu/issuers/14"
 SUBJECT_IDS = ["did:example:ebfeb1f712ebc6f1c276e12ec21"]
@@ -123,7 +123,7 @@ def test_record() -> VCRecord:
     return VCRecord(
         contexts=CONTEXTS,
-        types=TYPES,
+        expanded_types=TYPES,
         schema_ids=SCHEMA_IDS,
         issuer_id=ISSUER_ID,
         subject_ids=SUBJECT_IDS,
@@ -139,7 +139,7 @@ def test_create(self):
         record = test_record()
         assert record.contexts == set(CONTEXTS)
-        assert record.types == set(TYPES)
+        assert record.expanded_types == set(TYPES)
         assert record.schema_ids == set(SCHEMA_IDS)
         assert record.subject_ids == set(SUBJECT_IDS)
         assert record.proof_types == set(PROOF_TYPES)
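
The rename from `types` to `expanded_types` in these records stores JSON-LD expanded type IRIs (e.g. https://www.w3.org/2018/credentials#VerifiableCredential) rather than the compact terms from the credential's `type` array. A hedged sketch of how such IRIs can be derived with pyld, assuming the credential's `@context` entries can be resolved by the document loader in use (the helper name and signature here are illustrative only):

    from pyld import jsonld


    def expanded_types_for(credential: dict) -> list:
        """Resolve the credential's compact types against its @context (illustrative)."""
        expanded = jsonld.expand(credential)
        # expansion yields a list of node objects whose "@type" holds absolute IRIs
        return expanded[0].get("@type", []) if expanded else []

A document carrying `"type": ["VerifiableCredential", "UniversityDegreeCredential"]` together with the credentials and examples contexts would expand to the two IRIs used in the tests above.
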
diff --git a/aries_cloudagent/storage/vc_holder/vc_record.py b/aries_cloudagent/storage/vc_holder/vc_record.py
index c5ea954525..e3c0e739c7 100644
--- a/aries_cloudagent/storage/vc_holder/vc_record.py
+++ b/aries_cloudagent/storage/vc_holder/vc_record.py
@@ -25,7 +25,7 @@ def __init__(
         self,
         *,
         contexts: Sequence[str],  # context is required by spec
-        types: Sequence[str],  # type is required by spec
+        expanded_types: Sequence[str],  # expanded type from contexts and types
         issuer_id: str,  # issuer ID is required by spec
         subject_ids: Sequence[str],  # one or more subject IDs may be present
         schema_ids: Sequence[str],  # one or more credential schema IDs may be present
@@ -38,7 +38,7 @@ def __init__(
         """Initialize some defaults on record."""
         super().__init__()
         self.contexts = set(contexts) if contexts else set()
-        self.types = set(types) if types else set()
+        self.expanded_types = set(expanded_types) if expanded_types else set()
         self.schema_ids = set(schema_ids) if schema_ids else set()
         self.issuer_id = issuer_id
         self.subject_ids = set(subject_ids) if subject_ids else set()
@@ -73,7 +73,7 @@ def __eq__(self, other: object) -> bool:
             return False
         return (
             other.contexts == self.contexts
-            and other.types == self.types
+            and other.expanded_types == self.expanded_types
             and other.subject_ids == self.subject_ids
             and other.schema_ids == self.schema_ids
             and other.issuer_id == self.issuer_id
@@ -95,10 +95,10 @@ class Meta:
         unknown = EXCLUDE

     contexts = fields.List(fields.Str(description="Context", **ENDPOINT))
-    types = fields.List(
+    expanded_types = fields.List(
         fields.Str(
-            description="Type",
-            example="VerifiableCredential",
+            description="JSON-LD expanded type extracted from type and context",
+            example="https://w3id.org/citizenship#PermanentResidentCard",
         ),
     )
     schema_ids = fields.List(
diff --git a/aries_cloudagent/storage/vc_holder/xform.py b/aries_cloudagent/storage/vc_holder/xform.py
index a0cb678fdd..1254fd1e83 100644
--- a/aries_cloudagent/storage/vc_holder/xform.py
+++ b/aries_cloudagent/storage/vc_holder/xform.py
@@ -38,7 +38,7 @@ def storage_to_vc_record(record: StorageRecord) -> VCRecord:
             cred_tags[tagname] = tagval
     return VCRecord(
         contexts=contexts,
-        types=types,
+        expanded_types=types,
         schema_ids=schema_ids,
         issuer_id=issuer_id,
         subject_ids=subject_ids,
@@ -55,7 +55,7 @@ def vc_to_storage_record(cred: VCRecord) -> StorageRecord:
     tags = {}
     for ctx_val in cred.contexts:
         tags[f"ctxt:{ctx_val}"] = "1"
-    for type_val in cred.types:
+    for type_val in cred.expanded_types:
         tags[f"type:{type_val}"] = "1"
     for schema_val in cred.schema_ids:
         tags[f"schm:{schema_val}"] = "1"
diff --git a/aries_cloudagent/vc/ld_proofs/constants.py b/aries_cloudagent/vc/ld_proofs/constants.py
index 93613b7a16..bac2bc2bef 100644
--- a/aries_cloudagent/vc/ld_proofs/constants.py
+++ b/aries_cloudagent/vc/ld_proofs/constants.py
@@ -14,3 +14,7 @@
 VERIFIABLE_CREDENTIAL_TYPE = "VerifiableCredential"
 VERIFIABLE_PRESENTATION_TYPE = "VerifiablePresentation"
+
+EXPANDED_TYPE_CREDENTIALS_CONTEXT_V1_VC_TYPE = (
+    "https://www.w3.org/2018/credentials#VerifiableCredential"
+)
diff --git a/aries_cloudagent/vc/ld_proofs/suites/BbsBlsSignatureProof2020.py b/aries_cloudagent/vc/ld_proofs/suites/BbsBlsSignatureProof2020.py
index 277b6fdd9c..3b00e8ed5f 100644
--- a/aries_cloudagent/vc/ld_proofs/suites/BbsBlsSignatureProof2020.py
+++ b/aries_cloudagent/vc/ld_proofs/suites/BbsBlsSignatureProof2020.py
@@ -1,5 +1,7 @@
 """BbsBlsSignatureProof2020 class."""

+import re
+
 from os import urandom
 from pyld import jsonld
 from typing import List
@@ -326,21 +328,11 @@ def _transform_blank_node_ids_into_placeholder_node_ids(
             List[str]: List of transformed output statements

         """
-        transformed_statements = []
-
-        for statement in statements:
-            if "_:c14n" in statement:
-                prefix_index = statement.index("_:c14n")
-                space_index = statement.index(" ", prefix_index)
-
-                statement = statement.replace(
-                    statement[prefix_index:space_index],
-                    "<urn:bnid:{ident}>".format(
-                        ident=statement[prefix_index:space_index]
-                    ),
-                )
-
-                transformed_statements.append(statement)
+        # replace all occurrences of _:c14nX with <urn:bnid:_:c14nX>
+        transformed_statements = [
+            re.sub(r"(_:c14n[0-9]+)", r"<urn:bnid:\1>", statement)
+            for statement in statements
+        ]

         return transformed_statements

@@ -359,24 +351,11 @@ def _transform_placeholder_node_ids_into_blank_node_ids(
             List[str]: List of transformed output statements

         """
-        transformed_statements = []
-
-        prefix_string = "<urn:bnid:"
-
-        for statement in statements:
-            if prefix_string in statement:
-                prefix_index = statement.index(prefix_string)
-                closing_index = statement.index(">", prefix_index)
-
-                urn_id_close = closing_index + 1  # >
-                urn_id_prefix_end = prefix_index + len(prefix_string)  # <urn:bnid:
-
-                statement = statement.replace(
-                    statement[prefix_index:urn_id_close],
-                    statement[urn_id_prefix_end:closing_index],
-                )
-
-                transformed_statements.append(statement)
+        # replace all occurrences of <urn:bnid:_:c14nX> with _:c14nX
+        transformed_statements = [
+            re.sub(r"<urn:bnid:(_:c14n[0-9]+)>", r"\1", statement)
+            for statement in statements
+        ]

         return transformed_statements
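
The rewrite above replaces the manual string slicing with two inverse regex substitutions. As a quick illustration (sample N-Quad value only), the two transforms round-trip like this:

    import re

    statement = '_:c14n0 <http://schema.org/familyName> "SMITH" .'

    # blank node id -> placeholder id, as in the first transform
    placeholder = re.sub(r"(_:c14n[0-9]+)", r"<urn:bnid:\1>", statement)
    # '<urn:bnid:_:c14n0> <http://schema.org/familyName> "SMITH" .'

    # placeholder id -> blank node id, the inverse transform
    restored = re.sub(r"<urn:bnid:(_:c14n[0-9]+)>", r"\1", placeholder)
    assert restored == statement

Unlike the removed loop, the regex form also rewrites every blank node identifier in a statement rather than only the first one it finds.
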
diff --git a/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py b/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py
index 8c886efb41..24b370d260 100644
--- a/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py
+++ b/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py
@@ -208,6 +208,30 @@ async def test_create_presentation_x_invalid_credential_structures(self):
                 presentation_id="https://presentation_id.com",
             )

+    async def test_sign_presentation_bbsbls(self):
+        unsigned_presentation = await create_presentation(
+            credentials=[CREDENTIAL_ISSUED]
+        )
+
+        suite = BbsBlsSignature2020(
+            verification_method=self.bls12381g2_verification_method,
+            key_pair=WalletKeyPair(
+                wallet=self.wallet,
+                key_type=KeyType.BLS12381G2,
+                public_key_base58=self.bls12381g2_key_info.verkey,
+            ),
+            date=datetime.strptime("2020-12-11T03:50:55Z", "%Y-%m-%dT%H:%M:%SZ"),
+        )
+
+        assert unsigned_presentation == PRESENTATION_UNSIGNED
+        unsigned_presentation["@context"].append("https://w3id.org/security/bbs/v1")
+        presentation = await sign_presentation(
+            presentation=unsigned_presentation,
+            suite=suite,
+            document_loader=custom_document_loader,
+            challenge=self.presentation_challenge,
+        )
+
     async def test_verify_presentation(self):
         suite = Ed25519Signature2018(
             key_pair=WalletKeyPair(wallet=self.wallet, key_type=KeyType.ED25519),
diff --git a/requirements.txt b/requirements.txt
index 36611ca552..cbf72f9671 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,3 +17,7 @@ pyyaml~=5.4.0
 ConfigArgParse~=1.2.3
 pyjwt~=1.7.1
 pydid~=0.3.0
+jsonpath_ng==1.5.2
+pytz~=2021.1
+python-dateutil~=2.8.1
+unflatten~=0.1
\ No newline at end of file
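
Of the new dependencies, jsonpath_ng is presumably what backs the DIF presentation-exchange field constraints, since DIF PE selects credential fields by JSONPath; the path and document below are illustrative, not taken from the changed code:

    from jsonpath_ng import parse

    credential = {"credentialSubject": {"degree": {"type": "BachelorDegree"}}}
    expression = parse("$.credentialSubject.degree.type")

    # each match carries the resolved value and its full path in the document
    values = [match.value for match in expression.find(credential)]
    assert values == ["BachelorDegree"]

pytz and python-dateutil similarly line up with date parsing and timezone handling in the new presentation-exchange code, and unflatten with rebuilding nested dicts from flattened keys; those are inferences from the dependency list, not statements about specific call sites.
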