diff --git a/.github/actions/build-napi/action.yml b/.github/actions/build-napi/action.yml index c8ba0a4370..eb6ded59fb 100644 --- a/.github/actions/build-napi/action.yml +++ b/.github/actions/build-napi/action.yml @@ -12,7 +12,7 @@ inputs: node-version: default: 18 rust-version: - default: 1.65.0 + default: 1.70.0 runs: using: "composite" diff --git a/.github/actions/derive-cache-info/action.yml b/.github/actions/derive-cache-info/action.yml index 7c56db06a7..606087e601 100644 --- a/.github/actions/derive-cache-info/action.yml +++ b/.github/actions/derive-cache-info/action.yml @@ -23,7 +23,7 @@ runs: - id: main shell: bash run: | - source ./ci/util.sh + source ./.github/ci/util.sh DOCKER_IMG=${{ inputs.docker-img }} CACHE_KEY=`as_docker_cache_key $DOCKER_IMG` CACHE_DIR="/tmp/cachedir-$CACHE_KEY" diff --git a/.github/actions/publish-android/action.yml b/.github/actions/publish-android/action.yml deleted file mode 100644 index 2efc8b0ac0..0000000000 --- a/.github/actions/publish-android/action.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: 'publish-android' -description: "Build andorid .aar and publish it to /tmp/artifacts" - -inputs: - abis: - description: 'A space separated list of ABIs to create a AAR for' - default: 'armv7 arm64 x86 x86_64' - docker-img-name: - description: 'Name of the android image' - required: true - full-version-name: - description: 'Name of the resulting .aar' - required: true - -runs: - using: "composite" - steps: - - name: Build, run android wrapper tests, and publish artifacts - run: | - set -x - sudo rm -rf "/usr/local/share/boost" "/usr/local/lib/android" "/usr/share/dotnet" - docker run --name publish-android-wrapper \ - -e ABIS="${{ inputs.abis }}" \ - -e FULL_VERSION_NAME="${{ inputs.full-version-name }}" \ - ${{ inputs.docker-img-name }} \ - bash -c '(cd /home/indy/aries-vcx && ./wrappers/java/ci/android.build.sh $ABIS)' - mkdir -p /tmp/artifacts/aar - docker cp publish-android-wrapper:/home/indy/artifacts/aar /tmp/artifacts - docker rm publish-android-wrapper > /dev/null - shell: bash diff --git a/.github/actions/setup-codecov-rust/action.yml b/.github/actions/setup-codecov-rust/action.yml index fee5adaacb..1808bbec8b 100644 --- a/.github/actions/setup-codecov-rust/action.yml +++ b/.github/actions/setup-codecov-rust/action.yml @@ -29,4 +29,4 @@ runs: docker run --rm -d --name mysql --network host -e MYSQL_ROOT_PASSWORD=mysecretpassword mysql:5.7.35 docker run --rm -d --name indypool --network host ${{ env.DOCKER_IMAGE_POOL }} sleep 5 - docker-compose -f ./ci/agency/docker-compose.yml up -d \ No newline at end of file + docker-compose -f ./.github/ci/agency/docker-compose.yml up -d \ No newline at end of file diff --git a/.github/actions/setup-testing-nodejs/action.yml b/.github/actions/setup-testing-nodejs/action.yml index e924acc7c1..40a8512802 100644 --- a/.github/actions/setup-testing-nodejs/action.yml +++ b/.github/actions/setup-testing-nodejs/action.yml @@ -40,7 +40,7 @@ runs: docker run --rm -d --name mysql --network host -e MYSQL_ROOT_PASSWORD=mysecretpassword mysql:5.7.35 docker run --rm -d --name indypool --network host ${{ env.DOCKER_IMAGE_POOL }} sleep 5 - docker-compose -f ./ci/agency/docker-compose.yml up -d + docker-compose -f ./.github/ci/agency/docker-compose.yml up -d - name: "Build binding module" shell: bash run: (cd wrappers/vcx-napi-rs && npm install && npm run build:napi:debug) diff --git a/ci/agency/docker-compose.yml b/.github/ci/agency/docker-compose.yml similarity index 100% rename from ci/agency/docker-compose.yml rename to 
.github/ci/agency/docker-compose.yml diff --git a/ci/agency/localhost.env b/.github/ci/agency/localhost.env similarity index 100% rename from ci/agency/localhost.env rename to .github/ci/agency/localhost.env diff --git a/ci/util.sh b/.github/ci/util.sh similarity index 100% rename from ci/util.sh rename to .github/ci/util.sh diff --git a/ci/vdrproxy.dockerfile b/.github/ci/vdrproxy.dockerfile similarity index 95% rename from ci/vdrproxy.dockerfile rename to .github/ci/vdrproxy.dockerfile index 96c5e78692..95c2b3da47 100644 --- a/ci/vdrproxy.dockerfile +++ b/.github/ci/vdrproxy.dockerfile @@ -18,7 +18,7 @@ RUN apk update && apk upgrade && \ USER indy WORKDIR /home/indy -ARG RUST_VER="1.65.0" +ARG RUST_VER="1.70.0" RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain $RUST_VER --default-host x86_64-unknown-linux-musl ENV PATH="/home/indy/.cargo/bin:$PATH" @@ -32,7 +32,7 @@ RUN apk update && apk upgrade && \ USER indy RUN git clone https://github.com/hyperledger/indy-vdr.git WORKDIR /home/indy/indy-vdr/indy-vdr-proxy -RUN git checkout 32f44489 +RUN git checkout c143268 RUN cargo build --release FROM alpine:3.18 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 530403d7b6..ea81f7f8de 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -28,11 +28,12 @@ jobs: uses: actions/checkout@v3 - uses: actions-rs/toolchain@v1 with: - toolchain: nightly + toolchain: nightly-2023-05-08 components: rustfmt + override: true - name: "Verify code formatting" run: | - cargo +nightly fmt --check + cargo +nightly-2023-05-08 fmt --check workflow-setup: runs-on: ubuntu-20.04 @@ -64,8 +65,8 @@ jobs: run: | set -x - HASH_DOCKERFILE_LIBVCX=${{ hashFiles('ci/libvcx.dockerfile') }} - HASH_DOCKERFILE_VDRPROXY=${{ hashFiles('ci/vdrproxy.dockerfile') }} + HASH_DOCKERFILE_LIBVCX=${{ hashFiles('.github/ci/libvcx.dockerfile') }} + HASH_DOCKERFILE_VDRPROXY=${{ hashFiles('.github/ci/vdrproxy.dockerfile') }} HASH_SRC_LIBVDRTOOLS=${{ hashFiles('libvdrtools') }} HASH_SRC_LIBVCX=${{ hashFiles('libvcx') }} HASH_SRC_ARIESVCX=${{ hashFiles('aries_vcx') }} @@ -73,7 +74,6 @@ jobs: HASH_SRC_AGENCYCLIENT=${{ hashFiles('agency_client') }} HASH_SRC_DIDDOC=${{ hashFiles('diddoc') }} HASH_SRC_MESSAGES=${{ hashFiles('messages') }} - HASH_SRC_WRAPPER_JAVA=${{ hashFiles('wrappers/java') }} SEED_HASH_ARIESVCX=${HASH_SRC_LIBVDRTOOLS:0:11}-${HASH_SRC_ARIESVCX_CORE:0:11}-${HASH_SRC_ARIESVCX:0:11}-${HASH_SRC_AGENCYCLIENT:0:11}-${HASH_SRC_DIDDOC:0:11}-${HASH_SRC_MESSAGES:0:11}} HASH_ARIESVCX=$(echo -n "$SEED_HASH_ARIESVCX" | sha256sum | awk '{print $1}') @@ -118,7 +118,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - backend: ["vdrtools", "modular_libs", "vdr_proxy_ledger"] + backend: ["credx,vdrtools_wallet", "vdr_proxy_ledger"] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -133,38 +133,12 @@ jobs: sudo apt-get install -y libsodium-dev libssl-dev libzmq3-dev - name: "Verify clippy across the entire workspace with default features" run: | - cargo clippy -p aries-vcx --features legacy_proof,migration --features ${{ matrix.backend }} --no-default-features + cargo clippy -p aries-vcx --features legacy_proof --features ${{ matrix.backend }} --no-default-features env: RUSTFLAGS: -D warnings - aries_vcx_core_clippy: - runs-on: ubuntu-20.04 - strategy: - matrix: - backend: ["vdrtools_anoncreds", "modular_libs", "vdr_proxy_ledger"] - steps: - - name: "Git checkout" - uses: actions/checkout@v3 - - uses: actions-rs/toolchain@v1 - with: - toolchain: ${{ env.RUST_TOOLCHAIN_VERSON }} 
- components: clippy - - name: "Install dependencies" - shell: bash - run: | - sudo apt-get update -y - sudo apt-get install -y libsodium-dev libssl-dev libzmq3-dev - - name: "Verify clippy across the entire workspace with default features" - run: | - cargo clippy -p aries_vcx_core --features legacy_proof,vdrtools_wallet --features ${{ matrix.backend }} - env: - RUSTFLAGS: -D warnings - - libvcx_core_clippy: + aries_vcx_no_features_clippy: runs-on: ubuntu-20.04 - strategy: - matrix: - backend: ["anoncreds_credx", "anoncreds_vdrtools"] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -179,15 +153,15 @@ jobs: sudo apt-get install -y libsodium-dev libssl-dev libzmq3-dev - name: "Verify clippy across the entire workspace with default features" run: | - cargo clippy -p libvcx_core --features ${{ matrix.backend }} + cargo clippy -p aries-vcx --no-default-features env: RUSTFLAGS: -D warnings - node_wrapper_clippy: + aries_vcx_core_clippy: runs-on: ubuntu-20.04 strategy: matrix: - backend: ["anoncreds_credx", "anoncreds_vdrtools"] + backend: ["credx,vdrtools_wallet", "vdr_proxy_ledger"] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -202,7 +176,7 @@ jobs: sudo apt-get install -y libsodium-dev libssl-dev libzmq3-dev - name: "Verify clippy across the entire workspace with default features" run: | - cargo clippy -p vcx-napi-rs --features ${{ matrix.backend }} + cargo clippy -p aries_vcx_core --features legacy_proof,vdrtools_wallet --features ${{ matrix.backend }} env: RUSTFLAGS: -D warnings @@ -232,7 +206,7 @@ jobs: uses: ./.github/actions/build-image with: docker-img: ${{ env.DOCKER_IMG_CACHED }} - dockerfile-path: "ci/vdrproxy.dockerfile" + dockerfile-path: ".github/ci/vdrproxy.dockerfile" build-arg: "ALPINE_CORE_IMAGE=$DOCKER_IMG_CACHED_ALPINE_CORE" branch-name: ${{ env.BRANCH_NAME }} branch-main: ${{ env.MAIN_BRANCH }} @@ -283,7 +257,7 @@ jobs: run: | RUSTFLAGS='-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests' \ RUSTDOCFLAGS='-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests' \ - RUST_TEST_THREADS=1 CARGO_INCREMENTAL=0 TEST_POOL_IP=127.0.0.1 cargo test --package aries-vcx -F 'modular_libs' -- --ignored; + RUST_TEST_THREADS=1 CARGO_INCREMENTAL=0 TEST_POOL_IP=127.0.0.1 cargo test --package aries-vcx -- --ignored; mkdir -p /tmp/artifacts/coverage grcov ./target/debug/ -s . 
-t lcov --llvm --branch --ignore-not-existing -o /tmp/artifacts/coverage/coverage.lcov @@ -306,12 +280,6 @@ jobs: test-unit-workspace: needs: workflow-setup runs-on: ubuntu-20.04 - strategy: - matrix: - features: [ - "anoncreds_vdrtools", - "anoncreds_credx", - ] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -321,17 +289,11 @@ jobs: rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSON }} skip-docker-setup: true - name: "Run workspace unit tests" - run: RUST_TEST_THREADS=1 cargo test --workspace --lib --exclude aries-vcx-agent --exclude libvdrtools --exclude wallet_migrator --features ${{ matrix.features }} + run: RUST_TEST_THREADS=1 cargo test --workspace --lib --exclude aries-vcx-agent --exclude libvdrtools --exclude wallet_migrator test-integration-aries-vcx: needs: workflow-setup runs-on: ubuntu-20.04 - strategy: - matrix: - features: [ - "vdrtools", - "modular_libs" - ] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -340,7 +302,7 @@ jobs: with: rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSON }} - name: "Run aries-vcx integration tests" - run: RUST_TEST_THREADS=1 cargo test --manifest-path="aries_vcx/Cargo.toml" --features ${{ matrix.features }} -- --ignored; + run: RUST_TEST_THREADS=1 cargo test --manifest-path="aries_vcx/Cargo.toml" -- --ignored; test-integration-aries-vcx-mysql: needs: workflow-setup @@ -384,34 +346,10 @@ jobs: with: name: "docker-services-${{ github.job }}" - test-integration-aries-vcx-migration: - needs: workflow-setup - runs-on: ubuntu-20.04 - steps: - - name: "Git checkout" - uses: actions/checkout@v3 - - name: "Setup rust testing environment" - uses: ./.github/actions/setup-testing-rust - with: - rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSON }} - - name: "Run aries-vcx tests: pool_tests agency_pool_tests" - run: | - cargo test --manifest-path="wallet_migrator/Cargo.toml"; - RUST_TEST_THREADS=1 CARGO_INCREMENTAL=0 TEST_POOL_IP=127.0.0.1 cargo test --manifest-path="aries_vcx/Cargo.toml" -F migration --test test_credential_issuance -- --include-ignored; - RUST_TEST_THREADS=1 CARGO_INCREMENTAL=0 TEST_POOL_IP=127.0.0.1 cargo test --manifest-path="aries_vcx/Cargo.toml" -F migration --test test_credential_retrieval -- --include-ignored; - RUST_TEST_THREADS=1 CARGO_INCREMENTAL=0 TEST_POOL_IP=127.0.0.1 cargo test --manifest-path="aries_vcx/Cargo.toml" -F migration --test test_proof_presentation -- --include-ignored; - RUST_TEST_THREADS=1 CARGO_INCREMENTAL=0 TEST_POOL_IP=127.0.0.1 cargo test --manifest-path="aries_vcx/Cargo.toml" -F migration --test test_revocations -- --include-ignored; - test-integration-libvcx: needs: workflow-setup if: ${{ needs.workflow-setup.outputs.SKIP_CI != 'true' }} runs-on: ubuntu-20.04 - strategy: - matrix: - features: [ - "anoncreds_vdrtools", - "anoncreds_credx" - ] steps: - name: "Git checkout" uses: actions/checkout@v3 @@ -421,7 +359,7 @@ jobs: rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSON }} - name: "Run libvcx_core integration tests" run: | - RUST_TEST_THREADS=1 cargo test --features ${{ matrix.features }} --manifest-path="libvcx_core/Cargo.toml" -- --include-ignored; + RUST_TEST_THREADS=1 cargo test --manifest-path="libvcx_core/Cargo.toml" -- --include-ignored; test-integration-did-crate: needs: workflow-setup @@ -438,25 +376,6 @@ jobs: run: | RUST_TEST_THREADS=1 cargo test -p did_doc -p did_parser -p did_resolver -p did_resolver_registry -p did_resolver_sov -p did_resolver_web -p did_doc_sov -p did_key -p did_peer --test "*" - test-node-wrapper: - needs: workflow-setup - if: ${{ 
needs.workflow-setup.outputs.SKIP_CI != 'true' }} - runs-on: ubuntu-22.04 - strategy: - matrix: - node-version: [18.x] - steps: - - name: "Git checkout" - uses: actions/checkout@v3 - - name: "Setup NodeJS libvcx testing environment" - uses: ./.github/actions/setup-testing-nodejs - with: - rust-toolchain-version: ${{ env.RUST_TOOLCHAIN_VERSON }} - skip-docker-setup: true - node-version: ${{ matrix.node-version }} - - name: "Run tests" - run: cd wrappers/node && npm run test - test-integration-node-wrapper: needs: workflow-setup if: ${{ needs.workflow-setup.outputs.SKIP_CI != 'true' }} @@ -613,7 +532,7 @@ jobs: - test-integration-libvcx - test-integration-aries-vcx - test-integration-aries-vcx-mysql - - test-node-wrapper + # - test-node-wrapper - test-integration-node-wrapper - workflow-setup - build-napi @@ -637,7 +556,7 @@ jobs: - test-integration-libvcx - test-integration-aries-vcx - test-integration-aries-vcx-mysql - - test-node-wrapper + # - test-node-wrapper - test-integration-node-wrapper if: ${{ needs.workflow-setup.outputs.RELEASE == 'true' || needs.workflow-setup.outputs.PRERELEASE == 'true' }} outputs: diff --git a/.gitignore b/.gitignore index 00385ff043..45f9270f02 100644 --- a/.gitignore +++ b/.gitignore @@ -4,10 +4,7 @@ **/node_modules **/*.node **/dist -wrappers/ios/vcx/vcx.framework/** -wrappers/ios/vcx/vcx.framework.dSYM/** -wrappers/ios_legacy/vcx/vcx.framework/** -wrappers/ios_legacy/vcx/vcx.framework.dSYM/** .vscode +*.code-workspace **/tails.txt .session.vim diff --git a/Cargo.lock b/Cargo.lock index c8ab1b0074..9973c131cc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21,17 +21,17 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.3.1" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2079246596c18b4a33e274ae10c0e50613f4d32a4198e09c7b93771013fed74" +checksum = "a92ef85799cba03f76e4f7c10f533e66d87c9a7e7055f3391f09000ad8351bc9" dependencies = [ "actix-codec", "actix-rt", "actix-service", "actix-utils", - "ahash 0.8.3", - "base64 0.21.2", - "bitflags 1.3.2", + "ahash", + "base64", + "bitflags 2.4.0", "brotli", "bytes", "bytestring", @@ -43,7 +43,7 @@ dependencies = [ "http", "httparse", "httpdate", - "itoa 1.0.9", + "itoa", "language-tags", "local-channel", "mime", @@ -65,7 +65,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -83,9 +83,9 @@ dependencies = [ [[package]] name = "actix-rt" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15265b6b8e2347670eb363c47fc8c75208b4a4994b27192f345fcbe707804f3e" +checksum = "28f32d40287d3f402ae0028a9d54bef51af15c8769492826a69d28f81893151d" dependencies = [ "futures-core", "tokio", @@ -93,9 +93,9 @@ dependencies = [ [[package]] name = "actix-server" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e8613a75dd50cc45f473cee3c34d59ed677c0f7b44480ce3b8247d7dc519327" +checksum = "3eb13e7eef0423ea6eab0e59f6c72e7cb46d33691ad56a726b3cd07ddec2c2d4" dependencies = [ "actix-rt", "actix-service", @@ -103,8 +103,7 @@ dependencies = [ "futures-core", "futures-util", "mio", - "num_cpus", - "socket2", + "socket2 0.5.4", "tokio", "tracing", ] @@ -132,9 +131,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.3.1" +version = "4.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3cb42f9566ab176e1ef0b8b3a896529062b4efc6be0123046095914c4c1c96" +checksum = "0e4a5b5e29603ca8c94a77c65cf874718ceb60292c5a5c3e5f4ace041af462b9" dependencies = [ "actix-codec", "actix-http", @@ -145,17 +144,16 @@ dependencies = [ "actix-service", "actix-utils", "actix-web-codegen", - "ahash 0.7.6", + "ahash", "bytes", "bytestring", - "cfg-if 1.0.0", + "cfg-if", "cookie", "derive_more", "encoding_rs", "futures-core", "futures-util", - "http", - "itoa 1.0.9", + "itoa", "language-tags", "log", "mime", @@ -166,28 +164,28 @@ dependencies = [ "serde_json", "serde_urlencoded", "smallvec", - "socket2", + "socket2 0.5.4", "time 0.3.20", "url", ] [[package]] name = "actix-web-codegen" -version = "4.2.0" +version = "4.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2262160a7ae29e3415554a3f1fc04c764b1540c116aa524683208078b7a75bc9" +checksum = "eb1f50ebbb30eca122b188319a4398b3f7bb4a8cdf50ecfb73bfc6a3c3ce54f5" dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.38", ] [[package]] name = "addr2line" -version = "0.19.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ "gimli", ] @@ -198,75 +196,9 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "aead" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fc95d1bdb8e6666b2b217308eeeb09f2d6728d104be3e31916cc74d15420331" -dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "aes" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884391ef1066acaa41e766ba8f596341b96e93ce34f9a43e7d24bf0a0eaf0561" -dependencies = [ - "aes-soft", - "aesni", - "cipher 0.2.5", -] - -[[package]] -name = "aes" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" -dependencies = [ - "cfg-if 1.0.0", - "cipher 0.3.0", - "cpufeatures", - "opaque-debug 0.3.0", -] - -[[package]] -name = "aes-gcm" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5278b5fabbb9bd46e24aa69b2fdea62c99088e0a950a9be40e3e0101298f88da" -dependencies = [ - "aead", - "aes 0.6.0", - "cipher 0.2.5", - "ctr", - "ghash", - "subtle", -] - -[[package]] -name = "aes-soft" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be14c7498ea50828a38d0e24a765ed2effe92a705885b57d029cd67d45744072" -dependencies = [ - "cipher 0.2.5", - "opaque-debug 0.3.0", -] - -[[package]] -name = "aesni" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2e11f5e94c2f7d386164cc2aa1f97823fed6f259e486940a71c174dd01b0ce" -dependencies = [ - "cipher 0.2.5", - "opaque-debug 0.3.0", -] - [[package]] name = "agency_client" -version = "0.58.0" +version = "0.60.0" dependencies = [ "async-trait", "env_logger 0.9.3", @@ -283,34 +215,23 @@ dependencies = [ "uuid 0.8.2", ] -[[package]] -name = "ahash" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" -dependencies = [ - "getrandom 0.2.9", - "once_cell", - "version_check", -] - [[package]] name = "ahash" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "cfg-if 1.0.0", - "getrandom 0.2.9", + "cfg-if", + "getrandom 0.2.10", "once_cell", "version_check", ] [[package]] name = "aho-corasick" -version = "1.0.3" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8f9420f797f2d9e935edf629310eb938a0d839f984e25327f3c7eed22300c" +checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" dependencies = [ "memchr", ] @@ -331,29 +252,16 @@ dependencies = [ ] [[package]] -name = "amcl" -version = "0.2.0" +name = "allocator-api2" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee5cca1ddc8b9dceb55b7f1272a9d1e643d73006f350a20ab4926d24e33f0f0d" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" [[package]] -name = "amcl_wrapper" -version = "0.4.0" +name = "amcl" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c7c7c7627444413f6a488bf9e6d352aea6fcfa281123cd92ecac0b3c9ef5ef2" -dependencies = [ - "byteorder", - "lazy_static", - "miracl_core", - "rand 0.7.3", - "rayon", - "serde", - "serde_bytes", - "serde_json", - "sha3 0.8.2", - "subtle-encoding", - "zeroize", -] +checksum = "ee5cca1ddc8b9dceb55b7f1272a9d1e643d73006f350a20ab4926d24e33f0f0d" [[package]] name = "android-tzdata" @@ -363,19 +271,20 @@ checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" [[package]] name = "android_log-sys" -version = "0.1.2" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8052e2d8aabbb8d556d6abbcce2a22b9590996c5f849b9c7ce4544a2e3b984e" +checksum = "5ecc8056bf6ab9892dcd53216c83d1597487d7dacac16c8df6b877d127df9937" [[package]] name = "android_logger" -version = "0.5.3" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86983875e7c3a202e31471cc6d60fcc18f30e194f1729cfff3bfb43d646ffced" +checksum = "c494134f746c14dc653a35a4ea5aca24ac368529da5370ecf41fe0341c35772f" dependencies = [ "android_log-sys", - "lazy_static", + "env_logger 0.10.0", "log", + "once_cell", ] [[package]] @@ -389,69 +298,71 @@ dependencies = [ [[package]] name = "anoncreds-clsignatures" -version = "0.2.0" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f683463c4f5558a4eab7fe4625d94f08b7de9691453032bac43facf06fe46d2e" +checksum = "5f0a6071c39753711af4abaac08590f276980ae550d4e8f4b8c4a0b40a3dc39c" dependencies = [ "amcl", "glass_pumpkin", "log", - "num-bigint 0.4.3", + "num-bigint", "num-integer", "num-traits", "once_cell", "openssl", "rand 0.8.5", "serde", - "sha2 0.10.7", + "sha2", ] [[package]] name = "anyhow" -version = "1.0.72" +version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "aries-vcx" -version = "0.58.0" +version = "0.60.0" dependencies = [ "agency_client", "android_logger", "aries_vcx_core", "async-channel", "async-trait", - "base64 0.10.1", - "bs58 0.4.0", + "base64", + "bs58 0.5.0", "chrono", - 
"derive_builder 0.10.2", + "derive_builder 0.12.0", "diddoc_legacy", - "env_logger 0.9.3", + "env_logger 0.10.0", "futures", "lazy_static", "log", "messages", - "num-bigint 0.4.3", - "rand 0.7.3", + "mockall", + "num-bigint", + "rand 0.8.5", "regex", + "reqwest", "serde", "serde_derive", "serde_json", - "sha2 0.10.7", + "sha2", "shared_vcx", - "strum 0.16.0", - "strum_macros 0.16.0", + "strum 0.25.0", + "strum_macros 0.25.2", "thiserror", "time 0.3.20", "tokio", "url", - "uuid 0.8.2", + "uuid 1.4.1", "wallet_migrator", ] [[package]] name = "aries-vcx-agent" -version = "0.58.0" +version = "0.60.0" dependencies = [ "aries-vcx", "aries_vcx_core", @@ -481,8 +392,8 @@ dependencies = [ "lazy_static", "libvdrtools", "log", - "lru 0.10.1", - "rand 0.7.3", + "lru", + "rand 0.8.5", "serde", "serde_json", "thiserror", @@ -491,12 +402,6 @@ dependencies = [ "uuid 1.4.1", ] -[[package]] -name = "arrayref" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" - [[package]] name = "askama" version = "0.11.1" @@ -565,14 +470,14 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.5.1" +version = "1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fa3dc5f2a8564f07759c008b9109dc0d39de92a88d5588b8a5036d286383afb" +checksum = "2c1da3ae8dabd9c00f453a329dfe1fb28da3c0a72e2478cdcd93171740c20499" dependencies = [ "async-lock", "async-task", "concurrent-queue", - "fastrand", + "fastrand 2.0.1", "futures-lite", "slab", ] @@ -599,16 +504,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" dependencies = [ "async-lock", - "autocfg 1.1.0", - "cfg-if 1.0.0", + "autocfg", + "cfg-if", "concurrent-queue", "futures-lite", "log", "parking", "polling", - "rustix", + "rustix 0.37.24", "slab", - "socket2", + "socket2 0.4.9", "waker-fn", ] @@ -667,14 +572,14 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] name = "async-task" -version = "4.4.0" +version = "4.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc7ab41815b3c653ccd2978ec3255c81349336702dfdf62ee6f7069b12a3aae" +checksum = "b9441c6b2fe128a7c2bf680a44c34d0df31ce09e5b7e401fcca3faa483dbc921" [[package]] name = "async-trait" @@ -684,23 +589,23 @@ checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] name = "atoi" -version = "0.4.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" dependencies = [ "num-traits", ] [[package]] name = "atomic-waker" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "atty" @@ -713,15 +618,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "autocfg" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" 
-dependencies = [ - "autocfg 1.1.0", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -730,15 +626,15 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", - "miniz_oxide 0.6.2", + "miniz_oxide", "object", "rustc-demangle", ] @@ -751,24 +647,15 @@ checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" [[package]] name = "base64" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" -dependencies = [ - "byteorder", -] - -[[package]] -name = "base64" -version = "0.13.1" +version = "0.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" +checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" [[package]] -name = "base64" -version = "0.21.2" +name = "base64ct" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "bincode" @@ -790,38 +677,8 @@ name = "bitflags" version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" - -[[package]] -name = "blake2" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" -dependencies = [ - "crypto-mac 0.8.0", - "digest 0.9.0", - "opaque-debug 0.3.0", -] - -[[package]] -name = "block-buffer" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" -dependencies = [ - "block-padding 0.1.5", - "byte-tools", - "byteorder", - "generic-array 0.12.4", -] - -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "block-padding 0.2.1", - "generic-array 0.14.7", + "serde", ] [[package]] @@ -830,54 +687,30 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "block-modes" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a0e8073e8baa88212fb5823574c02ebccb395136ba9a164ab89379ec6072f0" -dependencies = [ - "block-padding 0.2.1", - "cipher 0.2.5", -] - -[[package]] -name = "block-padding" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" -dependencies = [ - "byte-tools", + "generic-array", ] -[[package]] -name = "block-padding" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - [[package]] name = "blocking" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77231a1c8f801696fc0123ec6150ce92cffb8e164a02afb9c8ddee0e9b65ad65" +checksum = "94c4ef1f913d78636d78d538eec1f18de81e481f44b1be0a81060090530846e1" dependencies = [ "async-channel", "async-lock", "async-task", - "atomic-waker", - "fastrand", + "fastrand 2.0.1", + "futures-io", "futures-lite", - "log", + "piper", + "tracing", ] [[package]] name = "brotli" -version = "3.3.4" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -886,9 +719,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.3.4" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +checksum = "da74e2b81409b1b743f8f0c62cc6254afefb8b8e50bbfe3735550f7aeefa3448" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -911,27 +744,21 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" - -[[package]] -name = "byte-tools" -version = "0.3.1" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "bytestring" @@ -942,15 +769,6 @@ dependencies = [ "bytes", ] -[[package]] -name = "c2-chacha" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "217192c943108d8b13bac38a1d51df9ce8a407a3f5a71ab633980665e68fbd9a" -dependencies = [ - "ppv-lite86", -] - [[package]] name = "camino" version = "1.1.6" @@ -985,81 +803,33 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.82" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ "jobserver", "libc", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "chacha20" -version = 
"0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed8738f14471a99f0e316c327e68fc82a3611cc2895fcb604b89eedaf8f39d95" -dependencies = [ - "cipher 0.2.5", - "zeroize", -] - -[[package]] -name = "chacha20poly1305" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af1fc18e6d90c40164bf6c317476f2a98f04661e310e79830366b7e914c58a8e" -dependencies = [ - "aead", - "chacha20", - "cipher 0.2.5", - "poly1305", - "zeroize", -] - [[package]] name = "chrono" -version = "0.4.26" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", - "time 0.1.45", "wasm-bindgen", - "winapi", -] - -[[package]] -name = "cipher" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801" -dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "cipher" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" -dependencies = [ - "generic-array 0.14.7", + "windows-targets", ] [[package]] @@ -1072,7 +842,7 @@ dependencies = [ "bitflags 1.3.2", "clap_derive", "clap_lex", - "indexmap", + "indexmap 1.9.3", "once_cell", "strsim 0.10.0", "termcolor", @@ -1085,7 +855,7 @@ version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" dependencies = [ - "heck 0.4.1", + "heck", "proc-macro-error", "proc-macro2", "quote", @@ -1101,15 +871,6 @@ dependencies = [ "os_str_bytes", ] -[[package]] -name = "cloudabi" -version = "0.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "cmake" version = "0.1.50" @@ -1121,18 +882,28 @@ dependencies = [ [[package]] name = "concurrent-queue" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" +checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" dependencies = [ "crossbeam-utils", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + [[package]] name = "const-oid" -version = "0.6.2" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6f2aa4d0537bcc1c74df8755072bd31c1ef1a3a1b85a68e8404a8c353b7b8b" +checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" [[package]] name = "convert_case" @@ -1194,26 +965,20 @@ dependencies = [ "libc", ] -[[package]] -name = "cpuid-bool" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb25d077389e53838a8158c8e99174c5a9d902dee4904320db714f3c653ffba" - [[package]] name = "crc" -version = "2.1.0" +version = "3.0.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" +checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" dependencies = [ "crc-catalog", ] [[package]] name = "crc-catalog" -version = "1.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" +checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" [[package]] name = "crc32fast" @@ -1221,134 +986,74 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] -name = "crossbeam-channel" -version = "0.5.8" +name = "crossbeam-queue" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", ] [[package]] -name = "crossbeam-deque" -version = "0.8.3" +name = "crossbeam-utils" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ - "cfg-if 1.0.0", - "crossbeam-epoch", - "crossbeam-utils", + "cfg-if", ] [[package]] -name = "crossbeam-epoch" -version = "0.9.15" +name = "crypto-common" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "autocfg 1.1.0", - "cfg-if 1.0.0", - "crossbeam-utils", - "memoffset", - "scopeguard", + "generic-array", + "typenum", ] [[package]] -name = "crossbeam-queue" -version = "0.3.8" +name = "ctor" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = "37e366bff8cd32dd8754b0991fb66b279dc48f598c3a18914852a6673deef583" dependencies = [ - "cfg-if 1.0.0", - "crossbeam-utils", + "quote", + "syn 2.0.38", ] [[package]] -name = "crossbeam-utils" -version = "0.8.16" +name = "curve25519-dalek" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" dependencies = [ - "cfg-if 1.0.0", -] - -[[package]] -name = "crypto-bigint" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83bd3bb4314701c568e340cd8cf78c975aa0ca79e03d3f6d1677d5b0c9c0c03" -dependencies = [ - "generic-array 0.14.7", - "rand_core 0.6.4", + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "platforms", + "rustc_version", "subtle", "zeroize", ] [[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array 0.14.7", - "typenum", -] - -[[package]] -name = 
"crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array 0.14.7", - "subtle", -] - -[[package]] -name = "crypto-mac" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" -dependencies = [ - "generic-array 0.14.7", - "subtle", -] - -[[package]] -name = "ctor" -version = "0.2.4" +name = "curve25519-dalek-derive" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f34ba9a9bcb8645379e9de8cb3ecfcf4d1c85ba66d90deb3259206fa5aa193b" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" dependencies = [ + "proc-macro2", "quote", - "syn 2.0.28", -] - -[[package]] -name = "ctr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb4a30d54f7443bf3d6191dcd486aca19e67cb3c49fa7a06a319966346707e7f" -dependencies = [ - "cipher 0.2.5", -] - -[[package]] -name = "curve25519-dalek" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0" -dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "subtle", - "zeroize", + "syn 2.0.38", ] [[package]] @@ -1361,16 +1066,6 @@ dependencies = [ "darling_macro 0.10.2", ] -[[package]] -name = "darling" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f2c43f534ea4b0b049015d00269734195e6d3f0f6635cb692251aca6f9f8b3c" -dependencies = [ - "darling_core 0.12.4", - "darling_macro 0.12.4", -] - [[package]] name = "darling" version = "0.14.4" @@ -1405,20 +1100,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "darling_core" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e91455b86830a1c21799d94524df0845183fa55bafd9aa137b01c7d1065fa36" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.10.0", - "syn 1.0.109", -] - [[package]] name = "darling_core" version = "0.14.4" @@ -1444,7 +1125,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -1458,17 +1139,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "darling_macro" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29b5acf0dea37a7f66f7b25d2c5e93fd46f8f6968b1a5d7a3e02e97768afc95a" -dependencies = [ - "darling_core 0.12.4", - "quote", - "syn 1.0.109", -] - [[package]] name = "darling_macro" version = "0.14.4" @@ -1488,7 +1158,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -1519,20 +1189,13 @@ dependencies = [ [[package]] name = "der" -version = "0.4.5" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79b71cca7d95d7681a4b3b9cdf63c8dbc3730d0584c2c74e31416d64a90493f4" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ "const-oid", -] - -[[package]] -name = "derive_builder" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d13202debe11181040ae9063d739fa32cfcaaebe2275fe387703460ae2365b30" -dependencies = [ - 
"derive_builder_macro 0.10.2", + "pem-rfc7468", + "zeroize", ] [[package]] @@ -1553,18 +1216,6 @@ dependencies = [ "derive_builder_macro 0.12.0", ] -[[package]] -name = "derive_builder_core" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66e616858f6187ed828df7c64a6d71720d83767a7f19740b2d1b6fe6327b36e5" -dependencies = [ - "darling 0.12.4", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "derive_builder_core" version = "0.11.2" @@ -1589,16 +1240,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "derive_builder_macro" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58a94ace95092c5acb1e97a7e846b310cfbd499652f72297da7493f618a98d73" -dependencies = [ - "derive_builder_core 0.10.2", - "syn 1.0.109", -] - [[package]] name = "derive_builder_macro" version = "0.11.2" @@ -1636,12 +1277,12 @@ dependencies = [ name = "did_doc" version = "0.1.0" dependencies = [ - "base64 0.21.2", + "base64", "bs58 0.5.0", "did_parser", "hex", "multibase", - "pem 2.0.1", + "pem", "public_key", "serde", "serde_json", @@ -1653,7 +1294,7 @@ dependencies = [ name = "did_doc_sov" version = "0.1.0" dependencies = [ - "base64 0.21.2", + "base64", "did_doc", "did_key", "public_key", @@ -1686,7 +1327,7 @@ name = "did_peer" version = "0.1.0" dependencies = [ "async-trait", - "base64 0.21.2", + "base64", "bs58 0.5.0", "did_doc", "did_doc_sov", @@ -1762,7 +1403,7 @@ dependencies = [ [[package]] name = "diddoc_legacy" -version = "0.58.0" +version = "0.60.0" dependencies = [ "serde", "serde_derive", @@ -1778,60 +1419,44 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" -[[package]] -name = "digest" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" -dependencies = [ - "generic-array 0.12.4", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array 0.14.7", -] - [[package]] name = "digest" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ - "block-buffer 0.10.4", + "block-buffer", + "const-oid", "crypto-common", + "subtle", ] [[package]] name = "dirs" -version = "2.0.2" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3" +checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" dependencies = [ - "cfg-if 0.1.10", "dirs-sys", ] [[package]] name = "dirs-sys" -version = "0.3.7" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" dependencies = [ "libc", + "option-ext", "redox_users", - "winapi", + "windows-sys", ] [[package]] -name = "dotenv" -version = "0.15.0" +name = "dotenvy" +version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" +checksum = 
"1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "downcast" @@ -1839,38 +1464,24 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" -[[package]] -name = "ecdsa" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43ee23aa5b4f68c7a092b5c3beb25f50c406adc75e2363634f242f28ab255372" -dependencies = [ - "der", - "elliptic-curve", - "hmac", - "signature", -] - [[package]] name = "ed25519" -version = "1.5.3" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +checksum = "60f6d271ca33075c88028be6f04d502853d63a5ece419d269c15315d4fc1cf1d" dependencies = [ "signature", ] [[package]] name = "ed25519-dalek" -version = "1.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" dependencies = [ "curve25519-dalek", "ed25519", - "rand 0.7.3", - "serde", - "sha2 0.9.9", + "sha2", "zeroize", ] @@ -1879,6 +1490,9 @@ name = "either" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +dependencies = [ + "serde", +] [[package]] name = "elastic-array-plus" @@ -1886,29 +1500,13 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562cc8504a01eb20c10fb154abd7c4baeb9beba2329cf85838ee2bd48a468b18" -[[package]] -name = "elliptic-curve" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beca177dcb8eb540133e7680baff45e7cc4d93bf22002676cec549f82343721b" -dependencies = [ - "crypto-bigint", - "ff", - "generic-array 0.14.7", - "group", - "pkcs8", - "rand_core 0.6.4", - "subtle", - "zeroize", -] - [[package]] name = "encoding_rs" -version = "0.8.32" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1937,15 +1535,21 @@ dependencies = [ "termcolor", ] +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = "errno" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" +checksum = "add4f07d43996f76ef320709726a556a9d4f965d9410d8d0271132d2f8293480" dependencies = [ "errno-dragonfly", "libc", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -1964,6 +1568,17 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9435d864e017c3c6afeac1654189b06cdb491cf2ff73dbf0d73b0f292f42ff8" +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys", +] + [[package]] name = 
"etcommon-hexutil" version = "0.2.4" @@ -2020,14 +1635,10 @@ dependencies = [ ] [[package]] -name = "ff" -version = "0.10.1" +name = "fastrand" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f40b2dcd8bc322217a5f6559ae5f9e9d1de202a2ecee2e9eafcbece7562a4f" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "ffi-support" @@ -2039,14 +1650,26 @@ dependencies = [ "log", ] +[[package]] +name = "fiat-crypto" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0870c84016d4b481be5c9f323c24f65e31e901ae618f0e80f4308fb00de1d2d" + +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + [[package]] name = "flate2" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" dependencies = [ "crc32fast", - "miniz_oxide 0.7.1", + "miniz_oxide", ] [[package]] @@ -2058,6 +1681,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "flume" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +dependencies = [ + "futures-core", + "futures-sink", + "spin 0.9.8", +] + [[package]] name = "fnv" version = "1.0.7" @@ -2100,12 +1734,6 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0845fa252299212f0389d64ba26f34fa32cfe41588355f21ed507c59a0f64541" -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - [[package]] name = "futures" version = "0.3.28" @@ -2151,13 +1779,13 @@ dependencies = [ [[package]] name = "futures-intrusive" -version = "0.4.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604f7a68fbf8103337523b1fadc8ade7361ee3f112f7c680ad179651616aed5" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" dependencies = [ "futures-core", "lock_api", - "parking_lot 0.11.2", + "parking_lot", ] [[package]] @@ -2172,7 +1800,7 @@ version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" dependencies = [ - "fastrand", + "fastrand 1.9.0", "futures-core", "futures-io", "memchr", @@ -2189,7 +1817,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -2222,15 +1850,6 @@ dependencies = [ "slab", ] -[[package]] -name = "generic-array" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" -dependencies = [ - "typenum", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -2247,7 +1866,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ - 
"cfg-if 1.0.0", + "cfg-if", "js-sys", "libc", "wasi 0.9.0+wasi-snapshot-preview1", @@ -2256,30 +1875,20 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.11.0+wasi-snapshot-preview1", ] -[[package]] -name = "ghash" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97304e4cd182c3846f7575ced3890c53012ce534ad9114046b0a9e00bb30a375" -dependencies = [ - "opaque-debug 0.3.0", - "polyval", -] - [[package]] name = "gimli" -version = "0.27.3" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" [[package]] name = "glass_pumpkin" @@ -2288,7 +1897,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e847fe780e2fd8aa993bef2124361c285349ff0e9315e8285f8126386b54a9" dependencies = [ "core2", - "num-bigint 0.4.3", + "num-bigint", "num-integer", "num-traits", "once_cell", @@ -2324,22 +1933,11 @@ dependencies = [ "scroll", ] -[[package]] -name = "group" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c363a5301b8f153d80747126a04b3c82073b9fe3130571a9d170cacdeaf7912" -dependencies = [ - "ff", - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "h2" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ "bytes", "fnv", @@ -2347,64 +1945,47 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap", + "indexmap 1.9.3", "slab", "tokio", "tokio-util", "tracing", ] -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash 0.7.6", -] - [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash 0.7.6", -] [[package]] name = "hashbrown" -version = "0.13.2" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" dependencies = [ - "ahash 0.8.3", + "ahash", + "allocator-api2", ] [[package]] name = "hashlink" -version = "0.7.0" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.11.2", + "hashbrown 0.14.1", ] [[package]] name = "heck" -version = "0.3.3" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +checksum = 
"95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" dependencies = [ "unicode-segmentation", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - [[package]] name = "hermit-abi" version = "0.1.19" @@ -2416,9 +1997,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" [[package]] name = "hex" @@ -2428,22 +2009,29 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hkdf" -version = "0.11.0" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01706d578d5c281058480e673ae4086a9f4710d8df1ad80a5b03e39ece5f886b" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" dependencies = [ - "digest 0.9.0", "hmac", ] [[package]] name = "hmac" -version = "0.11.0" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" dependencies = [ - "crypto-mac 0.11.1", - "digest 0.9.0", + "windows-sys", ] [[package]] @@ -2454,7 +2042,7 @@ checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ "bytes", "fnv", - "itoa 1.0.9", + "itoa", ] [[package]] @@ -2501,9 +2089,9 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 1.0.9", + "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -2568,16 +2156,25 @@ version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ - "autocfg 1.1.0", + "autocfg", "hashbrown 0.12.3", ] +[[package]] +name = "indexmap" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" +dependencies = [ + "equivalent", + "hashbrown 0.14.1", +] + [[package]] name = "indy-api-types" version = "0.1.0" dependencies = [ - "aes 0.7.5", - "bs58 0.4.0", + "bs58 0.5.0", "futures", "libc", "log", @@ -2588,7 +2185,6 @@ dependencies = [ "sqlx", "thiserror", "ursa", - "zeroize", ] [[package]] @@ -2600,60 +2196,68 @@ dependencies = [ "amcl", "rand 0.8.5", "serde", - "sha2 0.10.7", - "sha3 0.10.8", + "sha2", + "sha3", ] [[package]] name = "indy-credx" -version = "1.0.1" -source = "git+https://github.com/hyperledger/indy-shared-rs?tag=v1.0.1#32a1943811e719540567769264e14f94e51c60ee" +version = "1.1.0" +source = "git+https://github.com/hyperledger/indy-shared-rs?tag=v1.1.0#0260b93f76573613cedb486bc8836c75c47d4cf4" dependencies = [ "env_logger 0.10.0", "ffi-support", - "indy-data-types 0.6.1 (git+https://github.com/hyperledger/indy-shared-rs?tag=v1.0.1)", - "indy-utils 0.6.0 (git+https://github.com/hyperledger/indy-shared-rs?tag=v1.0.1)", + "indy-data-types 0.7.0 
(git+https://github.com/hyperledger/indy-shared-rs?tag=v1.1.0)", "log", "once_cell", "rand 0.8.5", "regex", "serde", "serde_json", - "sha2 0.10.7", - "thiserror", + "sha2", "zeroize", ] [[package]] name = "indy-data-types" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bc9972c657fd152d394f61b00d677aa8a700abd8c39137164e399a1d9fd0c6d" +checksum = "72a8c97ba7f3a0af57c6895f2b7836fc00aa7ed3a56ce28e2367ded9dea3d9be" dependencies = [ "anoncreds-clsignatures", + "bs58 0.5.0", + "curve25519-dalek", + "ed25519-dalek", "hex", - "indy-utils 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell", + "rand 0.8.5", "regex", "serde", "serde_json", - "sha2 0.10.7", + "sha2", + "thiserror", + "x25519-dalek", "zeroize", ] [[package]] name = "indy-data-types" -version = "0.6.1" -source = "git+https://github.com/hyperledger/indy-shared-rs?tag=v1.0.1#32a1943811e719540567769264e14f94e51c60ee" +version = "0.7.0" +source = "git+https://github.com/hyperledger/indy-shared-rs?tag=v1.1.0#0260b93f76573613cedb486bc8836c75c47d4cf4" dependencies = [ "anoncreds-clsignatures", + "bs58 0.5.0", + "curve25519-dalek", + "ed25519-dalek", "hex", - "indy-utils 0.6.0 (git+https://github.com/hyperledger/indy-shared-rs?tag=v1.0.1)", "once_cell", + "rand 0.8.5", "regex", "serde", "serde_json", - "sha2 0.10.7", + "sha2", + "thiserror", + "x25519-dalek", "zeroize", ] @@ -2673,7 +2277,7 @@ dependencies = [ name = "indy-utils" version = "0.1.0" dependencies = [ - "base64 0.10.1", + "base64", "dirs", "failure", "indy-api-types", @@ -2681,8 +2285,8 @@ dependencies = [ "libc", "log", "openssl", - "rand 0.7.3", - "rmp-serde 0.13.7", + "rand 0.8.5", + "rmp-serde", "serde", "serde_derive", "serde_json", @@ -2690,44 +2294,12 @@ dependencies = [ "zeroize", ] -[[package]] -name = "indy-utils" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d7e0cdcece8d97982e82aba5b0cb8c7e87ffb5f4fa48b935a3647d24db5439" -dependencies = [ - "bs58 0.5.0", - "curve25519-dalek", - "ed25519-dalek", - "once_cell", - "rand 0.8.5", - "regex", - "serde", - "sha2 0.10.7", - "thiserror", - "x25519-dalek", - "zeroize", -] - -[[package]] -name = "indy-utils" -version = "0.6.0" -source = "git+https://github.com/hyperledger/indy-shared-rs?tag=v1.0.1#32a1943811e719540567769264e14f94e51c60ee" -dependencies = [ - "bs58 0.5.0", - "once_cell", - "regex", - "serde", - "thiserror", - "zeroize", -] - [[package]] name = "indy-vdr" -version = "0.4.0-dev.16" -source = "git+https://github.com/hyperledger/indy-vdr.git?rev=879e29e#879e29e8b174b5ca1f031d8d70fa1fcb32a40cf0" +version = "0.4.0" +source = "git+https://github.com/hyperledger/indy-vdr.git?rev=c143268#c143268c2c86b300f8b8a33b57a4cff06de35626" dependencies = [ - "base64 0.21.2", + "base64", "bs58 0.5.0", "env_logger 0.10.0", "etcommon-rlp", @@ -2737,19 +2309,18 @@ dependencies = [ "futures-util", "hex", "indy-blssignatures", - "indy-data-types 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "indy-utils 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", + "indy-data-types 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "log", "once_cell", "percent-encoding", "pin-utils", "rand 0.8.5", "regex", - "rmp-serde 1.1.2", + "rmp-serde", "serde", "serde_json", - "sha2 0.10.7", - "sha3 0.10.8", + "sha2", + "sha3", "thiserror", "time 0.3.20", "url", @@ -2759,7 +2330,7 @@ dependencies = [ [[package]] name = "indy-vdr-proxy-client" version = "0.1.0" -source = 
"git+https://github.com/hyperledger/indy-vdr.git?rev=879e29e#879e29e8b174b5ca1f031d8d70fa1fcb32a40cf0" +source = "git+https://github.com/hyperledger/indy-vdr.git?rev=c143268#c143268c2c86b300f8b8a33b57a4cff06de35626" dependencies = [ "indy-vdr", "reqwest", @@ -2773,15 +2344,15 @@ version = "0.1.0" dependencies = [ "async-std", "async-trait", - "bs58 0.4.0", + "bs58 0.5.0", "byteorder", "futures", "indy-api-types", - "indy-utils 0.1.0", + "indy-utils", "libc", "log", - "lru 0.7.8", - "rmp-serde 0.13.7", + "lru", + "rmp-serde", "serde", "serde_derive", "serde_json", @@ -2795,7 +2366,7 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -2810,9 +2381,9 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi 0.3.3", "libc", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -2823,14 +2394,13 @@ checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-terminal" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.2", - "io-lifetimes", - "rustix", - "windows-sys 0.48.0", + "hermit-abi 0.3.3", + "rustix 0.38.17", + "windows-sys", ] [[package]] @@ -2852,10 +2422,13 @@ dependencies = [ ] [[package]] -name = "itoa" -version = "0.4.8" +name = "itertools" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] [[package]] name = "itoa" @@ -2881,18 +2454,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "k256" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "903ae2481bcdfdb7b68e0a9baa4b7c9aff600b9ae2e8e5bb5833b8c91ab851ea" -dependencies = [ - "cfg-if 1.0.0", - "ecdsa", - "elliptic-curve", - "sha2 0.9.9", -] - [[package]] name = "keccak" version = "0.1.4" @@ -2923,14 +2484,14 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" dependencies = [ - "spin", + "spin 0.5.2", ] [[package]] name = "libc" -version = "0.2.139" +version = "0.2.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" [[package]] name = "libloading" @@ -2938,15 +2499,15 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "winapi", ] [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = 
"4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libsodium-sys" @@ -2960,9 +2521,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.22.2" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290b64917f8b0cb885d9de0f9959fe1f775d7fa12f1da2db9001c1c8ab60f89d" +checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" dependencies = [ "cc", "pkg-config", @@ -2971,22 +2532,23 @@ dependencies = [ [[package]] name = "libvcx_core" -version = "0.58.0" +version = "0.60.0" dependencies = [ "agency_client", "aries-vcx", + "aries_vcx_core", "async-trait", - "cfg-if 1.0.0", + "cfg-if", "chrono", "diddoc_legacy", - "env_logger 0.9.3", + "env_logger 0.10.0", "futures", "lazy_static", "libc", "log", "num-traits", "once_cell", - "rand 0.7.3", + "rand 0.8.5", "serde", "serde_derive", "serde_json", @@ -2994,7 +2556,8 @@ dependencies = [ "time 0.3.20", "tokio", "url", - "uuid 0.7.4", + "uuid 1.4.1", + "wallet_migrator", ] [[package]] @@ -3003,33 +2566,23 @@ version = "0.8.6" dependencies = [ "async-std", "async-trait", - "bs58 0.4.0", - "byteorder", + "bs58 0.5.0", "dirs", - "etcommon-rlp", "failure", "futures", "hex", "indy-api-types", - "indy-utils 0.1.0", + "indy-utils", "indy-wallet", "lazy_static", "libc", "log", "log-derive", - "num-derive", - "num-traits", - "rand 0.8.5", "regex", - "rmp-serde 1.1.2", "serde", "serde_derive", "serde_json", - "sha2 0.9.9", - "sha3 0.9.1", - "time 0.3.20", "ursa", - "uuid 0.8.2", "zeroize", ] @@ -3039,15 +2592,20 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3852614a3bd9ca9804678ba6be5e3b8ce76dfc902cae004e3e0c44051b6e88db" + [[package]] name = "local-channel" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f303ec0e94c6c54447f84f3b0ef7af769858a9c4ef56ef2a986d3dcd4c3fc9c" +checksum = "e0a493488de5f18c8ffcba89eebb8532ffc562dc400490eb65b84893fae0b178" dependencies = [ "futures-core", "futures-sink", - "futures-util", "local-waker", ] @@ -3063,7 +2621,7 @@ version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ - "autocfg 1.1.0", + "autocfg", "scopeguard", ] @@ -3078,12 +2636,11 @@ dependencies = [ [[package]] name = "log-derive" -version = "0.3.2" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7f436d3b5b51857b145075009f3a0d88dd37d2e93f42bb227045f4562a131e" +checksum = "6a42526bb432bcd1b43571d5f163984effa25409a29f1a3242a54d0577d55bcf" dependencies = [ "darling 0.10.2", - "log", "proc-macro2", "quote", "syn 1.0.109", @@ -3091,40 +2648,32 @@ dependencies = [ [[package]] name = "lru" -version = "0.7.8" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999beba7b6e8345721bd280141ed958096a2e4abdf74f67ff4ce49b4b54e47a" +checksum = "1efa59af2ddfad1854ae27d75009d538d0998b4b2fd47083e743ac1a10e46c60" dependencies = [ - "hashbrown 0.12.3", + "hashbrown 0.14.1", ] [[package]] -name = "lru" -version = "0.10.1" +name = "md-5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"718e8fae447df0c7e1ba7f5189829e63fd536945c8988d61444c19039f16b670" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ - "hashbrown 0.13.2", + "cfg-if", + "digest", ] [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" - -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg 1.1.0", -] +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "messages" -version = "0.58.0" +version = "0.60.0" dependencies = [ "chrono", "derive_more", @@ -3152,7 +2701,7 @@ dependencies = [ "proc-macro2", "quote", "shared_vcx", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -3188,15 +2737,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" -[[package]] -name = "miniz_oxide" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" -dependencies = [ - "adler", -] - [[package]] name = "miniz_oxide" version = "0.7.1" @@ -3215,22 +2755,16 @@ dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.48.0", + "windows-sys", ] -[[package]] -name = "miracl_core" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4330eca86d39f2b52d0481aa1e90fe21bfa61f11b0bf9b48ab95595013cefe48" - [[package]] name = "mockall" version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c84490118f2ee2d74570d114f3d0493cbf02790df303d2707606c3e14e07c96" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "downcast", "fragile", "lazy_static", @@ -3245,7 +2779,7 @@ version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "proc-macro2", "quote", "syn 1.0.109", @@ -3264,9 +2798,9 @@ dependencies = [ [[package]] name = "napi" -version = "2.13.2" +version = "2.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ede2d12cd6fce44da537a4be1f5510c73be2506c2e32dfaaafd1f36968f3a0e" +checksum = "fd063c93b900149304e3ba96ce5bf210cd4f81ef5eb80ded0d100df3e85a3ac0" dependencies = [ "bitflags 2.4.0", "ctor", @@ -3288,7 +2822,7 @@ version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da1c6a8fa84d549aa8708fcd062372bf8ec6e849de39016ab921067d21bde367" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "convert_case 0.6.0", "napi-derive-backend", "proc-macro2", @@ -3356,22 +2890,11 @@ checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "num-bigint" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6f7833f2cbf2360a6cfd58cd41a53aa7a90bd4c202f5b1c7dd2ed73c57b2c3" -dependencies = [ - "autocfg 1.1.0", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-bigint" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" dependencies = [ - "autocfg 1.1.0", + "autocfg", "num-integer", "num-traits", "rand 0.8.5", @@ -3379,9 +2902,9 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.7.1" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9bc3e36fd683e004fd59c64a425e0e991616f5a8b617c3b9a933a93c168facc" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" dependencies = [ "byteorder", "lazy_static", @@ -3394,24 +2917,13 @@ dependencies = [ "zeroize", ] -[[package]] -name = "num-derive" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "num-integer" version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ - "autocfg 1.1.0", + "autocfg", "num-traits", ] @@ -3421,7 +2933,7 @@ version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" dependencies = [ - "autocfg 1.1.0", + "autocfg", "num-integer", "num-traits", ] @@ -3432,7 +2944,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ - "autocfg 1.1.0", + "autocfg", "libm", ] @@ -3442,15 +2954,15 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi 0.3.3", "libc", ] [[package]] name = "object" -version = "0.30.4" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "memchr", ] @@ -3461,26 +2973,14 @@ version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" -[[package]] -name = "opaque-debug" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" - -[[package]] -name = "opaque-debug" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" - [[package]] name = "openssl" -version = "0.10.56" +version = "0.10.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" +checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" dependencies = [ - "bitflags 1.3.2", - "cfg-if 1.0.0", + "bitflags 2.4.0", + "cfg-if", "foreign-types", "libc", "once_cell", @@ -3496,7 +2996,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -3507,9 +3007,9 @@ checksum = 
"ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.91" +version = "0.9.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac" +checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" dependencies = [ "cc", "libc", @@ -3517,6 +3017,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + [[package]] name = "os_str_bytes" version = "6.5.1" @@ -3525,20 +3031,9 @@ checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac" [[package]] name = "parking" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" - -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] +checksum = "e52c774a4c39359c1d1c52e43f73dd91a75a614652c825408eec30c95a9b2067" [[package]] name = "parking_lot" @@ -3547,21 +3042,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.8", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if 1.0.0", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -3570,11 +3051,11 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.3.5", "smallvec", - "windows-targets 0.48.1", + "windows-targets", ] [[package]] @@ -3585,23 +3066,21 @@ checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "pem" -version = "0.8.3" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb" +checksum = "6b13fe415cdf3c8e44518e18a7c95a13431d9bdf6d15367d82b23c377fdd441a" dependencies = [ - "base64 0.13.1", - "once_cell", - "regex", + "base64", + "serde", ] [[package]] -name = "pem" -version = "2.0.1" +name = "pem-rfc7468" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b13fe415cdf3c8e44518e18a7c95a13431d9bdf6d15367d82b23c377fdd441a" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" dependencies = [ - "base64 0.21.2", - "serde", + "base64ct", ] [[package]] @@ -3630,9 +3109,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ 
-3640,11 +3119,33 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +dependencies = [ + "atomic-waker", + "fastrand 2.0.1", + "futures-io", +] + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + [[package]] name = "pkcs8" -version = "0.7.6" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee3ef9b64d26bad0536099c816c6734379e45bbd5f14798def6809e5cc350447" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", "spki", @@ -3662,41 +3163,26 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" +[[package]] +name = "platforms" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" + [[package]] name = "polling" version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" dependencies = [ - "autocfg 1.1.0", + "autocfg", "bitflags 1.3.2", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "libc", "log", "pin-project-lite", - "windows-sys 0.48.0", -] - -[[package]] -name = "poly1305" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b7456bc1ad2d4cf82b3a016be4c2ac48daf11bf990c1603ebd447fe6f30fca8" -dependencies = [ - "cpuid-bool", - "universal-hash", -] - -[[package]] -name = "polyval" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eebcc4aa140b9abd2bc40d9c3f7ccec842679cd79045ac3a7ac698c1a064b7cd" -dependencies = [ - "cpuid-bool", - "opaque-debug 0.3.0", - "universal-hash", + "windows-sys", ] [[package]] @@ -3713,7 +3199,7 @@ checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" dependencies = [ "difflib", "float-cmp", - "itertools", + "itertools 0.10.5", "normalize-line-endings", "predicates-core", "regex", @@ -3761,9 +3247,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "5b1106fec09662ec6dd98ccac0f81cef56984d0b49f75c92d8cbad76e20c005c" dependencies = [ "unicode-ident", ] @@ -3772,7 +3258,7 @@ dependencies = [ name = "public_key" version = "0.1.0" dependencies = [ - "base64 0.21.2", + "base64", "bs58 0.5.0", "multibase", "serde", @@ -3783,32 +3269,13 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.32" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] -[[package]] -name = "rand" -version = "0.6.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -dependencies = [ - "autocfg 0.1.8", - "libc", - "rand_chacha 0.1.1", - "rand_core 0.4.2", - "rand_hc 0.1.0", - "rand_isaac", - "rand_jitter", - "rand_os", - "rand_pcg", - "rand_xorshift", - "winapi", -] - [[package]] name = "rand" version = "0.7.3" @@ -3817,9 +3284,9 @@ checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" dependencies = [ "getrandom 0.1.16", "libc", - "rand_chacha 0.2.1", + "rand_chacha 0.2.2", "rand_core 0.5.1", - "rand_hc 0.2.0", + "rand_hc", ] [[package]] @@ -3835,21 +3302,11 @@ dependencies = [ [[package]] name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.3.1", -] - -[[package]] -name = "rand_chacha" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" dependencies = [ - "c2-chacha", + "ppv-lite86", "rand_core 0.5.1", ] @@ -3863,21 +3320,6 @@ dependencies = [ "rand_core 0.6.4", ] -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - [[package]] name = "rand_core" version = "0.5.1" @@ -3893,16 +3335,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.9", -] - -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" -dependencies = [ - "rand_core 0.3.1", + "getrandom 0.2.10", ] [[package]] @@ -3914,90 +3347,6 @@ dependencies = [ "rand_core 0.5.1", ] -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" -dependencies = [ - "libc", - "rand_core 0.4.2", - "winapi", -] - -[[package]] -name = "rand_os" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -dependencies = [ - "cloudabi", - "fuchsia-cprng", - "libc", - "rand_core 0.4.2", - "rdrand", - "winapi", -] - -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.4.2", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rayon" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-utils", - "num_cpus", -] - -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "redox_syscall" version = "0.2.16" @@ -4022,16 +3371,16 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", "redox_syscall 0.2.16", "thiserror", ] [[package]] name = "regex" -version = "1.9.3" +version = "1.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff" dependencies = [ "aho-corasick", "memchr", @@ -4041,9 +3390,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.6" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" +checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" dependencies = [ "aho-corasick", "memchr", @@ -4052,17 +3401,17 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" [[package]] name = "reqwest" -version = "0.11.18" +version = "0.11.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" +checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" dependencies = [ - "base64 0.21.2", + "base64", "bytes", "encoding_rs", "futures-core", @@ -4083,6 +3432,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", + "system-configuration", "tokio", "tokio-native-tls", "tower-service", @@ -4102,7 +3452,7 @@ dependencies = [ "cc", "libc", "once_cell", - "spin", + "spin 0.5.2", "untrusted", "web-sys", "winapi", @@ -4119,17 +3469,6 @@ dependencies = [ "paste", ] -[[package]] -name = "rmp-serde" -version = "0.13.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "011e1d58446e9fa3af7cdc1fb91295b10621d3ac4cb3a85cc86385ee9ca50cd3" -dependencies = [ - "byteorder", - "rmp", - "serde", -] - [[package]] name = "rmp-serde" version = "1.1.2" @@ -4143,20 +3482,22 @@ dependencies = [ [[package]] name = "rsa" -version = "0.4.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7b0aeddcca1082112a6eeb43bf25fd7820b066aaf6eaef776e19d0a1febe38fe" +checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" dependencies = [ "byteorder", - "digest 0.9.0", - "lazy_static", + "const-oid", + "digest", "num-bigint-dig", "num-integer", "num-iter", "num-traits", - "pem 0.8.3", - "rand 0.8.5", - "simple_asn1", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", "subtle", "zeroize", ] @@ -4178,29 +3519,59 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.3" +version = "0.37.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b24138615de35e32031d041a09032ef3487a616d901ca4db224e7d557efae2" +checksum = "4279d76516df406a8bd37e7dff53fd37d1a093f997a3c34a5c21658c126db06d" dependencies = [ "bitflags 1.3.2", "errno", "io-lifetimes", "libc", - "linux-raw-sys", - "windows-sys 0.45.0", + "linux-raw-sys 0.3.8", + "windows-sys", +] + +[[package]] +name = "rustix" +version = "0.38.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f25469e9ae0f3d0047ca8b93fc56843f38e6774f0914a107ff8b41be8be8e0b7" +dependencies = [ + "bitflags 2.4.0", + "errno", + "libc", + "linux-raw-sys 0.4.8", + "windows-sys", ] [[package]] name = "rustls" -version = "0.19.1" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" +checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8" dependencies = [ - "base64 0.13.1", - "log", "ring", + "rustls-webpki", "sct", - "webpki", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +dependencies = [ + "base64", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" +dependencies = [ + "ring", + "untrusted", ] [[package]] @@ -4221,7 +3592,7 @@ version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" dependencies = [ - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -4247,39 +3618,19 @@ checksum = "1db149f81d46d2deba7cd3c50772474707729550221e69588478ebf9ada425ae" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] name = "sct" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" dependencies = [ "ring", "untrusted", ] -[[package]] -name = "secp256k1" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6179428c22c73ac0fbb7b5579a56353ce78ba29759b3b8575183336ea74cdfb" -dependencies = [ - "rand 0.6.5", - "secp256k1-sys", - "serde", -] - -[[package]] -name = "secp256k1-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11553d210db090930f4432bea123b31f70bbf693ace14504ea2a35e796c28dd2" -dependencies = [ - "cc", -] - [[package]] name = "security-framework" version = "2.9.2" @@ -4305,9 +3656,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.18" +version = "1.0.19" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" +checksum = "ad977052201c6de01a8ef2aa3378c4bd23217a056337d1d6da40468d267a4fb0" dependencies = [ "serde", ] @@ -4321,15 +3672,6 @@ dependencies = [ "serde_derive", ] -[[package]] -name = "serde_bytes" -version = "0.11.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab33ec92f677585af6d88c65593ae2375adde54efdbf16d597f2cbc7a6d368ff" -dependencies = [ - "serde", -] - [[package]] name = "serde_derive" version = "1.0.188" @@ -4338,16 +3680,16 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] name = "serde_json" -version = "1.0.104" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" dependencies = [ - "itoa 1.0.9", + "itoa", "ryu", "serde", ] @@ -4359,110 +3701,59 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.9", + "itoa", "ryu", "serde", ] -[[package]] -name = "sha-1" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.0", -] - [[package]] name = "sha1" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" -dependencies = [ - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha2" -version = "0.9.9" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.0", + "digest", ] [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", - "digest 0.10.7", + "digest", ] [[package]] name = "sha256" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f5ed5ebbe2d9fb5c5e67be64aa462053d707941e02ffb5e65b6200c00b6161c" +checksum = "7895c8ae88588ccead14ff438b939b0c569cd619116f14b4d13fdff7b8333386" dependencies = [ "async-trait", "bytes", "hex", - "sha2 0.10.7", + "sha2", "tokio", ] -[[package]] -name = "sha3" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd26bc0e7a2e3a7c959bc494caf58b72ee0c71d67704e9520f736ca7e4853ecf" -dependencies = [ - "block-buffer 0.7.3", - "byte-tools", - "digest 0.8.1", - "keccak", - "opaque-debug 0.2.3", -] - -[[package]] -name = "sha3" -version = "0.9.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "keccak", - "opaque-debug 0.3.0", -] - [[package]] name = "sha3" version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" dependencies = [ - "digest 0.10.7", + "digest", "keccak", ] [[package]] name = "shared_vcx" -version = "0.58.0" +version = "0.60.0" dependencies = [ "bs58 0.4.0", "lazy_static", @@ -4483,26 +3774,14 @@ dependencies = [ [[package]] name = "signature" -version = "1.3.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2807892cfa58e081aa1f1111391c7a0649d4fa127a4ffbe34bcbfb35a1171a4" +checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" dependencies = [ - "digest 0.9.0", + "digest", "rand_core 0.6.4", ] -[[package]] -name = "simple_asn1" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eb4ea60fb301dc81dfc113df680571045d375ab7345d171c5dc7d7e13107a80" -dependencies = [ - "chrono", - "num-bigint 0.4.3", - "num-traits", - "thiserror", -] - [[package]] name = "simple_message_relay" version = "0.1.0" @@ -4512,24 +3791,24 @@ dependencies = [ [[package]] name = "siphasher" -version = "0.3.10" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" [[package]] name = "slab" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ - "autocfg 1.1.0", + "autocfg", ] [[package]] name = "smallvec" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" [[package]] name = "socket2" @@ -4541,6 +3820,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +dependencies = [ + "libc", + "windows-sys", +] + [[package]] name = "sodiumoxide" version = "0.0.16" @@ -4558,115 +3847,231 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + [[package]] name = "spki" -version = "0.4.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c01a0c15da1b0b0e1494112e7af814a678fec9bd157881b49beac661e9b6f32" +checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" dependencies = [ + "base64ct", "der", ] [[package]] -name = "sqlformat" -version = "0.1.8" +name = "sqlformat" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" +dependencies = [ + "itertools 0.11.0", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d" +dependencies = [ + "ahash", + "atoi", + "byteorder", + "bytes", + "crc", + "crossbeam-queue", + "dotenvy", + "either", + "event-listener", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashlink", + "hex", + "indexmap 2.0.2", + "log", + "memchr", + "once_cell", + "paste", + "percent-encoding", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlformat", + "thiserror", + "tokio", + "tokio-stream", + "tracing", + "url", + "webpki-roots", +] + +[[package]] +name = "sqlx-macros" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4b7922be017ee70900be125523f38bdd644f4f06a1b16e8fa5a8ee8c34bffd4" +checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec" dependencies = [ - "itertools", - "nom", - "unicode_categories", + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn 1.0.109", ] [[package]] -name = "sqlx" -version = "0.5.8" -source = "git+https://github.com/jovfer/sqlx?branch=feature/json_no_preserve_order_v5#7b9b4b371071e7d29d3b10da5a205460b3fc2de4" +name = "sqlx-macros-core" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc" dependencies = [ + "dotenvy", + "either", + "heck", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", "sqlx-core", - "sqlx-macros", + "sqlx-mysql", + "sqlx-sqlite", + "syn 1.0.109", + "tempfile", + "tokio", + "url", ] [[package]] -name = "sqlx-core" -version = "0.5.8" -source = "git+https://github.com/jovfer/sqlx?branch=feature/json_no_preserve_order_v5#7b9b4b371071e7d29d3b10da5a205460b3fc2de4" +name = "sqlx-mysql" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" dependencies = [ - "ahash 0.7.6", "atoi", - "base64 0.13.1", - "bitflags 1.3.2", + "base64", + "bitflags 2.4.0", "byteorder", "bytes", "crc", - "crossbeam-channel", - "crossbeam-queue", - "crossbeam-utils", - "digest 0.9.0", + "digest", + "dotenvy", "either", "futures-channel", "futures-core", - "futures-intrusive", + "futures-io", "futures-util", - "generic-array 0.14.7", - "hashlink", + "generic-array", "hex", - "indexmap", - "itoa 0.4.8", - "libc", - "libsqlite3-sys", + "hkdf", + "hmac", + "itoa", "log", + "md-5", "memchr", - "num-bigint 0.3.3", "once_cell", - "parking_lot 0.11.2", "percent-encoding", "rand 0.8.5", "rsa", - "rustls", "serde", - "serde_json", - "sha-1", - "sha2 0.9.9", + "sha1", + "sha2", "smallvec", - "sqlformat", - "sqlx-rt", + "sqlx-core", "stringprep", "thiserror", - "tokio-stream", - "url", - "webpki", - "webpki-roots", + "tracing", "whoami", ] [[package]] 
-name = "sqlx-macros" -version = "0.5.8" -source = "git+https://github.com/jovfer/sqlx?branch=feature/json_no_preserve_order_v5#7b9b4b371071e7d29d3b10da5a205460b3fc2de4" +name = "sqlx-postgres" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" dependencies = [ - "dotenv", - "either", - "heck 0.3.3", + "atoi", + "base64", + "bitflags 2.4.0", + "byteorder", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", "once_cell", - "proc-macro2", - "quote", + "rand 0.8.5", + "serde", "serde_json", - "sha2 0.9.9", + "sha1", + "sha2", + "smallvec", "sqlx-core", - "sqlx-rt", - "syn 1.0.109", - "url", + "stringprep", + "thiserror", + "tracing", + "whoami", ] [[package]] -name = "sqlx-rt" -version = "0.5.8" -source = "git+https://github.com/jovfer/sqlx?branch=feature/json_no_preserve_order_v5#7b9b4b371071e7d29d3b10da5a205460b3fc2de4" +name = "sqlx-sqlite" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f" dependencies = [ - "once_cell", - "tokio", - "tokio-rustls", + "atoi", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "sqlx-core", + "tracing", + "url", ] [[package]] @@ -4677,10 +4082,11 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "stringprep" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" dependencies = [ + "finl_unicode", "unicode-bidi", "unicode-normalization", ] @@ -4699,55 +4105,47 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strum" -version = "0.16.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6138f8f88a16d90134763314e3fc76fa3ed6a7db4725d6acf9a3ef95a3188d22" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" [[package]] name = "strum" -version = "0.24.1" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" [[package]] name = "strum_macros" -version = "0.16.0" +version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0054a7df764039a6cd8592b9de84be4bec368ff081d203a7d5371cbfa8e65c81" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ - "heck 0.3.3", + "heck", "proc-macro2", "quote", + "rustversion", "syn 1.0.109", ] [[package]] name = "strum_macros" -version = "0.24.3" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" dependencies = [ - "heck 0.4.1", + "heck", "proc-macro2", "quote", "rustversion", - "syn 1.0.109", + "syn 2.0.38", ] [[package]] name = 
"subtle" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" - -[[package]] -name = "subtle-encoding" -version = "0.5.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dcb1ed7b8330c5eed5441052651dd7a12c75e2ed88f2ec024ae1fa3a5e59945" -dependencies = [ - "zeroize", -] +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" @@ -4762,9 +4160,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.28" +version = "2.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" +checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" dependencies = [ "proc-macro2", "quote", @@ -4783,24 +4181,45 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tempfile" -version = "3.5.0" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "cfg-if 1.0.0", - "fastrand", + "cfg-if", + "fastrand 2.0.1", "redox_syscall 0.3.5", - "rustix", - "windows-sys 0.45.0", + "rustix 0.38.17", + "windows-sys", ] [[package]] name = "termcolor" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" dependencies = [ "winapi-util", ] @@ -4819,22 +4238,22 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.44" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" +checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.44" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" +checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -4854,7 +4273,7 @@ version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" dependencies = [ - "itoa 1.0.9", + "itoa", "serde", "time-core", "time-macros", @@ -4892,21 +4311,21 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = 
"1.28.2" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ - "autocfg 1.1.0", + "backtrace", "bytes", "libc", "mio", "num_cpus", - "parking_lot 0.12.1", + "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.4", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -4917,7 +4336,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -4930,17 +4349,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" -dependencies = [ - "rustls", - "tokio", - "webpki", -] - [[package]] name = "tokio-stream" version = "0.1.14" @@ -4954,9 +4362,9 @@ dependencies = [ [[package]] name = "tokio-test" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53474327ae5e166530d17f2d956afcb4f8a004de581b3cae10f12006bc8163e3" +checksum = "e89b3cbabd3ae862100094ae433e1def582cf86451b4e9bf83aa7ac1d8a7d719" dependencies = [ "async-stream", "bytes", @@ -4967,9 +4375,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" dependencies = [ "bytes", "futures-core", @@ -5006,12 +4414,24 @@ version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "log", "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.38", +] + [[package]] name = "tracing-core" version = "0.1.31" @@ -5030,7 +4450,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] @@ -5041,35 +4461,35 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "typed-builder" -version = "0.16.0" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6605aaa56cce0947127ffa0675a8a1b181f87773364390174de60a86ab9085f1" +checksum = "34085c17941e36627a879208083e25d357243812c30e7d7387c3b954f30ade16" dependencies = [ "typed-builder-macro", ] [[package]] name = "typed-builder-macro" -version = "0.16.0" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a6a6884f6a890a012adcc20ce498f30ebdc70fb1ea242c333cc5f435b0b3871" +checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "unicase" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ "version_check", ] @@ -5082,9 +4502,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" @@ -5130,7 +4550,7 @@ dependencies = [ [[package]] name = "uniffi_aries_vcx" -version = "0.58.0" +version = "0.60.0" dependencies = [ "aries-vcx", "async-trait", @@ -5157,7 +4577,7 @@ dependencies = [ "fs-err", "glob", "goblin", - "heck 0.4.1", + "heck", "once_cell", "paste", "serde", @@ -5261,21 +4681,11 @@ dependencies = [ "thiserror", ] -[[package]] -name = "universal-hash" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" -dependencies = [ - "generic-array 0.14.7", - "subtle", -] - [[package]] name = "unsigned-varint" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86a8dc7f45e4c1b0d30e43038c38f274e77af056aa5f74b93c2cf9eb3c1c836" +checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" [[package]] name = "untrusted" @@ -5285,9 +4695,9 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" [[package]] name = "url" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" dependencies = [ "form_urlencoded", "idna", @@ -5301,55 +4711,30 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8760a62e18e4d3e3f599e15c09a9f9567fd9d4a90594d45166162be8d232e63b" dependencies = [ - "aead", - "aes 0.6.0", - "aes-gcm", "amcl", - "amcl_wrapper", - "arrayref", - "blake2", - "block-modes", - "block-padding 0.2.1", - "chacha20poly1305", - "curve25519-dalek", - "ed25519-dalek", + "console_error_panic_hook", "failure", "hex", - "hkdf", - "hmac", "int_traits", - "k256", + "js-sys", "lazy_static", "log", "openssl", "rand 0.7.3", - "rand_chacha 0.2.1", - "secp256k1", "serde", - "sha2 0.9.9", - "sha3 0.9.1", - "subtle", + "serde_json", "time 0.1.45", - "x25519-dalek", + "wasm-bindgen", "zeroize", ] -[[package]] -name = "uuid" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" -dependencies = [ - "rand 0.6.5", -] - [[package]] name = "uuid" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", ] [[package]] @@ -5358,7 +4743,7 @@ version = "1.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", ] [[package]] @@ -5375,7 +4760,7 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vcx-napi-rs" -version = "0.58.0" +version = "0.60.0" dependencies = [ "chrono", "libvcx_core", @@ -5384,6 +4769,7 @@ dependencies = [ "napi-build", "napi-derive", "uuid 0.8.2", + "wallet_migrator", ] [[package]] @@ -5394,9 +4780,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "waker-fn" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" [[package]] name = "wallet_migrator" @@ -5444,7 +4830,9 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", + "serde", + "serde_json", "wasm-bindgen-macro", ] @@ -5459,7 +4847,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", "wasm-bindgen-shared", ] @@ -5469,7 +4857,7 @@ version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -5493,7 +4881,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5514,23 +4902,13 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki" -version = "0.21.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "webpki-roots" -version = "0.21.1" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aabe153544e473b775453675851ecc86863d2a81d786d741f6b76778f2a48940" +checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" dependencies = [ - "webpki", + "rustls-webpki", ] [[package]] @@ -5547,10 +4925,6 @@ name = "whoami" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" -dependencies = [ - "wasm-bindgen", - "web-sys", -] [[package]] name = "winapi" @@ -5570,9 +4944,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -5589,16 +4963,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.1", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", + "windows-targets", ] [[package]] @@ -5607,148 +4972,92 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.1", + "windows-targets", ] [[package]] name = "windows-targets" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] -[[package]] -name = "windows-targets" -version = "0.48.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" -dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_i686_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "winreg" -version = "0.10.1" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "winapi", + "cfg-if", + "windows-sys", ] [[package]] name = "x25519-dalek" -version = "1.2.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2392b6b94a576b4e2bf3c5b2757d63f10ada8020a2e4d08ac849ebcf6ea8e077" +checksum = "fb66477291e7e8d2b0ff1bcb900bf29489a9692816d79874bea351e7a8b6de96" dependencies = [ "curve25519-dalek", - "rand_core 0.5.1", + "rand_core 0.6.4", "zeroize", ] [[package]] name = "zeroize" -version = "1.3.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" dependencies = [ "zeroize_derive", ] @@ -5761,7 +5070,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn 2.0.38", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 1a9e504f8b..831240db36 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,4 +1,6 @@ [workspace] +resolver = "2" + members = [ "aries_vcx", "libvcx_core", @@ -28,7 +30,7 @@ members = [ ] [workspace.package] -version = "0.58.0" +version = "0.60.0" authors = ["Absa Group Limited", "Hyperledger Indy Contributors "] description = "Library to work with Aries protocols & collection of supporting components" license = "Apache-2.0" diff --git a/agents/node/vcxagent-core/demo/alice.js b/agents/node/vcxagent-core/demo/alice.js index 5b3b0df949..e7ec0aa58e 
100644 --- a/agents/node/vcxagent-core/demo/alice.js +++ b/agents/node/vcxagent-core/demo/alice.js @@ -12,10 +12,13 @@ const { extractProofRequestAttachement } = require('../src/utils/proofs') const assert = require('assert') const { getStorageInfoMysql } = require('./wallet-common') const { initRustLogger } = require('../src') +const mkdirp = require('mkdirp') -const mapRevRegIdToTailsFile = (_revRegId) => '/tmp/tails' +const tailsDir = '/tmp/tails' +const mapRevRegIdToTailsFile = (_revRegId) => tailsDir async function getInvitationString (fetchInviteUrl) { + mkdirp.sync(tailsDir) let invitationString if (fetchInviteUrl) { const fetchInviteAttemptThreshold = 30 diff --git a/agents/node/vcxagent-core/demo/faber.js b/agents/node/vcxagent-core/demo/faber.js index 8a03749fc4..c3ceba3c75 100644 --- a/agents/node/vcxagent-core/demo/faber.js +++ b/agents/node/vcxagent-core/demo/faber.js @@ -13,11 +13,13 @@ require('@hyperledger/node-vcx-wrapper') const { getStorageInfoMysql } = require('./wallet-common') const sleep = require('sleep-promise') const { testTailsUrl, initRustLogger } = require('../src') +const mkdirp = require('mkdirp') const tailsDir = '/tmp/tails' -async function runFaber (options) { +async function runFaber(options) { logger.info(`Starting Faber. Revocation enabled=${options.revocation}`) + mkdirp.sync(tailsDir) initRustLogger(process.env.RUST_LOG || 'vcx=error') let faberServer @@ -47,10 +49,10 @@ async function runFaber (options) { await vcxAgent.acceptTaa() } - const schemaId = await vcxAgent.serviceLedgerSchema.createSchema(getSampleSchemaData()) - await sleep(500) - const vcxCredDef = await vcxAgent.serviceLedgerCredDef.createCredentialDefinitionV2(schemaId, getFaberCredDefName(), true, 'tag1') const issuerDid = vcxAgent.getInstitutionDid() + const schemaId = await vcxAgent.serviceLedgerSchema.createSchema(getSampleSchemaData(), issuerDid) + await sleep(500) + const vcxCredDef = await vcxAgent.serviceLedgerCredDef.createCredentialDefinitionV2(issuerDid, schemaId, getFaberCredDefName(), true, 'tag1') const { revReg, revRegId } = await vcxAgent.serviceLedgerRevReg.createRevocationRegistry(issuerDid, await vcxCredDef.getCredDefId(), 1, tailsDir, 5, testTailsUrl) await vcxAgent.serviceConnections.inviterConnectionCreateAndAccept(connectionId, (invitationString) => { @@ -87,7 +89,7 @@ async function runFaber (options) { logger.info('Faber is revoking issued credential') await vcxAgent.serviceCredIssuer.revokeCredentialLocal(issuerCredId) logger.info('Faber is publishing revocation') - await revReg.publishRevocations() + await revReg.publishRevocations(issuerDid) } logger.info('#19 Create a Proof object') @@ -193,7 +195,7 @@ const usage = [ } ] -function areOptionsValid (_options) { +function areOptionsValid(_options) { return true } diff --git a/agents/node/vcxagent-core/src/agent.js b/agents/node/vcxagent-core/src/agent.js index b1997a0b7b..392b31b580 100644 --- a/agents/node/vcxagent-core/src/agent.js +++ b/agents/node/vcxagent-core/src/agent.js @@ -15,7 +15,6 @@ const { createServiceLedgerRevocationRegistry } = require('./services/service-re const { provisionAgentInAgency } = require('./utils/vcx-workflows') const { createAgencyClientForMainWallet, - initIssuerConfig, openMainWallet, openMainPool, vcxUpdateWebhookUrl, @@ -25,7 +24,7 @@ const { createStorageService } = require('./storage/storage-service') const { waitUntilAgencyIsReady, getAgencyConfig } = require('./common') const path = require('path') -async function createVcxAgent ({ agentName, genesisPath, agencyUrl, 
seed, walletExtraConfigs, endpointInfo, logger }) { +async function createVcxAgent({ agentName, genesisPath, agencyUrl, seed, walletExtraConfigs, endpointInfo, logger }) { genesisPath = genesisPath || path.join(__dirname, '/../resources/docker.txn') await waitUntilAgencyIsReady(agencyUrl, logger) @@ -39,12 +38,10 @@ async function createVcxAgent ({ agentName, genesisPath, agencyUrl, seed, wallet const agentProvision = await storageService.loadAgentProvision() const issuerDid = agentProvision.issuerConfig.institution_did - async function agentInitVcx () { + async function agentInitVcx() { logger.info(`Initializing ${agentName} vcx session.`) logger.silly(`Using following agent provision to initialize VCX settings ${JSON.stringify(agentProvision, null, 2)}`) - logger.silly('Initializing issuer config') - await initIssuerConfig(agentProvision.issuerConfig) logger.silly('Opening main wallet') await openMainWallet(agentProvision.walletConfig) logger.silly('Creating cloud agency config') @@ -53,17 +50,17 @@ async function createVcxAgent ({ agentName, genesisPath, agencyUrl, seed, wallet await openMainPool({ genesis_path: genesisPath }) } - async function agentShutdownVcx () { + async function agentShutdownVcx() { logger.debug(`Shutting down ${agentName} vcx session.`) shutdownVcx() } - async function updateWebhookUrl (webhookUrl) { + async function updateWebhookUrl(webhookUrl) { logger.info(`Updating webhook url to ${webhookUrl}`) await vcxUpdateWebhookUrl({ webhookUrl }) } - async function acceptTaa () { + async function acceptTaa() { const taa = await getLedgerAuthorAgreement() const taaJson = JSON.parse(taa) const acceptanceMechanism = Object.keys(taaJson.aml)[0] @@ -71,7 +68,7 @@ async function createVcxAgent ({ agentName, genesisPath, agencyUrl, seed, wallet await setActiveTxnAuthorAgreementMeta(taaJson.text, taaJson.version, acceptanceMechanism) } - function getInstitutionDid () { + function getInstitutionDid() { return issuerDid } diff --git a/agents/node/vcxagent-core/src/services/service-ledger-creddef.js b/agents/node/vcxagent-core/src/services/service-ledger-creddef.js index 2e6aeaf29a..da6f373347 100644 --- a/agents/node/vcxagent-core/src/services/service-ledger-creddef.js +++ b/agents/node/vcxagent-core/src/services/service-ledger-creddef.js @@ -1,8 +1,9 @@ const { CredentialDef } = require('@hyperledger/node-vcx-wrapper') -module.exports.createServiceLedgerCredDef = function createServiceLedgerCredDef ({ logger, saveCredDef, loadCredDef, listCredDefIds }) { - async function createCredentialDefinitionV2 (schemaId, credDefId, supportRevocation, tag = 'tag1') { +module.exports.createServiceLedgerCredDef = function createServiceLedgerCredDef({ logger, saveCredDef, loadCredDef, listCredDefIds }) { + async function createCredentialDefinitionV2(issuerDid, schemaId, credDefId, supportRevocation, tag = 'tag1') { const data = { + issuerDid, supportRevocation, schemaId, sourceId: credDefId, @@ -16,11 +17,11 @@ module.exports.createServiceLedgerCredDef = function createServiceLedgerCredDef return credDef } - async function listIds () { + async function listIds() { return listCredDefIds() } - async function printInfo (credDefIds) { + async function printInfo(credDefIds) { for (const id of credDefIds) { const credDef = await loadCredDef(id) const serCredDef = credDef.serialize() @@ -28,7 +29,7 @@ module.exports.createServiceLedgerCredDef = function createServiceLedgerCredDef } } - async function getCredDefId (credDefId) { + async function getCredDefId(credDefId) { const credDef = await 
loadCredDef(credDefId) logger.info(`Getting credDefId for credential definition ${credDefId}`) return credDef.getCredDefId() diff --git a/agents/node/vcxagent-core/src/services/service-ledger-schema.js b/agents/node/vcxagent-core/src/services/service-ledger-schema.js index 9f5465924b..db2249f746 100644 --- a/agents/node/vcxagent-core/src/services/service-ledger-schema.js +++ b/agents/node/vcxagent-core/src/services/service-ledger-schema.js @@ -1,20 +1,20 @@ const { Schema } = require('@hyperledger/node-vcx-wrapper') -module.exports.createServiceLedgerSchema = function createServiceLedgerSchema ({ logger, saveSchema, loadSchema, listSchemaIds }) { - async function createSchema (schemaData) { +module.exports.createServiceLedgerSchema = function createServiceLedgerSchema({ logger, saveSchema, loadSchema, listSchemaIds }) { + async function createSchema(schemaData, issuerDid) { logger.info(`Creating a new schema on the ledger: ${JSON.stringify(schemaData, null, 2)}`) - const schema = await Schema.create(schemaData) + const schema = await Schema.create(schemaData, issuerDid) const schemaId = await schema.getSchemaId() await saveSchema(schemaId, schema) return schemaId } - async function listIds () { + async function listIds() { return listSchemaIds() } - async function printInfo (schemaIds) { + async function printInfo(schemaIds) { for (const id of schemaIds) { const serSchema = await loadSchema(id) logger.info(`Schema ${id}: ${JSON.stringify(serSchema)}`) diff --git a/agents/node/vcxagent-core/src/services/service-revocation-registry.js b/agents/node/vcxagent-core/src/services/service-revocation-registry.js index f0ebcaacd5..1130c11cc9 100644 --- a/agents/node/vcxagent-core/src/services/service-revocation-registry.js +++ b/agents/node/vcxagent-core/src/services/service-revocation-registry.js @@ -32,10 +32,10 @@ module.exports.createServiceLedgerRevocationRegistry = function createServiceLed return { revReg: newRevReg, revRegId: newRevRegId } } - async function publishRevocations (revRegId) { + async function publishRevocations (revRegId, submitterDid) { logger.info(`Publishing revocations for revocation registry ${revRegId}`) const revReg = await loadRevReg(revRegId) - await revReg.publishRevocations() + await revReg.publishRevocations(submitterDid) } async function getTailsFile (credDefId) { diff --git a/agents/node/vcxagent-core/test/distribute-tails.spec.js b/agents/node/vcxagent-core/test/distribute-tails.spec.js index 6a465c448d..cbe9ef3a61 100644 --- a/agents/node/vcxagent-core/test/distribute-tails.spec.js +++ b/agents/node/vcxagent-core/test/distribute-tails.spec.js @@ -9,12 +9,16 @@ const uuid = require('uuid') const sleep = require('sleep-promise') const fs = require('fs') const mkdirp = require('mkdirp') -const path = require('path') const { proofRequestDataStandard } = require('./utils/data') +const TAILS_DIR_ALICE = '/tmp/alice/tails' +const TAILS_DIR_FABER = '/tmp/faber/tails' + beforeAll(async () => { jest.setTimeout(1000 * 60 * 4) initRustLogger(process.env.RUST_LOG || 'vcx=error') + mkdirp(TAILS_DIR_ALICE) + mkdirp(TAILS_DIR_FABER) }) describe('test tails distribution', () => { @@ -26,7 +30,7 @@ describe('test tails distribution', () => { const port = 5468 const tailsUrlId = uuid.v4() const tailsUrl = `http://127.0.0.1:${port}/${tailsUrlId}` - await faber.buildLedgerPrimitives(buildRevocationDetails({ supportRevocation: true, tailsDir: path.join(__dirname, '/tmp/faber/tails'), maxCreds: 5, tailsUrl })) + await faber.buildLedgerPrimitives(buildRevocationDetails({ 
supportRevocation: true, tailsDir: TAILS_DIR_FABER, maxCreds: 5, tailsUrl })) await faber.sendCredentialOffer() await alice.acceptCredentialOffer() await faber.updateStateCredential(IssuerStateType.RequestReceived) @@ -35,20 +39,18 @@ describe('test tails distribution', () => { const faberTailsHash = await faber.getTailsHash() const app = express() - app.use(`/${tailsUrlId}`, express.static(path.join(__dirname, `/tmp/faber/tails/${faberTailsHash}`))) + app.use(`/${tailsUrlId}`, express.static(`${TAILS_DIR_FABER}/${faberTailsHash}`)) server = app.listen(port) const aliceTailsLocation = await alice.getTailsLocation() const aliceTailsHash = await alice.getTailsHash() - const aliceTailsFileDir = path.join(__dirname, '/tmp/alice/tails') - const aliceTailsFilePath = aliceTailsFileDir + `/${aliceTailsHash}` - await mkdirp(aliceTailsFileDir) + const aliceTailsFilePath = TAILS_DIR_ALICE + `/${aliceTailsHash}` axios.default.get(`${aliceTailsLocation}`, { responseType: 'stream' }).then(res => { res.data.pipe(fs.createWriteStream(aliceTailsFilePath)) }) const issuerDid = faber.getFaberDid() const request = await faber.requestProofFromAlice(proofRequestDataStandard(issuerDid)) - await alice.sendHolderProof(JSON.parse(request), revRegId => aliceTailsFileDir, { attr_nickname: 'Smith' }) + await alice.sendHolderProof(JSON.parse(request), revRegId => TAILS_DIR_ALICE, { attr_nickname: 'Smith' }) await faber.updateStateVerifierProof(VerifierStateType.Finished) await alice.updateStateHolderProof(ProverStateType.Finished) } catch (err) { diff --git a/agents/node/vcxagent-core/test/issue-verify.spec.js b/agents/node/vcxagent-core/test/issue-verify.spec.js index 5e86e114f3..08445453a7 100644 --- a/agents/node/vcxagent-core/test/issue-verify.spec.js +++ b/agents/node/vcxagent-core/test/issue-verify.spec.js @@ -11,11 +11,14 @@ const { const sleep = require('sleep-promise') const { initRustLogger } = require('../src') const { proofRequestDataStandard, proofRequestDataSelfAttest } = require('./utils/data') -const path = require('path') +const mkdirp = require('mkdirp') + +const TAILS_DIR = '/tmp/faber/tails' beforeAll(async () => { jest.setTimeout(1000 * 60 * 4) initRustLogger(process.env.RUST_LOG || 'vcx=error') + mkdirp(TAILS_DIR) }) afterAll(async () => { @@ -26,9 +29,8 @@ describe('test update state', () => { it('Faber should issue credential, verify proof', async () => { const { alice, faber } = await createPairedAliceAndFaber() const issuerDid = faber.getFaberDid() - const tailsDir = path.join(__dirname, '/tmp/faber/tails') - await faber.buildLedgerPrimitives({ tailsDir, maxCreds: 5 }) - await faber.rotateRevReg(tailsDir, 5) + await faber.buildLedgerPrimitives({ tailsDir: TAILS_DIR, maxCreds: 5 }) + await faber.rotateRevReg(TAILS_DIR, 5) await faber.sendCredentialOffer() await alice.acceptCredentialOffer() @@ -38,7 +40,7 @@ describe('test update state', () => { await faber.receiveCredentialAck() const request = await faber.requestProofFromAlice(proofRequestDataStandard(issuerDid)) - await alice.sendHolderProof(JSON.parse(request), revRegId => tailsDir, { attr_nickname: 'Smith' }) + await alice.sendHolderProof(JSON.parse(request), revRegId => TAILS_DIR, { attr_nickname: 'Smith' }) await faber.updateStateVerifierProof(VerifierStateType.Finished) await alice.updateStateHolderProof(ProverStateType.Finished) const { @@ -124,8 +126,7 @@ describe('test update state', () => { it('Faber should issue credential, revoke credential, verify proof', async () => { const { alice, faber } = await createPairedAliceAndFaber() 
const issuerDid = faber.getFaberDid() - const tailsDir = path.join(__dirname, '/tmp/faber/tails') - await faber.buildLedgerPrimitives({ tailsDir, maxCreds: 5 }) + await faber.buildLedgerPrimitives({ tailsDir: TAILS_DIR, maxCreds: 5 }) await faber.sendCredentialOffer() await alice.acceptCredentialOffer() @@ -136,7 +137,7 @@ describe('test update state', () => { await faber.revokeCredential() const request = await faber.requestProofFromAlice(proofRequestDataStandard(issuerDid)) - await alice.sendHolderProof(JSON.parse(request), revRegId => tailsDir, { attr_nickname: 'Smith' }) + await alice.sendHolderProof(JSON.parse(request), revRegId => TAILS_DIR, { attr_nickname: 'Smith' }) await faber.updateStateVerifierProof(VerifierStateType.Finished) await alice.updateStateHolderProof(ProverStateType.Failed) const { diff --git a/agents/node/vcxagent-core/test/out-of-band.spec.js b/agents/node/vcxagent-core/test/out-of-band.spec.js index 3eaa8d99d1..2d5ff29882 100644 --- a/agents/node/vcxagent-core/test/out-of-band.spec.js +++ b/agents/node/vcxagent-core/test/out-of-band.spec.js @@ -7,12 +7,15 @@ const { createPairedAliceAndFaberViaOobMsg, createAliceAndFaber, connectViaOobMe const { IssuerStateType, HolderStateType, OutOfBandReceiver } = require('@hyperledger/node-vcx-wrapper') const { initRustLogger } = require('../src') const { proofRequestDataStandard } = require('./utils/data') -const path = require('path') +const mkdirp = require('mkdirp') const logger = require('../demo/logger')('out-of-band-test') +const TAILS_DIR = '/tmp/faber/tails' + beforeAll(async () => { jest.setTimeout(1000 * 60 * 4) initRustLogger(process.env.RUST_LOG || 'vcx=error') + mkdirp(TAILS_DIR) }) describe('test out of band communication', () => { @@ -56,8 +59,7 @@ describe('test out of band communication', () => { it('Faber issues credential via OOB', async () => { try { const { alice, faber } = await createAliceAndFaber() - const tailsDir = path.join(__dirname, '/tmp/faber/tails') - await faber.buildLedgerPrimitives({ tailsDir, maxCreds: 5 }) + await faber.buildLedgerPrimitives({ tailsDir: TAILS_DIR, maxCreds: 5 }) const oobCredOfferMsg = await faber.createOobCredOffer() await connectViaOobMessage(alice, faber, oobCredOfferMsg) @@ -75,8 +77,7 @@ describe('test out of band communication', () => { it('Faber requests proof via OOB', async () => { try { const { alice, faber } = await createPairedAliceAndFaber() - const tailsDir = path.join(__dirname, '/tmp/faber/tails') - await faber.buildLedgerPrimitives({ tailsDir, maxCreds: 5 }) + await faber.buildLedgerPrimitives({ tailsDir: TAILS_DIR, maxCreds: 5 }) await faber.sendCredentialOffer() await alice.acceptCredentialOffer() await faber.updateStateCredential(IssuerStateType.RequestReceived) @@ -88,7 +89,7 @@ describe('test out of band communication', () => { const oobReceiver = await OutOfBandReceiver.createWithMessage(oobPresentationRequestMsg) const presentationRequest = oobReceiver.extractMessage() - await alice.sendHolderProof(presentationRequest, revRegId => tailsDir, { attr_nickname: 'Smith' }) + await alice.sendHolderProof(presentationRequest, revRegId => TAILS_DIR, { attr_nickname: 'Smith' }) await faber.updateStateVerifierProof(VerifierStateType.Finished) } catch (e) { console.error(e.stack) diff --git a/agents/node/vcxagent-core/test/utils/alice.js b/agents/node/vcxagent-core/test/utils/alice.js index b404307b58..962794ef1f 100644 --- a/agents/node/vcxagent-core/test/utils/alice.js +++ b/agents/node/vcxagent-core/test/utils/alice.js @@ -127,7 +127,7 @@ 
module.exports.createAlice = async function createAlice (serviceEndpoint = 'http logger.debug(`acceptOobCredentialOffer >>> attached message: ${credOffer}`) await vcxAgent.serviceCredHolder.createCredentialFromOfferAndSendRequest(connectionId, holderCredentialId, credOffer) const state = await vcxAgent.serviceCredHolder.getState(holderCredentialId) - expect(state).toBe(HolderStateType.RequestSent) + expect(state).toBe(HolderStateType.RequestSet) await vcxAgent.agentShutdownVcx() } diff --git a/agents/node/vcxagent-core/test/utils/faber.js b/agents/node/vcxagent-core/test/utils/faber.js index c3cc72ccec..68aec8f4be 100644 --- a/agents/node/vcxagent-core/test/utils/faber.js +++ b/agents/node/vcxagent-core/test/utils/faber.js @@ -8,7 +8,7 @@ const { getAliceSchemaAttrs, getFaberCredDefName } = require('./data') const sleep = require('sleep-promise') const assert = require('assert') -module.exports.createFaber = async function createFaber (serviceEndpoint = 'http://localhost:5400') { +module.exports.createFaber = async function createFaber(serviceEndpoint = 'http://localhost:5400') { const agentName = `faber-${Math.floor(new Date() / 1000)}` const connectionId = 'connection-faber-to-alice' const issuerCredId = 'credential-for-alice' @@ -34,7 +34,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentInitVcx() await vcxAgent.agentShutdownVcx() - async function createInvite () { + async function createInvite() { logger.info('Faber is going to generate invite') await vcxAgent.agentInitVcx() @@ -48,7 +48,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return invite } - async function createPublicInvite () { + async function createPublicInvite() { logger.info('Faber is going to generate public invite') await vcxAgent.agentInitVcx() @@ -62,7 +62,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return publicInvitation } - async function publishService (endpoint) { + async function publishService(endpoint) { logger.info('Faber is going to write nonmediated service on the ledger') await vcxAgent.agentInitVcx() @@ -76,7 +76,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return pwInfo } - async function readServiceFromLedger () { + async function readServiceFromLedger() { logger.info('Faber is going to read service from the ledger') await vcxAgent.agentInitVcx() @@ -87,7 +87,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return service } - async function unpackMsg (encryptedMsg) { + async function unpackMsg(encryptedMsg) { assert(encryptedMsg) logger.info(`Faber is going to unpack message of length ${encryptedMsg.length}`) await vcxAgent.agentInitVcx() @@ -100,7 +100,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return { message, senderVerkey } } - async function createOobMessageWithDid (wrappedMessage) { + async function createOobMessageWithDid(wrappedMessage) { logger.info('Faber is going to generate out of band message') await vcxAgent.agentInitVcx() @@ -112,7 +112,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return oobMsg } - async function createOobCredOffer () { + async function createOobCredOffer() { await vcxAgent.agentInitVcx() const schemaAttrs = getAliceSchemaAttrs() const credOfferMsg = await vcxAgent.serviceCredIssuer.buildOfferAndMarkAsSent(issuerCredId, credDefId, revRegId, schemaAttrs) @@ -120,11 +120,11 @@ 
module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return await createOobMessageWithDid(credOfferMsg) } - function getFaberDid () { + function getFaberDid() { return vcxAgent.getInstitutionDid() } - async function createOobProofRequest (proofData) { + async function createOobProofRequest(proofData) { await vcxAgent.agentInitVcx() // todo: address @@ -135,7 +135,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return await createOobMessageWithDid(presentationRequestMsg) } - async function sendConnectionResponse () { + async function sendConnectionResponse() { logger.info('Faber is going to generate invite') await vcxAgent.agentInitVcx() @@ -144,7 +144,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function updateConnection (expectedNextState) { + async function updateConnection(expectedNextState) { logger.info(`Faber is going to update connection, expecting new state of ${expectedNextState}`) await vcxAgent.agentInitVcx() @@ -153,7 +153,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function handleMessage (ariesMsg) { + async function handleMessage(ariesMsg) { logger.info('Faber is going to try handle incoming messages') await vcxAgent.agentInitVcx() @@ -162,16 +162,18 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function buildLedgerPrimitives (revocationDetails) { + async function buildLedgerPrimitives(revocationDetails) { await vcxAgent.agentInitVcx() logger.info('Faber writing schema on ledger') - const schemaId = await vcxAgent.serviceLedgerSchema.createSchema(getSampleSchemaData()) + const issuerDid = vcxAgent.getInstitutionDid() + const schemaId = await vcxAgent.serviceLedgerSchema.createSchema(getSampleSchemaData(), issuerDid) await sleep(500) logger.info('Faber writing credential definition on ledger') const supportRevocation = !!revocationDetails await vcxAgent.serviceLedgerCredDef.createCredentialDefinitionV2( + issuerDid, schemaId, getFaberCredDefName(), supportRevocation @@ -186,7 +188,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function rotateRevReg (tailsDir, maxCreds) { + async function rotateRevReg(tailsDir, maxCreds) { await vcxAgent.agentInitVcx() logger.info('Faber rotating revocation registry') @@ -197,7 +199,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function sendCredentialOffer () { + async function sendCredentialOffer() { await vcxAgent.agentInitVcx() logger.info('Issuer sending credential offer') @@ -207,7 +209,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function updateStateCredential (expectedState) { + async function updateStateCredential(expectedState) { await vcxAgent.agentInitVcx() logger.info('Issuer updating state of credential with connection') @@ -216,25 +218,26 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function sendCredential () { + async function sendCredential() { await vcxAgent.agentInitVcx() logger.info('Issuer sending credential') - expect(await vcxAgent.serviceCredIssuer.sendCredential(issuerCredId, 
connectionId)).toBe(IssuerStateType.CredentialSent) + await vcxAgent.serviceCredIssuer.sendCredential(issuerCredId, connectionId) const revocationId = await vcxAgent.serviceCredIssuer.getRevocationId(issuerCredId) logger.info(`Sent credential with revocation id ${revocationId}`) await vcxAgent.agentShutdownVcx() } - async function revokeCredential () { + async function revokeCredential() { await vcxAgent.agentInitVcx() await vcxAgent.serviceCredIssuer.revokeCredentialLocal(issuerCredId) - await vcxAgent.serviceLedgerRevReg.publishRevocations(revRegId) + const issuerDid = vcxAgent.getInstitutionDid() + await vcxAgent.serviceLedgerRevReg.publishRevocations(revRegId, issuerDid) await vcxAgent.agentShutdownVcx() } - async function receiveCredentialAck () { + async function receiveCredentialAck() { await vcxAgent.agentInitVcx() logger.info('Issuer waiting for credential ack') @@ -244,7 +247,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function requestProofFromAlice (proofData) { + async function requestProofFromAlice(proofData) { logger.info('Faber going to request proof from Alice') await vcxAgent.agentInitVcx() logger.info(`Faber is creating proof ${proofId}`) @@ -256,7 +259,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return proofRequestMessage } - async function updateStateVerifierProof (expectedNextState) { + async function updateStateVerifierProof(expectedNextState) { logger.info(`Verifier updating state of proof, expecting it to be in state ${expectedNextState}`) await vcxAgent.agentInitVcx() @@ -265,7 +268,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function verifySignature (dataBase64, signatureBase64) { + async function verifySignature(dataBase64, signatureBase64) { logger.debug(`Faber is going to verify signed data. 
Data=${dataBase64} signature=${signatureBase64}`) await vcxAgent.agentInitVcx() @@ -275,7 +278,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return isValid } - async function downloadReceivedMessages () { + async function downloadReceivedMessages() { logger.info('Faber is going to download messages using getMessages') await vcxAgent.agentInitVcx() const agencyMessages = await vcxAgent.serviceConnections.getMessages(connectionId, ['MS-103']) @@ -283,7 +286,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return agencyMessages } - async function createNonmediatedConnectionWithInvite () { + async function createNonmediatedConnectionWithInvite() { logger.info('Faber is going to create a connection with invite') await vcxAgent.agentInitVcx() @@ -294,7 +297,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return invite } - async function nonmediatedConnectionProcessRequest (request) { + async function nonmediatedConnectionProcessRequest(request) { logger.info('Faber is going to process a connection request') await vcxAgent.agentInitVcx() @@ -305,7 +308,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function createNonmediatedConnectionFromRequest (request, pwInfo) { + async function createNonmediatedConnectionFromRequest(request, pwInfo) { logger.info(`Faber is going to create a connection from a request: ${request}`) await vcxAgent.agentInitVcx() @@ -315,7 +318,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function nonmediatedConnectionProcessAck (ack) { + async function nonmediatedConnectionProcessAck(ack) { logger.info(`Faber is processing ack: ${ack}`) await vcxAgent.agentInitVcx() @@ -325,7 +328,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function createConnectionFromReceivedRequestV2 (pwInfo, request) { + async function createConnectionFromReceivedRequestV2(pwInfo, request) { logger.info(`Faber is going to create a connection from a request: ${request}, using pwInfo: ${JSON.stringify(pwInfo)}`) await vcxAgent.agentInitVcx() @@ -335,19 +338,19 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function updateMessageStatus (uids) { + async function updateMessageStatus(uids) { await vcxAgent.agentInitVcx() await vcxAgent.serviceConnections.updateMessagesStatus(connectionId, uids) await vcxAgent.agentShutdownVcx() } - async function updateAllReceivedMessages () { + async function updateAllReceivedMessages() { await vcxAgent.agentInitVcx() await vcxAgent.serviceConnections.updateAllReceivedMessages(connectionId) await vcxAgent.agentShutdownVcx() } - async function downloadReceivedMessagesV2 () { + async function downloadReceivedMessagesV2() { logger.info('Faber is going to download messages using getMessagesV2') await vcxAgent.agentInitVcx() const agencyMessages = await vcxAgent.serviceConnections.getMessagesV2(connectionId, ['MS-103']) @@ -355,7 +358,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return agencyMessages } - async function getCredentialRevRegId () { + async function getCredentialRevRegId() { logger.info(`Faber is going to obtain rev reg id for cred id ${issuerCredId}`) await vcxAgent.agentInitVcx() const 
revRegId = await vcxAgent.serviceCredIssuer.getRevRegId(issuerCredId) @@ -364,7 +367,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return revRegId } - async function getTailsFile () { + async function getTailsFile() { logger.info(`Faber is going to obtain tails file for rev reg id ${revRegId}`) await vcxAgent.agentInitVcx() const tailsFile = await vcxAgent.serviceLedgerCredDef.getTailsFile(issuerCredId) @@ -373,7 +376,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return tailsFile } - async function getTailsHash () { + async function getTailsHash() { logger.info(`Faber is going to obtain tails hash for rev reg id ${revRegId}`) await vcxAgent.agentInitVcx() const tailsHash = await vcxAgent.serviceLedgerRevReg.getTailsHash(revRegId) @@ -382,14 +385,14 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http return tailsHash } - async function sendMessage (message) { + async function sendMessage(message) { logger.info('Faber is going to send message') await vcxAgent.agentInitVcx() await vcxAgent.serviceConnections.sendMessage(connectionId, message) await vcxAgent.agentShutdownVcx() } - async function nonmediatedConnectionSendMessage (message) { + async function nonmediatedConnectionSendMessage(message) { logger.info('Faber is going to send message') await vcxAgent.agentInitVcx() @@ -398,7 +401,7 @@ module.exports.createFaber = async function createFaber (serviceEndpoint = 'http await vcxAgent.agentShutdownVcx() } - async function getPresentationInfo () { + async function getPresentationInfo() { logger.info('Faber is gather info about received presentation') await vcxAgent.agentInitVcx() const presentationMsg = await vcxAgent.serviceVerifier.getPresentationMsg(proofId) diff --git a/agents/rust/aries-vcx-agent/Cargo.toml b/agents/rust/aries-vcx-agent/Cargo.toml index fc5f284cb1..58194a7b0a 100644 --- a/agents/rust/aries-vcx-agent/Cargo.toml +++ b/agents/rust/aries-vcx-agent/Cargo.toml @@ -8,7 +8,7 @@ edition.workspace = true [dependencies] serde = "1.0.145" aries-vcx = { path = "../../../aries_vcx" } -aries_vcx_core = { path = "../../../aries_vcx_core" } +aries_vcx_core = { path = "../../../aries_vcx_core", features = ["credx", "vdrtools_wallet"] } async-trait = "0.1.64" derive_builder = "0.11.2" serde_json = "1.0.85" diff --git a/agents/rust/aries-vcx-agent/src/agent/agent_struct.rs b/agents/rust/aries-vcx-agent/src/agent/agent_struct.rs index 9cf3d676eb..48e4f3d1b8 100644 --- a/agents/rust/aries-vcx-agent/src/agent/agent_struct.rs +++ b/agents/rust/aries-vcx-agent/src/agent/agent_struct.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use aries_vcx::core::profile::profile::Profile; +use aries_vcx::core::profile::modular_libs_profile::ModularLibsProfile; use crate::{ agent::agent_config::AgentConfig, @@ -14,7 +14,7 @@ use crate::{ #[derive(Clone)] pub struct Agent { - pub(super) profile: Arc, + pub(super) profile: Arc, pub(super) config: AgentConfig, pub(super) connections: Arc, pub(super) schemas: Arc, @@ -27,8 +27,8 @@ pub struct Agent { } impl Agent { - pub fn profile(&self) -> Arc { - Arc::clone(&self.profile) + pub fn profile(&self) -> &ModularLibsProfile { + &self.profile } pub fn agent_config(&self) -> AgentConfig { diff --git a/agents/rust/aries-vcx-agent/src/agent/init.rs b/agents/rust/aries-vcx-agent/src/agent/init.rs index a416f9573f..9108a1d9a2 100644 --- a/agents/rust/aries-vcx-agent/src/agent/init.rs +++ b/agents/rust/aries-vcx-agent/src/agent/init.rs @@ -1,17 +1,12 @@ use 
std::sync::Arc; use aries_vcx::{ - core::profile::{ - ledger::{build_ledger_components, VcxPoolConfig}, - profile::Profile, - vdrtools_profile::VdrtoolsProfile, - }, - global::settings::{init_issuer_config, DEFAULT_LINK_SECRET_ALIAS}, + core::profile::{ledger::VcxPoolConfig, modular_libs_profile::ModularLibsProfile, Profile}, + global::settings::DEFAULT_LINK_SECRET_ALIAS, }; use aries_vcx_core::{ - ledger::base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - }, + self, + anoncreds::base_anoncreds::BaseAnonCreds, wallet::indy::{ wallet::{create_and_open_wallet, wallet_configure_issuer}, IndySdkWallet, WalletConfig, @@ -68,7 +63,7 @@ impl Agent { let config_issuer = wallet_configure_issuer(wallet_handle, &init_config.enterprise_seed) .await .unwrap(); - init_issuer_config(&config_issuer.institution_did).unwrap(); + let wallet = Arc::new(IndySdkWallet::new(wallet_handle)); let pool_config = VcxPoolConfig { @@ -76,22 +71,10 @@ impl Agent { indy_vdr_config: None, response_cache_config: None, }; - let (ledger_read, ledger_write) = - build_ledger_components(wallet.clone(), pool_config).unwrap(); - let anoncreds_ledger_read: Arc = ledger_read.clone(); - let anoncreds_ledger_write: Arc = ledger_write.clone(); - let indy_ledger_read: Arc = ledger_read.clone(); - let indy_ledger_write: Arc = ledger_write.clone(); - let indy_profile = VdrtoolsProfile::init( - wallet, - anoncreds_ledger_read, - anoncreds_ledger_write, - indy_ledger_read, - indy_ledger_write, - ); - let profile: Arc = Arc::new(indy_profile); - let anoncreds = profile.inject_anoncreds(); + let indy_profile = ModularLibsProfile::init(wallet, pool_config).unwrap(); + let profile = Arc::new(indy_profile); + let anoncreds = profile.anoncreds(); anoncreds .prover_create_link_secret(DEFAULT_LINK_SECRET_ALIAS) .await diff --git a/agents/rust/aries-vcx-agent/src/services/connection.rs b/agents/rust/aries-vcx-agent/src/services/connection.rs index 41ef2dd2b9..d349d0ec7f 100644 --- a/agents/rust/aries-vcx-agent/src/services/connection.rs +++ b/agents/rust/aries-vcx-agent/src/services/connection.rs @@ -1,7 +1,7 @@ use std::sync::{Arc, Mutex}; use aries_vcx::{ - core::profile::profile::Profile, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, handlers::util::AnyInvitation, messages::msg_fields::protocols::{ connection::{request::Request, response::Response}, @@ -22,13 +22,13 @@ use crate::{ pub type ServiceEndpoint = Url; pub struct ServiceConnections { - profile: Arc, + profile: Arc, service_endpoint: ServiceEndpoint, connections: Arc>, } impl ServiceConnections { - pub fn new(profile: Arc, service_endpoint: ServiceEndpoint) -> Self { + pub fn new(profile: Arc, service_endpoint: ServiceEndpoint) -> Self { Self { profile, service_endpoint, @@ -40,7 +40,7 @@ impl ServiceConnections { &self, pw_info: Option, ) -> AgentResult { - let pw_info = pw_info.unwrap_or(PairwiseInfo::create(&self.profile.inject_wallet()).await?); + let pw_info = pw_info.unwrap_or(PairwiseInfo::create(self.profile.wallet()).await?); let inviter = Connection::new_inviter("".to_owned(), pw_info) .create_invitation(vec![], self.service_endpoint.clone()); let invite = inviter.get_invitation().clone(); @@ -52,9 +52,9 @@ impl ServiceConnections { } pub async fn receive_invitation(&self, invite: AnyInvitation) -> AgentResult { - let pairwise_info = PairwiseInfo::create(&self.profile.inject_wallet()).await?; + let pairwise_info = PairwiseInfo::create(self.profile.wallet()).await?; let invitee = 
Connection::new_invitee("".to_owned(), pairwise_info) - .accept_invitation(&self.profile.inject_indy_ledger_read(), invite) + .accept_invitation(self.profile.ledger_read(), invite) .await?; let thread_id = invitee.thread_id().to_owned(); @@ -69,7 +69,7 @@ impl ServiceConnections { .await?; let request = invitee.get_request().clone(); invitee - .send_message(&self.profile.inject_wallet(), &request.into(), &HttpClient) + .send_message(self.profile.wallet(), &request.into(), &HttpClient) .await?; self.connections.insert(thread_id, invitee.into())?; Ok(()) @@ -94,11 +94,10 @@ impl ServiceConnections { let inviter = inviter .handle_request( - &self.profile.inject_wallet(), + self.profile.wallet(), request, self.service_endpoint.clone(), vec![], - &HttpClient, ) .await?; @@ -111,7 +110,7 @@ impl ServiceConnections { let inviter: Connection<_, _> = self.connections.get(thread_id)?.try_into()?; let response = inviter.get_connection_response_msg(); inviter - .send_message(&self.profile.inject_wallet(), &response.into(), &HttpClient) + .send_message(self.profile.wallet(), &response.into(), &HttpClient) .await?; self.connections.insert(thread_id, inviter.into())?; @@ -122,7 +121,7 @@ impl ServiceConnections { pub async fn accept_response(&self, thread_id: &str, response: Response) -> AgentResult<()> { let invitee: Connection<_, _> = self.connections.get(thread_id)?.try_into()?; let invitee = invitee - .handle_response(&self.profile.inject_wallet(), response, &HttpClient) + .handle_response(self.profile.wallet(), response) .await?; self.connections.insert(thread_id, invitee.into())?; @@ -134,7 +133,7 @@ impl ServiceConnections { let invitee: Connection<_, _> = self.connections.get(thread_id)?.try_into()?; invitee .send_message( - &self.profile.inject_wallet(), + self.profile.wallet(), &invitee.get_ack().into(), &HttpClient, ) diff --git a/agents/rust/aries-vcx-agent/src/services/credential_definition.rs b/agents/rust/aries-vcx-agent/src/services/credential_definition.rs index 7177f6c2eb..3b013615ec 100644 --- a/agents/rust/aries-vcx-agent/src/services/credential_definition.rs +++ b/agents/rust/aries-vcx-agent/src/services/credential_definition.rs @@ -2,7 +2,7 @@ use std::sync::{Arc, Mutex}; use aries_vcx::{ common::primitives::credential_definition::{CredentialDef, CredentialDefConfig}, - core::profile::profile::Profile, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, }; use crate::{ @@ -11,12 +11,12 @@ use crate::{ }; pub struct ServiceCredentialDefinitions { - profile: Arc, + profile: Arc, cred_defs: ObjectCache, } impl ServiceCredentialDefinitions { - pub fn new(profile: Arc) -> Self { + pub fn new(profile: Arc) -> Self { Self { profile, cred_defs: ObjectCache::new("cred-defs"), @@ -25,8 +25,8 @@ impl ServiceCredentialDefinitions { pub async fn create_cred_def(&self, config: CredentialDefConfig) -> AgentResult { let cd = CredentialDef::create( - &self.profile.inject_anoncreds_ledger_read(), - &self.profile.inject_anoncreds(), + self.profile.ledger_read(), + self.profile.anoncreds(), "".to_string(), config, true, @@ -38,10 +38,7 @@ impl ServiceCredentialDefinitions { pub async fn publish_cred_def(&self, thread_id: &str) -> AgentResult<()> { let cred_def = self.cred_defs.get(thread_id)?; let cred_def = cred_def - .publish_cred_def( - &self.profile.inject_anoncreds_ledger_read(), - &self.profile.inject_anoncreds_ledger_write(), - ) + .publish_cred_def(self.profile.ledger_read(), self.profile.ledger_write()) .await?; self.cred_defs.insert(thread_id, cred_def)?; Ok(()) diff --git 
a/agents/rust/aries-vcx-agent/src/services/holder.rs b/agents/rust/aries-vcx-agent/src/services/holder.rs index 85f7e9eb7d..6c9a9a39f0 100644 --- a/agents/rust/aries-vcx-agent/src/services/holder.rs +++ b/agents/rust/aries-vcx-agent/src/services/holder.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use aries_vcx::{ - core::profile::profile::Profile, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, handlers::issuance::holder::Holder, messages::{ msg_fields::protocols::cred_issuance::v1::{ @@ -36,13 +36,16 @@ impl HolderWrapper { } pub struct ServiceCredentialsHolder { - profile: Arc, + profile: Arc, creds_holder: ObjectCache, service_connections: Arc, } impl ServiceCredentialsHolder { - pub fn new(profile: Arc, service_connections: Arc) -> Self { + pub fn new( + profile: Arc, + service_connections: Arc, + ) -> Self { Self { profile, service_connections, @@ -66,12 +69,12 @@ impl ServiceCredentialsHolder { propose_credential: ProposeCredentialV1, ) -> AgentResult { let connection = self.service_connections.get_by_id(connection_id)?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let mut holder = Holder::create("")?; holder.set_proposal(propose_credential.clone())?; connection - .send_message(&wallet, &propose_credential.into(), &HttpClient) + .send_message(wallet, &propose_credential.into(), &HttpClient) .await?; self.creds_holder.insert( @@ -105,16 +108,16 @@ impl ServiceCredentialsHolder { (None, None) => return Err(AgentError::from_kind(AgentErrorKind::InvalidArguments)), }; let connection = self.service_connections.get_by_id(&connection_id)?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let pw_did = connection.pairwise_info().pw_did.to_string(); let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { connection.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { connection.send_message(wallet, &msg, &HttpClient).await }) }); let msg_response = holder .prepare_credential_request( - &self.profile.inject_anoncreds_ledger_read(), - &self.profile.inject_anoncreds(), + self.profile.ledger_read(), + self.profile.anoncreds(), pw_did, ) .await?; @@ -133,12 +136,12 @@ impl ServiceCredentialsHolder { let mut holder = self.get_holder(thread_id)?; let connection_id = self.get_connection_id(thread_id)?; let connection = self.service_connections.get_by_id(&connection_id)?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); holder .process_credential( - &self.profile.inject_anoncreds_ledger_read(), - &self.profile.inject_anoncreds(), + self.profile.ledger_read(), + self.profile.anoncreds(), msg_issue_credential.clone(), ) .await?; @@ -147,7 +150,7 @@ impl ServiceCredentialsHolder { Some(msg_response) => { let send_closure: SendClosure = Box::new(|msg: AriesMessage| { Box::pin( - async move { connection.send_message(&wallet, &msg, &HttpClient).await }, + async move { connection.send_message(wallet, &msg, &HttpClient).await }, ) }); send_closure(msg_response).await?; @@ -165,7 +168,7 @@ impl ServiceCredentialsHolder { pub async fn is_revokable(&self, thread_id: &str) -> AgentResult { self.get_holder(thread_id)? 
- .is_revokable(&self.profile.inject_anoncreds_ledger_read()) + .is_revokable(self.profile.ledger_read()) .await .map_err(|err| err.into()) } diff --git a/agents/rust/aries-vcx-agent/src/services/issuer.rs b/agents/rust/aries-vcx-agent/src/services/issuer.rs index 0449917cf1..0e828b5fa6 100644 --- a/agents/rust/aries-vcx-agent/src/services/issuer.rs +++ b/agents/rust/aries-vcx-agent/src/services/issuer.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use aries_vcx::{ - core::profile::profile::Profile, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, handlers::{issuance::issuer::Issuer, util::OfferInfo}, messages::{ msg_fields::protocols::cred_issuance::v1::{ @@ -36,13 +36,16 @@ impl IssuerWrapper { } pub struct ServiceCredentialsIssuer { - profile: Arc, + profile: Arc, creds_issuer: ObjectCache, service_connections: Arc, } impl ServiceCredentialsIssuer { - pub fn new(profile: Arc, service_connections: Arc) -> Self { + pub fn new( + profile: Arc, + service_connections: Arc, + ) -> Self { Self { profile, service_connections, @@ -86,13 +89,13 @@ impl ServiceCredentialsIssuer { }; let connection = self.service_connections.get_by_id(&connection_id)?; issuer - .build_credential_offer_msg(&self.profile.inject_anoncreds(), offer_info, None) + .build_credential_offer_msg(self.profile.anoncreds(), offer_info, None) .await?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { connection.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { connection.send_message(wallet, &msg, &HttpClient).await }) }); let credential_offer = issuer.get_credential_offer_msg()?; @@ -140,15 +143,13 @@ impl ServiceCredentialsIssuer { } = self.creds_issuer.get(thread_id)?; let connection = self.service_connections.get_by_id(&connection_id)?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { connection.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { connection.send_message(wallet, &msg, &HttpClient).await }) }); - issuer - .build_credential(&self.profile.inject_anoncreds()) - .await?; + issuer.build_credential(self.profile.anoncreds()).await?; match issuer.get_state() { IssuerState::Failed => { let problem_report = issuer.get_problem_report()?; diff --git a/agents/rust/aries-vcx-agent/src/services/prover.rs b/agents/rust/aries-vcx-agent/src/services/prover.rs index cfa34c6bf1..7f9f92765e 100644 --- a/agents/rust/aries-vcx-agent/src/services/prover.rs +++ b/agents/rust/aries-vcx-agent/src/services/prover.rs @@ -1,7 +1,7 @@ use std::{collections::HashMap, sync::Arc}; use aries_vcx::{ - core::profile::profile::Profile, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, handlers::{ proof_presentation::{prover::Prover, types::SelectedCredentials}, util::PresentationProposalData, @@ -39,13 +39,16 @@ impl ProverWrapper { } pub struct ServiceProver { - profile: Arc, + profile: Arc, provers: ObjectCache, service_connections: Arc, } impl ServiceProver { - pub fn new(profile: Arc, service_connections: Arc) -> Self { + pub fn new( + profile: Arc, + service_connections: Arc, + ) -> Self { Self { profile, service_connections, @@ -69,7 +72,7 @@ impl ServiceProver { tails_dir: Option<&str>, ) -> AgentResult { let credentials = prover - .retrieve_credentials(&self.profile.inject_anoncreds()) + 
.retrieve_credentials(self.profile.anoncreds()) .await?; let mut res_credentials = SelectedCredentials::default(); @@ -106,10 +109,10 @@ impl ServiceProver { let connection = self.service_connections.get_by_id(connection_id)?; let mut prover = Prover::create("")?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { connection.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { connection.send_message(wallet, &msg, &HttpClient).await }) }); let proposal = prover.build_presentation_proposal(proposal).await?; @@ -142,17 +145,17 @@ impl ServiceProver { .await?; prover .generate_presentation( - &self.profile.inject_anoncreds_ledger_read(), - &self.profile.inject_anoncreds(), + self.profile.ledger_read(), + self.profile.anoncreds(), credentials, HashMap::new(), ) .await?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { connection.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { connection.send_message(wallet, &msg, &HttpClient).await }) }); let message = prover.mark_presentation_sent()?; diff --git a/agents/rust/aries-vcx-agent/src/services/revocation_registry.rs b/agents/rust/aries-vcx-agent/src/services/revocation_registry.rs index b7334b2010..312f20c491 100644 --- a/agents/rust/aries-vcx-agent/src/services/revocation_registry.rs +++ b/agents/rust/aries-vcx-agent/src/services/revocation_registry.rs @@ -4,7 +4,8 @@ use std::{ }; use aries_vcx::{ - common::primitives::revocation_registry::RevocationRegistry, core::profile::profile::Profile, + common::primitives::revocation_registry::RevocationRegistry, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, }; use crate::{ @@ -13,13 +14,13 @@ use crate::{ }; pub struct ServiceRevocationRegistries { - profile: Arc, + profile: Arc, issuer_did: String, rev_regs: ObjectCache, } impl ServiceRevocationRegistries { - pub fn new(profile: Arc, issuer_did: String) -> Self { + pub fn new(profile: Arc, issuer_did: String) -> Self { Self { profile, issuer_did, @@ -39,7 +40,7 @@ impl ServiceRevocationRegistries { pub async fn create_rev_reg(&self, cred_def_id: &str, max_creds: u32) -> AgentResult { let rev_reg = RevocationRegistry::create( - &self.profile.inject_anoncreds(), + self.profile.anoncreds(), &self.issuer_did, cred_def_id, "/tmp", @@ -66,7 +67,7 @@ impl ServiceRevocationRegistries { pub async fn publish_rev_reg(&self, thread_id: &str, tails_url: &str) -> AgentResult<()> { let mut rev_reg = self.rev_regs.get(thread_id)?; rev_reg - .publish_revocation_primitives(&self.profile.inject_anoncreds_ledger_write(), tails_url) + .publish_revocation_primitives(self.profile.ledger_write(), tails_url) .await?; self.rev_regs.insert(thread_id, rev_reg)?; Ok(()) @@ -75,7 +76,7 @@ impl ServiceRevocationRegistries { pub async fn revoke_credential_locally(&self, id: &str, cred_rev_id: &str) -> AgentResult<()> { let rev_reg = self.rev_regs.get(id)?; rev_reg - .revoke_credential_local(&self.profile.inject_anoncreds(), cred_rev_id) + .revoke_credential_local(self.profile.anoncreds(), cred_rev_id) .await?; Ok(()) } @@ -84,8 +85,8 @@ impl ServiceRevocationRegistries { let rev_reg = self.rev_regs.get(id)?; rev_reg .publish_local_revocations( - &self.profile.inject_anoncreds(), - &self.profile.inject_anoncreds_ledger_write(), + self.profile.anoncreds(), + 
self.profile.ledger_write(), &self.issuer_did, ) .await?; diff --git a/agents/rust/aries-vcx-agent/src/services/schema.rs b/agents/rust/aries-vcx-agent/src/services/schema.rs index 346dce77b5..f7d238ff60 100644 --- a/agents/rust/aries-vcx-agent/src/services/schema.rs +++ b/agents/rust/aries-vcx-agent/src/services/schema.rs @@ -1,6 +1,10 @@ use std::sync::{Arc, Mutex}; -use aries_vcx::{common::primitives::credential_schema::Schema, core::profile::profile::Profile}; +use aries_vcx::{ + common::primitives::credential_schema::Schema, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, +}; +use aries_vcx_core::ledger::base_ledger::AnoncredsLedgerRead; use crate::{ error::*, @@ -8,13 +12,13 @@ use crate::{ }; pub struct ServiceSchemas { - profile: Arc, + profile: Arc, issuer_did: String, schemas: ObjectCache, } impl ServiceSchemas { - pub fn new(profile: Arc, issuer_did: String) -> Self { + pub fn new(profile: Arc, issuer_did: String) -> Self { Self { profile, issuer_did, @@ -29,7 +33,7 @@ impl ServiceSchemas { attributes: &Vec, ) -> AgentResult { let schema = Schema::create( - &self.profile.inject_anoncreds(), + self.profile.anoncreds(), "", &self.issuer_did, name, @@ -42,15 +46,13 @@ impl ServiceSchemas { pub async fn publish_schema(&self, thread_id: &str) -> AgentResult<()> { let schema = self.schemas.get(thread_id)?; - let schema = schema - .publish(&self.profile.inject_anoncreds_ledger_write(), None) - .await?; + let schema = schema.publish(self.profile.ledger_write()).await?; self.schemas.insert(thread_id, schema)?; Ok(()) } pub async fn schema_json(&self, thread_id: &str) -> AgentResult { - let ledger = Arc::clone(&self.profile).inject_anoncreds_ledger_read(); + let ledger = self.profile.ledger_read(); Ok(ledger.get_schema(thread_id, None).await?) } diff --git a/agents/rust/aries-vcx-agent/src/services/verifier.rs b/agents/rust/aries-vcx-agent/src/services/verifier.rs index a7d7acdc33..2fc10cf15b 100644 --- a/agents/rust/aries-vcx-agent/src/services/verifier.rs +++ b/agents/rust/aries-vcx-agent/src/services/verifier.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use aries_vcx::{ common::proofs::proof_request::PresentationRequestData, - core::profile::profile::Profile, + core::profile::{modular_libs_profile::ModularLibsProfile, Profile}, handlers::proof_presentation::verifier::Verifier, messages::{ msg_fields::protocols::present_proof::{ @@ -41,13 +41,16 @@ impl VerifierWrapper { } pub struct ServiceVerifier { - profile: Arc, + profile: Arc, verifiers: ObjectCache, service_connections: Arc, } impl ServiceVerifier { - pub fn new(profile: Arc, service_connections: Arc) -> Self { + pub fn new( + profile: Arc, + service_connections: Arc, + ) -> Self { Self { profile, service_connections, @@ -68,10 +71,10 @@ impl ServiceVerifier { Verifier::create_from_request("".to_string(), &request)? 
}; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { connection.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { connection.send_message(wallet, &msg, &HttpClient).await }) }); let message = verifier.mark_presentation_request_sent()?; @@ -100,16 +103,16 @@ impl ServiceVerifier { connection_id, } = self.verifiers.get(thread_id)?; let connection = self.service_connections.get_by_id(&connection_id)?; - let wallet = self.profile.inject_wallet(); + let wallet = self.profile.wallet(); let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { connection.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { connection.send_message(wallet, &msg, &HttpClient).await }) }); let message = verifier .verify_presentation( - &self.profile.inject_anoncreds_ledger_read(), - &self.profile.inject_anoncreds(), + self.profile.ledger_read(), + self.profile.anoncreds(), presentation, ) .await?; diff --git a/aries_vcx/Cargo.toml b/aries_vcx/Cargo.toml index 43b78a29c5..800799572b 100644 --- a/aries_vcx/Cargo.toml +++ b/aries_vcx/Cargo.toml @@ -11,54 +11,51 @@ path = "src/lib.rs" doctest = false [features] -default = ["vdrtools"] -vdrtools = ["aries_vcx_core/vdrtools_anoncreds", "aries_vcx_core/vdrtools_wallet"] +default = ["credx", "vdrtools_wallet"] # Feature flag to include the 'modular library' dependencies (vdrtools alternatives; indy-vdr, indy-credx) -modular_libs = ["aries_vcx_core/modular_libs"] -# TODO: Remove using "vdrtools" feature flag for vdr_proxy_ledger once IndyCredxAnonCreds -# is fully implemented -vdr_proxy_ledger = ["aries_vcx_core/vdr_proxy_ledger", "vdrtools"] +credx = ["aries_vcx_core/credx"] +vdr_proxy_ledger = ["aries_vcx_core/vdr_proxy_ledger", "aries_vcx_core/vdrtools_wallet"] +vdrtools_wallet = ["aries_vcx_core/vdrtools_wallet"] # Feature for allowing legacy proof verification legacy_proof = ["aries_vcx_core/legacy_proof"] -# Used for testing the migrator -migration = ["vdrtools", "modular_libs", "legacy_proof"] - [dependencies] agency_client = { path = "../agency_client" } messages = { path = "../messages" } diddoc_legacy = { path = "../diddoc_legacy" } aries_vcx_core = { path = "../aries_vcx_core" } shared_vcx = { path = "../shared_vcx" } -bs58 = "0.4.0" +bs58 = "0.5.0" async-trait = "0.1.53" -env_logger = "0.9.0" +env_logger = "0.10.0" log = "0.4.16" chrono = "0.4.23" -time = "=0.3.20" +time = "0.3.20" lazy_static = "1.3" -rand = "0.7.3" +rand = "0.8.5" serde = "1.0.97" serde_json = "1.0.40" serde_derive = "1.0.97" regex = "1.1.0" -base64 = "0.10" +base64 = "0.21.4" sha2 = "0.10.7" num-bigint = "0.4.3" futures = { version = "0.3", default-features = false } -uuid = { version = "0.8", default-features = false, features = ["v4"] } -strum = "0.16.0" -strum_macros = "0.16.0" -derive_builder = "0.10.2" +uuid = { version = "1.4.1", default-features = false, features = ["v4"] } +strum = "0.25.0" +strum_macros = "0.25.2" +derive_builder = "0.12.0" tokio = { version = "1.20.4" } thiserror = "1.0.37" url = { version = "2.3", features = ["serde"] } [target.'cfg(target_os = "android")'.dependencies] -android_logger = "0.5" +android_logger = "0.13.3" [dev-dependencies] wallet_migrator = { path = "../wallet_migrator" } async-channel = "1.7.1" tokio = { version = "1.20", features = ["rt", "macros", "rt-multi-thread"] } +mockall = "0.11.4" +reqwest = "0.11.18" # TODO - DELETE ONLY FOR TEMPORARY TEST!! 
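
The hunks above and below all apply the same two changes: the agent services hold a concrete `ModularLibsProfile` instead of a `Profile` trait object and borrow its components through getters (`wallet()`, `anoncreds()`, `ledger_read()`, `ledger_write()`), and the library helpers take `&impl Trait` parameters instead of Arc-wrapped trait objects. The following is a minimal, self-contained sketch of that shape; the trait and type names (`LedgerRead`, `AnonCreds`, `DemoProfile`, `InMemoryLedger`, `InMemoryAnonCreds`) are simplified stand-ins for illustration, not the real aries_vcx_core API.

```rust
// Sketch of the refactoring pattern in this patch: components are borrowed from a
// concrete profile via getters, and helpers are generic over `&impl Trait` rather
// than taking Arc-wrapped trait objects. All names here are illustrative stand-ins.

trait LedgerRead {
    fn get_schema(&self, schema_id: &str) -> String;
}

trait AnonCreds {
    fn generate_nonce(&self) -> String;
}

struct InMemoryLedger;

impl LedgerRead for InMemoryLedger {
    fn get_schema(&self, schema_id: &str) -> String {
        // Stand-in for a ledger lookup.
        format!(r#"{{"id": "{schema_id}"}}"#)
    }
}

struct InMemoryAnonCreds;

impl AnonCreds for InMemoryAnonCreds {
    fn generate_nonce(&self) -> String {
        // Stand-in for nonce generation.
        "123432421212".to_string()
    }
}

// Concrete profile owning its components; callers borrow them through getters,
// mirroring `profile.ledger_read()` / `profile.anoncreds()` in the hunks above.
struct DemoProfile {
    ledger: InMemoryLedger,
    anoncreds: InMemoryAnonCreds,
}

impl DemoProfile {
    fn ledger_read(&self) -> &impl LedgerRead {
        &self.ledger
    }

    fn anoncreds(&self) -> &impl AnonCreds {
        &self.anoncreds
    }
}

// Previously this kind of helper received Arc-wrapped trait objects; now it is
// generic over the implementations and needs no Arc or dynamic dispatch.
fn build_request(ledger: &impl LedgerRead, anoncreds: &impl AnonCreds, schema_id: &str) -> String {
    let schema = ledger.get_schema(schema_id);
    let nonce = anoncreds.generate_nonce();
    format!(r#"{{"schema": {schema}, "nonce": "{nonce}"}}"#)
}

fn main() {
    let profile = DemoProfile {
        ledger: InMemoryLedger,
        anoncreds: InMemoryAnonCreds,
    };
    let request = build_request(profile.ledger_read(), profile.anoncreds(), "schema:1");
    println!("{request}");
}
```

In the patch itself the getters come from `ModularLibsProfile`, which is why call sites such as the test setups above drop the `Arc::clone(&setup.profile).inject_*()` dance and simply pass `setup.profile.anoncreds()` or `setup.profile.ledger_read()` by reference.
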
diff --git a/aries_vcx/README.md b/aries_vcx/README.md index 90f8ca0fd7..38305df1a4 100644 --- a/aries_vcx/README.md +++ b/aries_vcx/README.md @@ -26,7 +26,7 @@ Aries [pick-up protocol](https://github.com/hyperledger/aries-rfcs/tree/main/fea To use `aries-vcx` in your project, you need to add GitHub dependency to your `Cargo.toml`, and best define a version through a `tag`: ```toml -aries-vcx = { tag = "0.58.0", git = "https://github.com/hyperledger/aries-vcx" } +aries-vcx = { tag = "0.60.0", git = "https://github.com/hyperledger/aries-vcx" } ``` It's also advisable to follow these [instructions](TUTORIAL.md) to check your environment is properly configured. diff --git a/aries_vcx/src/common/anoncreds.rs b/aries_vcx/src/common/anoncreds.rs index 843991c403..98c264a04a 100644 --- a/aries_vcx/src/common/anoncreds.rs +++ b/aries_vcx/src/common/anoncreds.rs @@ -2,29 +2,25 @@ #[cfg(test)] #[allow(clippy::unwrap_used)] pub mod integration_tests { - use std::sync::Arc; - use aries_vcx_core::{ - errors::error::AriesVcxCoreErrorKind, ledger::indy::pool::test_utils::get_temp_dir_path, + errors::error::AriesVcxCoreErrorKind, + ledger::{base_ledger::AnoncredsLedgerRead, indy::pool::test_utils::get_temp_dir_path}, }; - use crate::{ - common::{ - credentials::get_cred_rev_id, - test_utils::{ - create_and_write_credential, create_and_write_test_cred_def, - create_and_write_test_rev_reg, create_and_write_test_schema, - }, + use crate::common::{ + credentials::get_cred_rev_id, + test_utils::{ + create_and_publish_test_rev_reg, create_and_write_credential, + create_and_write_test_cred_def, create_and_write_test_schema, }, - utils::devsetup::SetupProfile, }; #[tokio::test] #[ignore] async fn test_pool_returns_error_if_proof_request_is_malformed() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let proof_req = "{"; - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); + let anoncreds = setup.profile.anoncreds(); let result = anoncreds .prover_get_credentials_for_proof_req(proof_req) .await; @@ -39,7 +35,7 @@ pub mod integration_tests { #[tokio::test] #[ignore] async fn test_pool_prover_get_credentials() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let proof_req = json!({ "nonce":"123432421212", "name":"proof_req_1", @@ -56,7 +52,7 @@ pub mod integration_tests { }) .to_string(); - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); + let anoncreds = setup.profile.anoncreds(); let _result = anoncreds .prover_get_credentials_for_proof_req(&proof_req) .await @@ -74,11 +70,10 @@ pub mod integration_tests { .await; } - // #[cfg(feature = "modular_libs")] #[tokio::test] #[ignore] async fn test_pool_proof_req_attribute_names() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let proof_req = json!({ "nonce":"123432421212", "name":"proof_req_1", @@ -101,7 +96,7 @@ pub mod integration_tests { }) .to_string(); - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); + let anoncreds = setup.profile.anoncreds(); let _result = anoncreds .prover_get_credentials_for_proof_req(&proof_req) .await @@ -113,43 +108,43 @@ pub mod integration_tests { #[tokio::test] #[ignore] async fn test_pool_revoke_credential() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let schema = create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, 
crate::utils::constants::DEFAULT_SCHEMA_ATTRS, ) .await; let cred_def = create_and_write_test_cred_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &schema.schema_id, true, ) .await; - let rev_reg = create_and_write_test_rev_reg( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + let rev_reg = create_and_publish_test_rev_reg( + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, &cred_def.get_cred_def_id(), ) .await; let cred_id = create_and_write_credential( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &cred_def, Some(&rev_reg), ) .await; - let cred_rev_id = get_cred_rev_id(&setup.profile.inject_anoncreds(), &cred_id) + let cred_rev_id = get_cred_rev_id(setup.profile.anoncreds(), &cred_id) .await .unwrap(); - let ledger = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); + let ledger = setup.profile.ledger_read(); let (_, first_rev_reg_delta, first_timestamp) = ledger .get_rev_reg_delta_json(&rev_reg.rev_reg_id, None, None) @@ -164,7 +159,7 @@ pub mod integration_tests { assert_eq!(first_rev_reg_delta, test_same_delta); assert_eq!(first_timestamp, test_same_timestamp); - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); + let anoncreds = setup.profile.anoncreds(); anoncreds .revoke_credential_local( @@ -177,8 +172,8 @@ pub mod integration_tests { rev_reg .publish_local_revocations( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, ) .await diff --git a/aries_vcx/src/common/credentials/mod.rs b/aries_vcx/src/common/credentials/mod.rs index 2f40123c9f..7f734580c7 100644 --- a/aries_vcx/src/common/credentials/mod.rs +++ b/aries_vcx/src/common/credentials/mod.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, @@ -20,10 +20,7 @@ struct ProverCredential { cred_rev_id: Option, } -pub async fn get_cred_rev_id( - anoncreds: &Arc, - cred_id: &str, -) -> VcxResult { +pub async fn get_cred_rev_id(anoncreds: &impl BaseAnonCreds, cred_id: &str) -> VcxResult { let cred_json = anoncreds.prover_get_credential(cred_id).await?; let prover_cred = serde_json::from_str::(&cred_json).map_err(|err| { AriesVcxError::from_msg( @@ -38,7 +35,7 @@ pub async fn get_cred_rev_id( } pub async fn is_cred_revoked( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, rev_reg_id: &str, rev_id: &str, ) -> VcxResult { @@ -55,56 +52,53 @@ pub async fn is_cred_revoked( #[allow(clippy::unwrap_used)] mod integration_tests { use super::*; - use crate::{ - common::test_utils::{ - create_and_write_credential, create_and_write_test_cred_def, - create_and_write_test_rev_reg, create_and_write_test_schema, - }, - utils::devsetup::SetupProfile, + use crate::common::test_utils::{ + create_and_publish_test_rev_reg, create_and_write_credential, + create_and_write_test_cred_def, create_and_write_test_schema, }; #[tokio::test] #[ignore] async fn test_pool_prover_get_credential() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let schema = 
create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, crate::utils::constants::DEFAULT_SCHEMA_ATTRS, ) .await; let cred_def = create_and_write_test_cred_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &schema.schema_id, true, ) .await; - let rev_reg = create_and_write_test_rev_reg( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + let rev_reg = create_and_publish_test_rev_reg( + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, &cred_def.get_cred_def_id(), ) .await; let cred_id = create_and_write_credential( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &cred_def, Some(&rev_reg), ) .await; - let cred_rev_id = get_cred_rev_id(&setup.profile.inject_anoncreds(), &cred_id) + let cred_rev_id = get_cred_rev_id(setup.profile.anoncreds(), &cred_id) .await .unwrap(); let cred_json = setup .profile - .inject_anoncreds() + .anoncreds() .prover_get_credential(&cred_id) .await .unwrap(); @@ -112,7 +106,7 @@ mod integration_tests { assert_eq!(prover_cred.schema_id, schema.schema_id); assert_eq!(prover_cred.cred_def_id, cred_def.get_cred_def_id()); - assert_eq!(prover_cred.cred_rev_id.unwrap().to_string(), cred_rev_id); + assert_eq!(prover_cred.cred_rev_id.unwrap(), cred_rev_id); assert_eq!(prover_cred.rev_reg_id.unwrap(), rev_reg.rev_reg_id); }) .await; @@ -121,44 +115,44 @@ mod integration_tests { #[tokio::test] #[ignore] async fn test_pool_is_cred_revoked() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let schema = create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, crate::utils::constants::DEFAULT_SCHEMA_ATTRS, ) .await; let cred_def = create_and_write_test_cred_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &schema.schema_id, true, ) .await; - let rev_reg = create_and_write_test_rev_reg( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + let rev_reg = create_and_publish_test_rev_reg( + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, &cred_def.get_cred_def_id(), ) .await; let cred_id = create_and_write_credential( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &cred_def, Some(&rev_reg), ) .await; - let cred_rev_id = get_cred_rev_id(&setup.profile.inject_anoncreds(), &cred_id) + let cred_rev_id = get_cred_rev_id(setup.profile.anoncreds(), &cred_id) .await .unwrap(); assert!(!is_cred_revoked( - &setup.profile.inject_anoncreds_ledger_read(), + setup.profile.ledger_read(), &rev_reg.rev_reg_id, &cred_rev_id ) @@ -167,7 +161,7 @@ mod integration_tests { setup .profile - .inject_anoncreds() + .anoncreds() 
.revoke_credential_local( &rev_reg.get_tails_dir(), &rev_reg.rev_reg_id, @@ -177,8 +171,8 @@ mod integration_tests { .unwrap(); rev_reg .publish_local_revocations( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, ) .await @@ -187,7 +181,7 @@ mod integration_tests { std::thread::sleep(std::time::Duration::from_millis(500)); assert!(is_cred_revoked( - &setup.profile.inject_anoncreds_ledger_read(), + setup.profile.ledger_read(), &rev_reg.rev_reg_id, &cred_rev_id ) diff --git a/aries_vcx/src/common/keys.rs b/aries_vcx/src/common/keys.rs index 476f92d4a2..79f1b4f724 100644 --- a/aries_vcx/src/common/keys.rs +++ b/aries_vcx/src/common/keys.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::{ ledger::base_ledger::{IndyLedgerRead, IndyLedgerWrite}, wallet::base_wallet::BaseWallet, @@ -9,8 +7,8 @@ use serde_json::Value; use crate::errors::error::prelude::*; pub async fn rotate_verkey_apply( - wallet: &Arc, - indy_ledger_write: &Arc, + wallet: &impl BaseWallet, + indy_ledger_write: &impl IndyLedgerWrite, did: &str, temp_vk: &str, ) -> VcxResult<()> { @@ -51,8 +49,8 @@ pub async fn rotate_verkey_apply( } pub async fn rotate_verkey( - wallet: &Arc, - indy_ledger_write: &Arc, + wallet: &impl BaseWallet, + indy_ledger_write: &impl IndyLedgerWrite, did: &str, ) -> VcxResult<()> { let trustee_temp_verkey = wallet.replace_did_keys_start(did).await?; @@ -60,7 +58,7 @@ pub async fn rotate_verkey( } pub async fn get_verkey_from_ledger( - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, did: &str, ) -> VcxResult { let nym_response: String = indy_ledger.get_nym(did).await?; @@ -110,7 +108,7 @@ pub async fn get_verkey_from_ledger( // // #[tokio::test] // #[ignore] -// #[cfg(all(not(feature = "vdr_proxy_ledger"), not(feature = "modular_libs"),))] +// #[cfg(all(not(feature = "vdr_proxy_ledger"), not(feature = "credx"),))] // async fn test_pool_rotate_verkey_fails() { // use super::*; // diff --git a/aries_vcx/src/common/ledger/transactions.rs b/aries_vcx/src/common/ledger/transactions.rs index 3a4ec26983..3f34c8723c 100644 --- a/aries_vcx/src/common/ledger/transactions.rs +++ b/aries_vcx/src/common/ledger/transactions.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use aries_vcx_core::{ ledger::{ @@ -18,7 +18,6 @@ use serde_json::Value; use crate::{ common::{keys::get_verkey_from_ledger, ledger::service_didsov::EndpointDidSov}, errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, - global::settings, handlers::util::AnyInvitation, }; @@ -75,7 +74,7 @@ const DID_KEY_PREFIX: &str = "did:key:"; const ED25519_MULTIBASE_CODEC: [u8; 2] = [0xed, 0x01]; pub async fn resolve_service( - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, service: &OobService, ) -> VcxResult { match service { @@ -85,8 +84,8 @@ pub async fn resolve_service( } pub async fn add_new_did( - wallet: &Arc, - indy_ledger_write: &Arc, + wallet: &impl BaseWallet, + indy_ledger_write: &impl IndyLedgerWrite, submitter_did: &str, role: Option<&str>, ) -> VcxResult<(String, String)> { @@ -101,7 +100,7 @@ pub async fn add_new_did( } pub async fn into_did_doc( - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, invitation: &AnyInvitation, ) -> VcxResult { let mut did_doc: AriesDidDoc = AriesDidDoc::default(); @@ -235,10 +234,7 @@ fn normalize_keys_as_naked(keys_list: Vec) -> VcxResult> { Ok(result) } -pub async fn get_service( - ledger: &Arc, - did: 
&String, -) -> VcxResult { +pub async fn get_service(ledger: &impl IndyLedgerRead, did: &String) -> VcxResult { let did_raw = did.to_string(); let did_raw = match did_raw.rsplit_once(':') { None => did_raw, @@ -260,7 +256,7 @@ pub async fn get_service( } pub async fn parse_legacy_endpoint_attrib( - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, did_raw: &str, ) -> VcxResult { let attr_resp = indy_ledger.get_attr(did_raw, "service").await?; @@ -288,7 +284,7 @@ pub async fn parse_legacy_endpoint_attrib( } pub async fn write_endorser_did( - indy_ledger_write: &Arc, + indy_ledger_write: &impl IndyLedgerWrite, submitter_did: &str, target_did: &str, target_vk: &str, @@ -308,7 +304,7 @@ pub async fn write_endorser_did( } pub async fn write_endpoint_legacy( - indy_ledger_write: &Arc, + indy_ledger_write: &impl IndyLedgerWrite, did: &str, service: &AriesService, ) -> VcxResult { @@ -319,7 +315,7 @@ pub async fn write_endpoint_legacy( } pub async fn write_endpoint( - indy_ledger_write: &Arc, + indy_ledger_write: &impl IndyLedgerWrite, did: &str, service: &EndpointDidSov, ) -> VcxResult { @@ -330,7 +326,7 @@ pub async fn write_endpoint( } pub async fn add_attr( - indy_ledger_write: &Arc, + indy_ledger_write: &impl IndyLedgerWrite, did: &str, attr: &str, ) -> VcxResult<()> { @@ -339,7 +335,7 @@ pub async fn add_attr( } pub async fn get_attr( - ledger: &Arc, + ledger: &impl IndyLedgerRead, did: &str, attr_name: &str, ) -> VcxResult { @@ -353,7 +349,7 @@ pub async fn get_attr( } pub async fn clear_attr( - indy_ledger_write: &Arc, + indy_ledger_write: &impl IndyLedgerWrite, did: &str, attr_name: &str, ) -> VcxResult { @@ -364,9 +360,6 @@ pub async fn clear_attr( } fn check_response(response: &str) -> VcxResult<()> { - if settings::indy_mocks_enabled() { - return Ok(()); - } match parse_response(response)? 
{ Response::Reply(_) => Ok(()), Response::Reject(res) | Response::ReqNACK(res) => Err(AriesVcxError::from_msg( diff --git a/aries_vcx/src/common/mod.rs b/aries_vcx/src/common/mod.rs index a763cd7bc8..53055f2af3 100644 --- a/aries_vcx/src/common/mod.rs +++ b/aries_vcx/src/common/mod.rs @@ -5,6 +5,4 @@ pub mod ledger; pub mod primitives; pub mod proofs; pub mod signing; -#[cfg(feature = "vdrtools")] -// TODO: Used by tests/ so not "hideable" by #[cfg(test)] pub mod test_utils; diff --git a/aries_vcx/src/common/primitives/credential_definition.rs b/aries_vcx/src/common/primitives/credential_definition.rs index f661d6cda5..edffa25892 100644 --- a/aries_vcx/src/common/primitives/credential_definition.rs +++ b/aries_vcx/src/common/primitives/credential_definition.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, errors::error::AriesVcxCoreErrorKind, @@ -8,11 +6,7 @@ use aries_vcx_core::{ use crate::{ errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, - global::settings::indy_mocks_enabled, - utils::{ - constants::{CRED_DEF_ID, CRED_DEF_JSON, DEFAULT_SERIALIZE_VERSION}, - serialization::ObjectWithVersion, - }, + utils::{constants::DEFAULT_SERIALIZE_VERSION, serialization::ObjectWithVersion}, }; #[derive(Clone, Copy, Debug, Eq, PartialEq, Deserialize, Default)] @@ -81,13 +75,10 @@ pub struct RevocationDetails { } async fn _try_get_cred_def_from_ledger( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, issuer_did: &str, cred_def_id: &str, ) -> VcxResult> { - if indy_mocks_enabled() { - return Ok(None); - } match ledger.get_cred_def(cred_def_id, Some(issuer_did)).await { Ok(cred_def) => Ok(Some(cred_def)), Err(err) if err.kind() == AriesVcxCoreErrorKind::LedgerItemNotFound => Ok(None), @@ -102,8 +93,8 @@ async fn _try_get_cred_def_from_ledger( } impl CredentialDef { pub async fn create( - ledger_read: &Arc, - anoncreds: &Arc, + ledger_read: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, source_id: String, config: CredentialDefConfig, support_revocation: bool, @@ -156,8 +147,8 @@ impl CredentialDef { pub async fn publish_cred_def( self, - ledger_read: &Arc, - ledger_write: &Arc, + ledger_read: &impl AnoncredsLedgerRead, + ledger_write: &impl AnoncredsLedgerWrite, ) -> VcxResult { trace!( "publish_cred_def >>> issuer_did: {}, cred_def_id: {}", @@ -226,7 +217,7 @@ impl CredentialDef { self.source_id = source_id; } - pub async fn update_state(&mut self, ledger: &Arc) -> VcxResult { + pub async fn update_state(&mut self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { if (ledger.get_cred_def(&self.id, None).await).is_ok() { self.state = PublicEntityStateType::Published } @@ -239,7 +230,7 @@ impl CredentialDef { } pub async fn generate_cred_def( - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, issuer_did: &str, schema_json: &str, tag: &str, @@ -255,9 +246,6 @@ pub async fn generate_cred_def( sig_type, support_revocation ); - if indy_mocks_enabled() { - return Ok((CRED_DEF_ID.to_string(), CRED_DEF_JSON.to_string())); - } let config_json = json!({"support_revocation": support_revocation.unwrap_or(false)}).to_string(); @@ -277,9 +265,10 @@ pub async fn generate_cred_def( #[cfg(test)] #[allow(clippy::unwrap_used)] pub mod integration_tests { - use std::sync::Arc; - - use aries_vcx_core::ledger::indy::pool::test_utils::get_temp_dir_path; + use aries_vcx_core::ledger::{ + base_ledger::{AnoncredsLedgerRead, AnoncredsLedgerWrite}, + indy::pool::test_utils::get_temp_dir_path, + }; use crate::{ common::{ @@ -288,30 +277,30 @@ 
pub mod integration_tests { }, test_utils::create_and_write_test_schema, }, - utils::{constants::DEFAULT_SCHEMA_ATTRS, devsetup::SetupProfile}, + utils::constants::DEFAULT_SCHEMA_ATTRS, }; #[tokio::test] #[ignore] async fn test_pool_create_cred_def_real() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let schema = create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, DEFAULT_SCHEMA_ATTRS, ) .await; - let ledger_read = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); - let ledger_write = Arc::clone(&setup.profile).inject_anoncreds_ledger_write(); + let ledger_read = setup.profile.ledger_read(); + let ledger_write = setup.profile.ledger_write(); let schema_json = ledger_read .get_schema(&schema.schema_id, None) .await .unwrap(); let (cred_def_id, cred_def_json_local) = generate_cred_def( - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &schema_json, "tag_1", @@ -342,23 +331,23 @@ pub mod integration_tests { #[tokio::test] #[ignore] async fn test_pool_create_rev_reg_def() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let schema = create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, DEFAULT_SCHEMA_ATTRS, ) .await; - let ledger_read = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); - let ledger_write = Arc::clone(&setup.profile).inject_anoncreds_ledger_write(); + let ledger_read = setup.profile.ledger_read(); + let ledger_write = setup.profile.ledger_write(); let schema_json = ledger_read .get_schema(&schema.schema_id, None) .await .unwrap(); let (cred_def_id, cred_def_json) = generate_cred_def( - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &schema_json, "tag_1", @@ -375,7 +364,7 @@ pub mod integration_tests { let path = get_temp_dir_path(); let (rev_reg_def_id, rev_reg_def_json, rev_reg_entry_json) = generate_rev_reg( - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &cred_def_id, path.to_str().unwrap(), diff --git a/aries_vcx/src/common/primitives/credential_schema.rs b/aries_vcx/src/common/primitives/credential_schema.rs index b624ab6546..e19b730bc1 100644 --- a/aries_vcx/src/common/primitives/credential_schema.rs +++ b/aries_vcx/src/common/primitives/credential_schema.rs @@ -8,11 +8,7 @@ use aries_vcx_core::{ use super::credential_definition::PublicEntityStateType; use crate::{ errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, - global::settings, - utils::{ - constants::{DEFAULT_SERIALIZE_VERSION, SCHEMA_ID, SCHEMA_JSON}, - serialization::ObjectWithVersion, - }, + utils::{constants::DEFAULT_SERIALIZE_VERSION, serialization::ObjectWithVersion}, }; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] @@ -40,7 +36,7 @@ pub struct Schema { impl Schema { pub async fn create( - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, source_id: &str, submitter_did: &str, name: &str, @@ -55,19 +51,6 @@ impl Schema { data ); - if settings::indy_mocks_enabled() { - return Ok(Self { - source_id: source_id.to_string(), - version: version.to_string(), - submitter_did: submitter_did.to_string(), - schema_id: SCHEMA_ID.to_string(), - schema_json: SCHEMA_JSON.to_string(), - name: 
name.to_string(), - state: PublicEntityStateType::Built, - ..Self::default() - }); - } - let data_str = serde_json::to_string(data).map_err(|err| { AriesVcxError::from_msg( AriesVcxErrorKind::SerializationError, @@ -91,46 +74,15 @@ impl Schema { }) } - pub fn create_from_ledger_json( - schema_json: &str, - source_id: &str, - schema_id: &str, - ) -> VcxResult { - let schema_data: SchemaData = serde_json::from_str(schema_json).map_err(|err| { - AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidJson, - format!("Cannot deserialize schema: {}", err), - ) - })?; - - Ok(Self { - source_id: source_id.to_string(), - schema_id: schema_id.to_string(), - schema_json: schema_json.to_string(), - name: schema_data.name, - version: schema_data.version, - data: schema_data.attr_names, - submitter_did: "".to_string(), - state: PublicEntityStateType::Published, - }) + pub async fn submitter_did(&self) -> String { + self.submitter_did.clone() } - pub async fn publish( - self, - ledger: &Arc, - endorser_did: Option, - ) -> VcxResult { + pub async fn publish(self, ledger: &impl AnoncredsLedgerWrite) -> VcxResult { trace!("Schema::publish >>>"); - if settings::indy_mocks_enabled() { - return Ok(Self { - state: PublicEntityStateType::Published, - ..self - }); - } - ledger - .publish_schema(&self.schema_json, &self.submitter_did, endorser_did) + .publish_schema(&self.schema_json, &self.submitter_did, None) .await?; Ok(Self { @@ -159,7 +111,7 @@ impl Schema { .map_err(|err: AriesVcxError| err.extend("Cannot deserialize Schema")) } - pub async fn update_state(&mut self, ledger: &Arc) -> VcxResult { + pub async fn update_state(&mut self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { if ledger.get_schema(&self.schema_id, None).await.is_ok() { self.state = PublicEntityStateType::Published } diff --git a/aries_vcx/src/common/primitives/revocation_registry.rs b/aries_vcx/src/common/primitives/revocation_registry.rs index 62b520ae11..3c0cf77b55 100644 --- a/aries_vcx/src/common/primitives/revocation_registry.rs +++ b/aries_vcx/src/common/primitives/revocation_registry.rs @@ -1,16 +1,10 @@ -use std::sync::Arc; - use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, errors::error::AriesVcxCoreErrorKind, ledger::base_ledger::AnoncredsLedgerWrite, }; use super::credential_definition::PublicEntityStateType; -use crate::{ - errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, - global::settings, - utils::constants::REV_REG_ID, -}; +use crate::errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}; #[derive(Clone, Deserialize, Debug, Serialize, PartialEq, Eq)] pub struct RevocationRegistry { @@ -28,7 +22,7 @@ pub struct RevocationRegistry { impl RevocationRegistry { pub async fn create( - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, issuer_did: &str, cred_def_id: &str, tails_dir: &str, @@ -102,7 +96,7 @@ impl RevocationRegistry { pub async fn publish_rev_reg_def( &mut self, - ledger: &Arc, + ledger: &impl AnoncredsLedgerWrite, issuer_did: &str, tails_url: &str, ) -> VcxResult<()> { @@ -129,7 +123,7 @@ impl RevocationRegistry { pub async fn publish_rev_reg_delta( &mut self, - ledger_write: &Arc, + ledger_write: &impl AnoncredsLedgerWrite, issuer_did: &str, ) -> VcxResult<()> { trace!( @@ -152,7 +146,7 @@ impl RevocationRegistry { pub async fn publish_revocation_primitives( &mut self, - ledger_write: &Arc, + ledger_write: &impl AnoncredsLedgerWrite, tails_url: &str, ) -> VcxResult<()> { trace!( @@ -166,7 +160,7 @@ impl RevocationRegistry { async fn publish_built_rev_reg_delta( &mut self, - 
ledger_write: &Arc, + ledger_write: &impl AnoncredsLedgerWrite, ) -> VcxResult<()> { let issuer_did = &self.issuer_did.clone(); if self.was_rev_reg_delta_published() { @@ -179,7 +173,7 @@ impl RevocationRegistry { async fn publish_built_rev_reg_def( &mut self, - ledger_write: &Arc, + ledger_write: &impl AnoncredsLedgerWrite, tails_url: &str, ) -> VcxResult<()> { let issuer_did = &self.issuer_did.clone(); @@ -212,7 +206,7 @@ impl RevocationRegistry { pub async fn revoke_credential_local( &self, - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, cred_rev_id: &str, ) -> VcxResult<()> { anoncreds @@ -223,8 +217,8 @@ impl RevocationRegistry { pub async fn publish_local_revocations( &self, - anoncreds: &Arc, - ledger_write: &Arc, + anoncreds: &impl BaseAnonCreds, + ledger_write: &impl AnoncredsLedgerWrite, submitter_did: &str, ) -> VcxResult<()> { if let Some(delta) = anoncreds.get_rev_reg_delta(&self.rev_reg_id).await? { @@ -288,7 +282,7 @@ pub struct RevocationRegistryDefinition { pub ver: String, } pub async fn generate_rev_reg( - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, issuer_did: &str, cred_def_id: &str, tails_dir: &str, @@ -304,14 +298,6 @@ pub async fn generate_rev_reg( max_creds, tag ); - if settings::indy_mocks_enabled() { - debug!("generate_rev_reg >>> returning mocked value"); - return Ok(( - REV_REG_ID.to_string(), - RevocationRegistryDefinition::default(), - "".to_string(), - )); - } let (rev_reg_id, rev_reg_def_json, rev_reg_entry_json) = anoncreds .issuer_create_and_store_revoc_reg(issuer_did, cred_def_id, tails_dir, max_creds, tag) @@ -330,6 +316,3 @@ pub async fn generate_rev_reg( Ok((rev_reg_id, rev_reg_def, rev_reg_entry_json)) } - -// consider impl revoke_credential_local in a generic (non-vdrtools) fashion -// consider impl publish_local_revocations in a generic (non-vdrtools) fashion diff --git a/aries_vcx/src/common/proofs/proof_request.rs b/aries_vcx/src/common/proofs/proof_request.rs index 02cf300aa6..e0f88eb464 100644 --- a/aries_vcx/src/common/proofs/proof_request.rs +++ b/aries_vcx/src/common/proofs/proof_request.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, sync::Arc, vec::Vec}; +use std::{collections::HashMap, vec::Vec}; use aries_vcx_core::anoncreds::base_anoncreds::BaseAnonCreds; use serde_json; @@ -24,7 +24,7 @@ pub struct ProofRequestData { impl ProofRequestData { const DEFAULT_VERSION: &'static str = "1.0"; - pub async fn create(anoncreds: &Arc, name: &str) -> VcxResult { + pub async fn create(anoncreds: &impl BaseAnonCreds, name: &str) -> VcxResult { let nonce = anoncreds.generate_nonce().await?; Ok(Self { name: name.to_string(), @@ -207,7 +207,7 @@ mod unit_tests { async fn test_proof_request_msg() { let _setup = SetupDefaults::init(); - let anoncreds: Arc = Arc::new(MockAnoncreds {}); + let anoncreds = MockAnoncreds; let request = ProofRequestData::create(&anoncreds, "Test") .await .unwrap() @@ -242,7 +242,7 @@ mod unit_tests { async fn test_requested_attrs_constructed_correctly() { let _setup = SetupDefaults::init(); - let anoncreds: Arc = Arc::new(MockAnoncreds {}); + let anoncreds = MockAnoncreds; let request = ProofRequestData::create(&anoncreds, "") .await .unwrap() @@ -258,7 +258,7 @@ mod unit_tests { let expected_req_attrs = _expected_req_attrs(); let req_attrs_string = serde_json::to_string(&expected_req_attrs).unwrap(); - let anoncreds: Arc = Arc::new(MockAnoncreds {}); + let anoncreds = MockAnoncreds; let request = ProofRequestData::create(&anoncreds, "") .await .unwrap() @@ -300,7 +300,7 @@ mod unit_tests { .unwrap(); 
check_predicates.insert("predicate_0".to_string(), attr_info1); - let anoncreds: Arc = Arc::new(MockAnoncreds {}); + let anoncreds = MockAnoncreds; let request = ProofRequestData::create(&anoncreds, "") .await .unwrap() @@ -324,7 +324,7 @@ mod unit_tests { let requested_attrs = json!([attr_info, attr_info_2]).to_string(); - let anoncreds: Arc = Arc::new(MockAnoncreds {}); + let anoncreds = MockAnoncreds; let request = ProofRequestData::create(&anoncreds, "") .await .unwrap() @@ -355,7 +355,7 @@ mod unit_tests { let requested_attrs = json!([attr_info]).to_string(); - let anoncreds: Arc = Arc::new(MockAnoncreds {}); + let anoncreds = MockAnoncreds; let err = ProofRequestData::create(&anoncreds, "") .await .unwrap() diff --git a/aries_vcx/src/common/proofs/prover/mod.rs b/aries_vcx/src/common/proofs/prover/mod.rs index e09cafae7a..0f364f3afe 100644 --- a/aries_vcx/src/common/proofs/prover/mod.rs +++ b/aries_vcx/src/common/proofs/prover/mod.rs @@ -1,2 +1,76 @@ -pub mod prover; mod prover_internal; + +use std::collections::HashMap; + +use aries_vcx_core::{ + anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, +}; + +use crate::{ + common::proofs::{ + proof_request::ProofRequestData, + prover::prover_internal::{ + build_cred_defs_json_prover, build_requested_credentials_json, build_rev_states_json, + build_schemas_json_prover, credential_def_identifiers, + }, + }, + errors::error::prelude::*, + global::settings, + handlers::proof_presentation::types::SelectedCredentials, + utils::mockdata::mock_settings::get_mock_generate_indy_proof, +}; + +pub async fn generate_indy_proof( + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, + credentials: &SelectedCredentials, + self_attested_attrs: &HashMap, + proof_req_data_json: &str, +) -> VcxResult { + trace!( + "generate_indy_proof >>> credentials: {:?}, self_attested_attrs: {:?}", + secret!(&credentials), + secret!(&self_attested_attrs) + ); + + match get_mock_generate_indy_proof() { + None => {} + Some(mocked_indy_proof) => { + warn!("generate_indy_proof :: returning mocked response"); + return Ok(mocked_indy_proof); + } + } + let proof_request: ProofRequestData = + serde_json::from_str(proof_req_data_json).map_err(|err| { + AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidJson, + format!("Cannot deserialize proof request: {}", err), + ) + })?; + + let mut credentials_identifiers = credential_def_identifiers(credentials, &proof_request)?; + + let revoc_states_json = + build_rev_states_json(ledger, anoncreds, &mut credentials_identifiers).await?; + let requested_credentials = build_requested_credentials_json( + &credentials_identifiers, + self_attested_attrs, + &proof_request, + )?; + + let schemas_json = build_schemas_json_prover(ledger, &credentials_identifiers).await?; + let credential_defs_json = + build_cred_defs_json_prover(ledger, &credentials_identifiers).await?; + + let proof = anoncreds + .prover_create_proof( + proof_req_data_json, + &requested_credentials, + settings::DEFAULT_LINK_SECRET_ALIAS, + &schemas_json, + &credential_defs_json, + Some(&revoc_states_json), + ) + .await?; + Ok(proof) +} diff --git a/aries_vcx/src/common/proofs/prover/prover.rs b/aries_vcx/src/common/proofs/prover/prover.rs deleted file mode 100644 index 12bf03dc0c..0000000000 --- a/aries_vcx/src/common/proofs/prover/prover.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use aries_vcx_core::{ - anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, -}; - 
-use crate::{ - common::proofs::{ - proof_request::ProofRequestData, - prover::prover_internal::{ - build_cred_defs_json_prover, build_requested_credentials_json, build_rev_states_json, - build_schemas_json_prover, credential_def_identifiers, - }, - }, - errors::error::prelude::*, - global::settings, - handlers::proof_presentation::types::SelectedCredentials, - utils::mockdata::mock_settings::get_mock_generate_indy_proof, -}; - -pub async fn generate_indy_proof( - ledger: &Arc, - anoncreds: &Arc, - credentials: &SelectedCredentials, - self_attested_attrs: &HashMap, - proof_req_data_json: &str, -) -> VcxResult { - trace!( - "generate_indy_proof >>> credentials: {:?}, self_attested_attrs: {:?}", - secret!(&credentials), - secret!(&self_attested_attrs) - ); - - match get_mock_generate_indy_proof() { - None => {} - Some(mocked_indy_proof) => { - warn!("generate_indy_proof :: returning mocked response"); - return Ok(mocked_indy_proof); - } - } - let proof_request: ProofRequestData = - serde_json::from_str(proof_req_data_json).map_err(|err| { - AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidJson, - format!("Cannot deserialize proof request: {}", err), - ) - })?; - - let mut credentials_identifiers = credential_def_identifiers(credentials, &proof_request)?; - - let revoc_states_json = - build_rev_states_json(ledger, anoncreds, &mut credentials_identifiers).await?; - let requested_credentials = build_requested_credentials_json( - &credentials_identifiers, - self_attested_attrs, - &proof_request, - )?; - - let schemas_json = build_schemas_json_prover(ledger, &credentials_identifiers).await?; - let credential_defs_json = - build_cred_defs_json_prover(ledger, &credentials_identifiers).await?; - - let proof = anoncreds - .prover_create_proof( - proof_req_data_json, - &requested_credentials, - settings::DEFAULT_LINK_SECRET_ALIAS, - &schemas_json, - &credential_defs_json, - Some(&revoc_states_json), - ) - .await?; - Ok(proof) -} diff --git a/aries_vcx/src/common/proofs/prover/prover_internal.rs b/aries_vcx/src/common/proofs/prover/prover_internal.rs index 164cf45bbf..733c64d12a 100644 --- a/aries_vcx/src/common/proofs/prover/prover_internal.rs +++ b/aries_vcx/src/common/proofs/prover/prover_internal.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, @@ -28,7 +28,7 @@ pub struct CredInfoProver { } pub async fn build_schemas_json_prover( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, credentials_identifiers: &Vec, ) -> VcxResult { trace!( @@ -60,7 +60,7 @@ pub async fn build_schemas_json_prover( } pub async fn build_cred_defs_json_prover( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, credentials_identifiers: &Vec, ) -> VcxResult { trace!( @@ -148,8 +148,8 @@ fn _get_revocation_interval( } pub async fn build_rev_states_json( - ledger_read: &Arc, - anoncreds: &Arc, + ledger_read: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, credentials_identifiers: &mut Vec, ) -> VcxResult { trace!( @@ -281,21 +281,18 @@ pub mod pool_tests { use crate::{ common::proofs::prover::prover_internal::{build_rev_states_json, CredInfoProver}, - utils::{ - constants::{CRED_DEF_ID, CRED_REV_ID, LICENCE_CRED_ID, SCHEMA_ID}, - devsetup::SetupProfile, - }, + utils::constants::{CRED_DEF_ID, CRED_REV_ID, LICENCE_CRED_ID, SCHEMA_ID}, }; #[tokio::test] #[ignore] async fn test_pool_build_rev_states_json_empty() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { // empty 
vector assert_eq!( build_rev_states_json( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), + setup.profile.ledger_read(), + setup.profile.anoncreds(), Vec::new().as_mut() ) .await @@ -318,8 +315,8 @@ pub mod pool_tests { }; assert_eq!( build_rev_states_json( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), + setup.profile.ledger_read(), + setup.profile.anoncreds(), vec![cred1].as_mut() ) .await @@ -394,7 +391,7 @@ pub mod unit_tests { }; let creds = vec![cred1, cred2]; - let ledger_read: Arc = Arc::new(MockLedger {}); + let ledger_read = MockLedger; let credential_def = build_cred_defs_json_prover(&ledger_read, &creds) .await .unwrap(); @@ -407,7 +404,7 @@ pub mod unit_tests { async fn test_find_schemas() { let _setup = SetupMocks::init(); - let ledger_read: Arc = Arc::new(MockLedger {}); + let ledger_read = MockLedger; assert_eq!( build_schemas_json_prover(&ledger_read, &Vec::new()) .await @@ -441,7 +438,7 @@ pub mod unit_tests { }; let creds = vec![cred1, cred2]; - let ledger_read: Arc = Arc::new(MockLedger {}); + let ledger_read = MockLedger; let schemas = build_schemas_json_prover(&ledger_read, &creds) .await .unwrap(); @@ -717,8 +714,8 @@ pub mod unit_tests { revealed: None, }; let mut cred_info = vec![cred1]; - let anoncreds: Arc = Arc::new(MockAnoncreds {}); - let ledger_read: Arc = Arc::new(MockLedger {}); + let anoncreds = MockAnoncreds; + let ledger_read = MockLedger; let states = build_rev_states_json(&ledger_read, &anoncreds, cred_info.as_mut()) .await .unwrap(); diff --git a/aries_vcx/src/common/proofs/verifier/mod.rs b/aries_vcx/src/common/proofs/verifier/mod.rs index 6ce73fb9af..ed0ec3e4c7 100644 --- a/aries_vcx/src/common/proofs/verifier/mod.rs +++ b/aries_vcx/src/common/proofs/verifier/mod.rs @@ -1,2 +1,645 @@ -pub mod verifier; mod verifier_internal; + +use aries_vcx_core::{ + anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, +}; + +use crate::{ + common::proofs::verifier::verifier_internal::{ + build_cred_defs_json_verifier, build_rev_reg_defs_json, build_rev_reg_json, + build_schemas_json_verifier, get_credential_info, validate_proof_revealed_attributes, + }, + errors::error::prelude::*, + utils::mockdata::mock_settings::get_mock_result_for_validate_indy_proof, +}; + +pub async fn validate_indy_proof( + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, + proof_json: &str, + proof_req_json: &str, +) -> VcxResult { + if let Some(mock_result) = get_mock_result_for_validate_indy_proof() { + return mock_result; + } + validate_proof_revealed_attributes(proof_json)?; + + let credential_data = get_credential_info(proof_json)?; + debug!("validate_indy_proof >> credential_data: {credential_data:?}"); + let credential_defs_json = build_cred_defs_json_verifier(ledger, &credential_data).await?; + let schemas_json = build_schemas_json_verifier(ledger, &credential_data).await?; + let rev_reg_defs_json = build_rev_reg_defs_json(ledger, &credential_data) + .await + .unwrap_or(json!({}).to_string()); + let rev_regs_json = build_rev_reg_json(ledger, &credential_data) + .await + .unwrap_or(json!({}).to_string()); + + debug!("validate_indy_proof >> credential_defs_json: {credential_defs_json}"); + debug!("validate_indy_proof >> schemas_json: {schemas_json}"); + trace!("validate_indy_proof >> proof_json: {proof_json}"); + debug!("validate_indy_proof >> proof_req_json: {proof_req_json}"); + debug!("validate_indy_proof >> rev_reg_defs_json: {rev_reg_defs_json}"); + 
debug!("validate_indy_proof >> rev_regs_json: {rev_regs_json}"); + anoncreds + .verifier_verify_proof( + proof_req_json, + proof_json, + &schemas_json, + &credential_defs_json, + &rev_reg_defs_json, + &rev_regs_json, + ) + .await + .map_err(|err| err.into()) +} + +#[cfg(test)] +#[allow(clippy::unwrap_used)] +pub mod integration_tests { + use std::time::Duration; + + use aries_vcx_core::{ + anoncreds::base_anoncreds::BaseAnonCreds, + ledger::base_ledger::{AnoncredsLedgerRead, AnoncredsLedgerWrite}, + }; + + use super::*; + use crate::{ + common::{ + primitives::{credential_definition::CredentialDef, credential_schema::Schema}, + proofs::proof_request::ProofRequestData, + test_utils::{ + create_and_write_credential, create_and_write_test_cred_def, + create_and_write_test_schema, + }, + }, + errors::error::AriesVcxErrorKind, + utils::{self, constants::DEFAULT_SCHEMA_ATTRS}, + }; + + // FUTURE - issuer and holder seperation only needed whilst modular deps not fully implemented + async fn create_indy_proof( + anoncreds_issuer: &impl BaseAnonCreds, + anoncreds_holder: &impl BaseAnonCreds, + ledger_read: &impl AnoncredsLedgerRead, + ledger_write: &impl AnoncredsLedgerWrite, + did: &str, + ) -> (String, String, String, String) { + let (schema, cred_def, cred_id) = create_and_store_nonrevocable_credential( + anoncreds_issuer, + anoncreds_holder, + ledger_read, + ledger_write, + did, + DEFAULT_SCHEMA_ATTRS, + ) + .await; + let proof_req = json!({ + "nonce":"123432421212", + "name":"proof_req_1", + "version":"0.1", + "requested_attributes": json!({ + "address1_1": json!({ + "name":"address1", + "restrictions": [json!({ "issuer_did": did })] + }), + "zip_2": json!({ + "name":"zip", + "restrictions": [json!({ "issuer_did": did })] + }), + "self_attest_3": json!({ + "name":"self_attest", + }), + }), + "requested_predicates": json!({}), + }) + .to_string(); + let requested_credentials_json = json!({ + "self_attested_attributes":{ + "self_attest_3": "my_self_attested_val" + }, + "requested_attributes":{ + "address1_1": {"cred_id": cred_id, "revealed": true}, + "zip_2": {"cred_id": cred_id, "revealed": true} + }, + "requested_predicates":{} + }) + .to_string(); + + let schema_id = schema.schema_id.clone(); + let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); + let schemas = json!({ + schema_id: schema_json, + }) + .to_string(); + + let cred_def_json: serde_json::Value = + serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); + let cred_defs = json!({ + cred_def.get_cred_def_id(): cred_def_json, + }) + .to_string(); + + anoncreds_holder + .prover_get_credentials_for_proof_req(&proof_req) + .await + .unwrap(); + + let proof = anoncreds_holder + .prover_create_proof( + &proof_req, + &requested_credentials_json, + "main", + &schemas, + &cred_defs, + None, + ) + .await + .unwrap(); + (schemas, cred_defs, proof_req, proof) + } + + async fn create_proof_with_predicate( + anoncreds_issuer: &impl BaseAnonCreds, + anoncreds_holder: &impl BaseAnonCreds, + ledger_read: &impl AnoncredsLedgerRead, + ledger_write: &impl AnoncredsLedgerWrite, + did: &str, + include_predicate_cred: bool, + ) -> (String, String, String, String) { + let (schema, cred_def, cred_id) = create_and_store_nonrevocable_credential( + anoncreds_issuer, + anoncreds_holder, + ledger_read, + ledger_write, + did, + DEFAULT_SCHEMA_ATTRS, + ) + .await; + + let proof_req = json!({ + "nonce":"123432421212", + "name":"proof_req_1", + "version":"0.1", + "requested_attributes": json!({ + "address1_1": json!({ + 
"name":"address1", + "restrictions": [json!({ "issuer_did": did })] + }), + "self_attest_3": json!({ + "name":"self_attest", + }), + }), + "requested_predicates": json!({ + "zip_3": {"name":"zip", "p_type":">=", "p_value":18} + }), + }) + .to_string(); + + let requested_credentials_json = if include_predicate_cred { + json!({ + "self_attested_attributes":{ + "self_attest_3": "my_self_attested_val" + }, + "requested_attributes":{ + "address1_1": {"cred_id": cred_id, "revealed": true} + }, + "requested_predicates":{ + "zip_3": {"cred_id": cred_id} + } + }) + .to_string() + } else { + json!({ + "self_attested_attributes":{ + "self_attest_3": "my_self_attested_val" + }, + "requested_attributes":{ + "address1_1": {"cred_id": cred_id, "revealed": true} + }, + "requested_predicates":{ + } + }) + .to_string() + }; + + let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); + let schemas = json!({ + schema.schema_id: schema_json, + }) + .to_string(); + + let cred_def_json: serde_json::Value = + serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); + let cred_defs = json!({ + cred_def.get_cred_def_id(): cred_def_json, + }) + .to_string(); + + anoncreds_holder + .prover_get_credentials_for_proof_req(&proof_req) + .await + .unwrap(); + + let proof = anoncreds_holder + .prover_create_proof( + &proof_req, + &requested_credentials_json, + "main", + &schemas, + &cred_defs, + None, + ) + .await + .unwrap(); + (schemas, cred_defs, proof_req, proof) + } + + async fn create_and_store_nonrevocable_credential( + anoncreds_issuer: &impl BaseAnonCreds, + anoncreds_holder: &impl BaseAnonCreds, + ledger_read: &impl AnoncredsLedgerRead, + ledger_write: &impl AnoncredsLedgerWrite, + issuer_did: &str, + attr_list: &str, + ) -> (Schema, CredentialDef, String) { + let schema = + create_and_write_test_schema(anoncreds_issuer, ledger_write, issuer_did, attr_list) + .await; + + let cred_def = create_and_write_test_cred_def( + anoncreds_issuer, + ledger_read, + ledger_write, + issuer_did, + &schema.schema_id, + false, + ) + .await; + tokio::time::sleep(Duration::from_millis(500)).await; + + let cred_id = create_and_write_credential( + anoncreds_issuer, + anoncreds_holder, + issuer_did, + &cred_def, + None, + ) + .await; + (schema, cred_def, cred_id) + } + + #[tokio::test] + #[ignore] + async fn test_pool_proof_self_attested_proof_validation() { + run_setup!(|setup| async move { + let requested_attrs = json!([ + json!({ + "name":"address1", + "self_attest_allowed": true, + }), + json!({ + "name":"zip", + "self_attest_allowed": true, + }), + ]) + .to_string(); + let requested_predicates = json!([]).to_string(); + let revocation_details = r#"{"support_revocation":false}"#.to_string(); + let name = "Optional".to_owned(); + + let proof_req_json = ProofRequestData::create(setup.profile.anoncreds(), &name) + .await + .unwrap() + .set_requested_attributes_as_string(requested_attrs) + .unwrap() + .set_requested_predicates_as_string(requested_predicates) + .unwrap() + .set_not_revoked_interval(revocation_details) + .unwrap(); + + let proof_req_json = serde_json::to_string(&proof_req_json).unwrap(); + + let anoncreds = setup.profile.anoncreds(); + let prover_proof_json = anoncreds + .prover_create_proof( + &proof_req_json, + &json!({ + "self_attested_attributes":{ + "attribute_0": "my_self_attested_address", + "attribute_1": "my_self_attested_zip" + }, + "requested_attributes":{}, + "requested_predicates":{} + }) + .to_string(), + "main", + &json!({}).to_string(), + &json!({}).to_string(), + 
None, + ) + .await + .unwrap(); + + assert!(validate_indy_proof( + setup.profile.ledger_read(), + setup.profile.anoncreds(), + &prover_proof_json, + &proof_req_json + ) + .await + .unwrap()); + }) + .await; + } + + #[tokio::test] + #[ignore] + async fn test_pool_proof_restrictions() { + run_setup!(|setup| async move { + let requested_attrs = json!([ + json!({ + "name":"address1", + "restrictions": [{ "issuer_did": "Not Here" }], + }), + json!({ + "name":"zip", + }), + json!({ + "name":"self_attest", + "self_attest_allowed": true, + }), + ]) + .to_string(); + let requested_predicates = json!([]).to_string(); + let revocation_details = r#"{"support_revocation":true}"#.to_string(); + let name = "Optional".to_owned(); + + let proof_req_json = ProofRequestData::create(setup.profile.anoncreds(), &name) + .await + .unwrap() + .set_requested_attributes_as_string(requested_attrs) + .unwrap() + .set_requested_predicates_as_string(requested_predicates) + .unwrap() + .set_not_revoked_interval(revocation_details) + .unwrap(); + + let proof_req_json = serde_json::to_string(&proof_req_json).unwrap(); + + let (schema, cred_def, cred_id) = create_and_store_nonrevocable_credential( + setup.profile.anoncreds(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), + &setup.institution_did, + utils::constants::DEFAULT_SCHEMA_ATTRS, + ) + .await; + let cred_def_json: serde_json::Value = + serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); + let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); + + let anoncreds = setup.profile.anoncreds(); + let prover_proof_json = anoncreds + .prover_create_proof( + &proof_req_json, + &json!({ + "self_attested_attributes":{ + "attribute_2": "my_self_attested_val" + }, + "requested_attributes":{ + "attribute_0": {"cred_id": cred_id, "revealed": true}, + "attribute_1": {"cred_id": cred_id, "revealed": true} + }, + "requested_predicates":{} + }) + .to_string(), + "main", + &json!({ schema.schema_id: schema_json }).to_string(), + &json!({ cred_def.get_cred_def_id(): cred_def_json }).to_string(), + None, + ) + .await + .unwrap(); + assert_eq!( + validate_indy_proof( + setup.profile.ledger_read(), + setup.profile.anoncreds(), + &prover_proof_json, + &proof_req_json + ) + .await + .unwrap_err() + .kind(), + AriesVcxErrorKind::ProofRejected + ); + + let mut proof_req_json: serde_json::Value = + serde_json::from_str(&proof_req_json).unwrap(); + proof_req_json["requested_attributes"]["attribute_0"]["restrictions"] = json!({}); + assert!(validate_indy_proof( + setup.profile.ledger_read(), + setup.profile.anoncreds(), + &prover_proof_json, + &proof_req_json.to_string() + ) + .await + .unwrap()); + }) + .await; + } + + #[tokio::test] + #[ignore] + async fn test_pool_proof_validate_attribute() { + run_setup!(|setup| async move { + let requested_attrs = json!([ + json!({ + "name":"address1", + "restrictions": [json!({ "issuer_did": setup.institution_did })] + }), + json!({ + "name":"zip", + "restrictions": [json!({ "issuer_did": setup.institution_did })] + }), + json!({ + "name":"self_attest", + "self_attest_allowed": true, + }), + ]) + .to_string(); + let requested_predicates = json!([]).to_string(); + let revocation_details = r#"{"support_revocation":true}"#.to_string(); + let name = "Optional".to_owned(); + + let proof_req_json = ProofRequestData::create(setup.profile.anoncreds(), &name) + .await + .unwrap() + .set_requested_attributes_as_string(requested_attrs) + .unwrap() + 
.set_requested_predicates_as_string(requested_predicates) + .unwrap() + .set_not_revoked_interval(revocation_details) + .unwrap(); + + let proof_req_json = serde_json::to_string(&proof_req_json).unwrap(); + + let (schema, cred_def, cred_id) = create_and_store_nonrevocable_credential( + setup.profile.anoncreds(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), + &setup.institution_did, + utils::constants::DEFAULT_SCHEMA_ATTRS, + ) + .await; + let cred_def_json: serde_json::Value = + serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); + let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); + + let anoncreds = setup.profile.anoncreds(); + let prover_proof_json = anoncreds + .prover_create_proof( + &proof_req_json, + &json!({ + "self_attested_attributes":{ + "attribute_2": "my_self_attested_val" + }, + "requested_attributes":{ + "attribute_0": {"cred_id": cred_id, "revealed": true}, + "attribute_1": {"cred_id": cred_id, "revealed": true} + }, + "requested_predicates":{} + }) + .to_string(), + "main", + &json!({ schema.schema_id: schema_json }).to_string(), + &json!({ cred_def.get_cred_def_id(): cred_def_json }).to_string(), + None, + ) + .await + .unwrap(); + assert!(validate_indy_proof( + setup.profile.ledger_read(), + setup.profile.anoncreds(), + &prover_proof_json, + &proof_req_json + ) + .await + .unwrap()); + + let mut proof_obj: serde_json::Value = + serde_json::from_str(&prover_proof_json).unwrap(); + { + proof_obj["requested_proof"]["revealed_attrs"]["address1_1"]["raw"] = + json!("Other Value"); + let prover_proof_json = serde_json::to_string(&proof_obj).unwrap(); + + assert_eq!( + validate_indy_proof( + setup.profile.ledger_read(), + setup.profile.anoncreds(), + &prover_proof_json, + &proof_req_json + ) + .await + .unwrap_err() + .kind(), + AriesVcxErrorKind::InvalidProof + ); + } + { + proof_obj["requested_proof"]["revealed_attrs"]["address1_1"]["encoded"] = + json!("1111111111111111111111111111111111111111111111111111111111"); + let prover_proof_json = serde_json::to_string(&proof_obj).unwrap(); + + assert_eq!( + validate_indy_proof( + setup.profile.ledger_read(), + setup.profile.anoncreds(), + &prover_proof_json, + &proof_req_json + ) + .await + .unwrap_err() + .kind(), + AriesVcxErrorKind::InvalidProof + ); + } + }) + .await; + } + #[tokio::test] + #[ignore] + async fn test_pool_prover_verify_proof() { + run_setup!(|setup| async move { + let (schemas, cred_defs, proof_req, proof) = create_indy_proof( + setup.profile.anoncreds(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), + &setup.institution_did, + ) + .await; + + let anoncreds = setup.profile.anoncreds(); + let proof_validation = anoncreds + .verifier_verify_proof(&proof_req, &proof, &schemas, &cred_defs, "{}", "{}") + .await + .unwrap(); + + assert!(proof_validation); + }) + .await; + } + + #[tokio::test] + #[ignore] + async fn test_pool_prover_verify_proof_with_predicate_success_case() { + run_setup!(|setup| async move { + let (schemas, cred_defs, proof_req, proof) = create_proof_with_predicate( + setup.profile.anoncreds(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), + &setup.institution_did, + true, + ) + .await; + + let anoncreds = setup.profile.anoncreds(); + let proof_validation = anoncreds + .verifier_verify_proof(&proof_req, &proof, &schemas, &cred_defs, "{}", "{}") + .await + .unwrap(); + + assert!(proof_validation); + }) + .await; + } + 
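
The pattern applied throughout these test hunks — `&Arc<dyn AnoncredsLedgerRead>` / `&Arc<dyn BaseAnonCreds>` parameters replaced by `&impl Trait` generics, so mocks are passed as plain values (`MockLedger`) instead of `Arc::new(MockLedger {})` — reduces to the following minimal, self-contained sketch; the `Ledger` trait, `MockLedger` struct and both callers below are illustrative stand-ins, not items from the crate:

use std::sync::Arc;

trait Ledger {
    fn get_schema(&self, id: &str) -> String;
}

struct MockLedger;

impl Ledger for MockLedger {
    fn get_schema(&self, id: &str) -> String {
        format!("schema json for {id}")
    }
}

// Old shape: dynamic dispatch through a shared trait object.
fn build_schemas_dyn(ledger: &Arc<dyn Ledger>) -> String {
    ledger.get_schema("schema:1")
}

// New shape: static dispatch; any implementor is passed by reference,
// so tests no longer need to wrap mocks in an Arc.
fn build_schemas(ledger: &impl Ledger) -> String {
    ledger.get_schema("schema:1")
}

fn main() {
    let shared: Arc<dyn Ledger> = Arc::new(MockLedger);
    assert_eq!(build_schemas_dyn(&shared), build_schemas(&MockLedger));
}
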
+ #[tokio::test] + #[ignore] + async fn test_pool_prover_verify_proof_with_predicate_fail_case() { + run_setup!(|setup| async move { + let (schemas, cred_defs, proof_req, proof) = create_proof_with_predicate( + setup.profile.anoncreds(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), + &setup.institution_did, + false, + ) + .await; + + let anoncreds = setup.profile.anoncreds(); + anoncreds + .verifier_verify_proof(&proof_req, &proof, &schemas, &cred_defs, "{}", "{}") + .await + .unwrap_err(); + }) + .await; + } +} diff --git a/aries_vcx/src/common/proofs/verifier/verifier.rs b/aries_vcx/src/common/proofs/verifier/verifier.rs deleted file mode 100644 index dbd75e0c3c..0000000000 --- a/aries_vcx/src/common/proofs/verifier/verifier.rs +++ /dev/null @@ -1,645 +0,0 @@ -use std::sync::Arc; - -use aries_vcx_core::{ - anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, -}; - -use crate::{ - common::proofs::verifier::verifier_internal::{ - build_cred_defs_json_verifier, build_rev_reg_defs_json, build_rev_reg_json, - build_schemas_json_verifier, get_credential_info, validate_proof_revealed_attributes, - }, - errors::error::prelude::*, - utils::mockdata::mock_settings::get_mock_result_for_validate_indy_proof, -}; - -pub async fn validate_indy_proof( - ledger: &Arc, - anoncreds: &Arc, - proof_json: &str, - proof_req_json: &str, -) -> VcxResult { - if let Some(mock_result) = get_mock_result_for_validate_indy_proof() { - return mock_result; - } - validate_proof_revealed_attributes(proof_json)?; - - let credential_data = get_credential_info(proof_json)?; - debug!("validate_indy_proof >> credential_data: {credential_data:?}"); - let credential_defs_json = build_cred_defs_json_verifier(ledger, &credential_data).await?; - let schemas_json = build_schemas_json_verifier(ledger, &credential_data).await?; - let rev_reg_defs_json = build_rev_reg_defs_json(ledger, &credential_data) - .await - .unwrap_or(json!({}).to_string()); - let rev_regs_json = build_rev_reg_json(ledger, &credential_data) - .await - .unwrap_or(json!({}).to_string()); - - debug!("validate_indy_proof >> credential_defs_json: {credential_defs_json}"); - debug!("validate_indy_proof >> schemas_json: {schemas_json}"); - trace!("validate_indy_proof >> proof_json: {proof_json}"); - debug!("validate_indy_proof >> proof_req_json: {proof_req_json}"); - debug!("validate_indy_proof >> rev_reg_defs_json: {rev_reg_defs_json}"); - debug!("validate_indy_proof >> rev_regs_json: {rev_regs_json}"); - anoncreds - .verifier_verify_proof( - proof_req_json, - proof_json, - &schemas_json, - &credential_defs_json, - &rev_reg_defs_json, - &rev_regs_json, - ) - .await - .map_err(|err| err.into()) -} - -#[cfg(test)] -#[allow(clippy::unwrap_used)] -pub mod integration_tests { - use std::{sync::Arc, time::Duration}; - - use aries_vcx_core::{ - anoncreds::base_anoncreds::BaseAnonCreds, - ledger::base_ledger::{AnoncredsLedgerRead, AnoncredsLedgerWrite}, - }; - - use super::*; - use crate::{ - common::{ - primitives::{credential_definition::CredentialDef, credential_schema::Schema}, - proofs::proof_request::ProofRequestData, - test_utils::{ - create_and_write_credential, create_and_write_test_cred_def, - create_and_write_test_schema, - }, - }, - errors::error::AriesVcxErrorKind, - utils::{self, constants::DEFAULT_SCHEMA_ATTRS, devsetup::SetupProfile}, - }; - - // FUTURE - issuer and holder seperation only needed whilst modular deps not fully implemented - async fn create_indy_proof( - 
anoncreds_issuer: &Arc, - anoncreds_holder: &Arc, - ledger_read: &Arc, - ledger_write: &Arc, - did: &str, - ) -> (String, String, String, String) { - let (schema, cred_def, cred_id) = create_and_store_nonrevocable_credential( - anoncreds_issuer, - anoncreds_holder, - ledger_read, - ledger_write, - did, - DEFAULT_SCHEMA_ATTRS, - ) - .await; - let proof_req = json!({ - "nonce":"123432421212", - "name":"proof_req_1", - "version":"0.1", - "requested_attributes": json!({ - "address1_1": json!({ - "name":"address1", - "restrictions": [json!({ "issuer_did": did })] - }), - "zip_2": json!({ - "name":"zip", - "restrictions": [json!({ "issuer_did": did })] - }), - "self_attest_3": json!({ - "name":"self_attest", - }), - }), - "requested_predicates": json!({}), - }) - .to_string(); - let requested_credentials_json = json!({ - "self_attested_attributes":{ - "self_attest_3": "my_self_attested_val" - }, - "requested_attributes":{ - "address1_1": {"cred_id": cred_id, "revealed": true}, - "zip_2": {"cred_id": cred_id, "revealed": true} - }, - "requested_predicates":{} - }) - .to_string(); - - let schema_id = schema.schema_id.clone(); - let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); - let schemas = json!({ - schema_id: schema_json, - }) - .to_string(); - - let cred_def_json: serde_json::Value = - serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); - let cred_defs = json!({ - cred_def.get_cred_def_id(): cred_def_json, - }) - .to_string(); - - anoncreds_holder - .prover_get_credentials_for_proof_req(&proof_req) - .await - .unwrap(); - - let proof = anoncreds_holder - .prover_create_proof( - &proof_req, - &requested_credentials_json, - "main", - &schemas, - &cred_defs, - None, - ) - .await - .unwrap(); - (schemas, cred_defs, proof_req, proof) - } - - async fn create_proof_with_predicate( - anoncreds_issuer: &Arc, - anoncreds_holder: &Arc, - ledger_read: &Arc, - ledger_write: &Arc, - did: &str, - include_predicate_cred: bool, - ) -> (String, String, String, String) { - let (schema, cred_def, cred_id) = create_and_store_nonrevocable_credential( - anoncreds_issuer, - anoncreds_holder, - ledger_read, - ledger_write, - did, - DEFAULT_SCHEMA_ATTRS, - ) - .await; - - let proof_req = json!({ - "nonce":"123432421212", - "name":"proof_req_1", - "version":"0.1", - "requested_attributes": json!({ - "address1_1": json!({ - "name":"address1", - "restrictions": [json!({ "issuer_did": did })] - }), - "self_attest_3": json!({ - "name":"self_attest", - }), - }), - "requested_predicates": json!({ - "zip_3": {"name":"zip", "p_type":">=", "p_value":18} - }), - }) - .to_string(); - - let requested_credentials_json = if include_predicate_cred { - json!({ - "self_attested_attributes":{ - "self_attest_3": "my_self_attested_val" - }, - "requested_attributes":{ - "address1_1": {"cred_id": cred_id, "revealed": true} - }, - "requested_predicates":{ - "zip_3": {"cred_id": cred_id} - } - }) - .to_string() - } else { - json!({ - "self_attested_attributes":{ - "self_attest_3": "my_self_attested_val" - }, - "requested_attributes":{ - "address1_1": {"cred_id": cred_id, "revealed": true} - }, - "requested_predicates":{ - } - }) - .to_string() - }; - - let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); - let schemas = json!({ - schema.schema_id: schema_json, - }) - .to_string(); - - let cred_def_json: serde_json::Value = - serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); - let cred_defs = json!({ - cred_def.get_cred_def_id(): cred_def_json, - }) 
- .to_string(); - - anoncreds_holder - .prover_get_credentials_for_proof_req(&proof_req) - .await - .unwrap(); - - let proof = anoncreds_holder - .prover_create_proof( - &proof_req, - &requested_credentials_json, - "main", - &schemas, - &cred_defs, - None, - ) - .await - .unwrap(); - (schemas, cred_defs, proof_req, proof) - } - - async fn create_and_store_nonrevocable_credential( - anoncreds_issuer: &Arc, - anoncreds_holder: &Arc, - ledger_read: &Arc, - ledger_write: &Arc, - issuer_did: &str, - attr_list: &str, - ) -> (Schema, CredentialDef, String) { - let schema = - create_and_write_test_schema(anoncreds_issuer, ledger_write, issuer_did, attr_list) - .await; - - let cred_def = create_and_write_test_cred_def( - anoncreds_issuer, - ledger_read, - ledger_write, - issuer_did, - &schema.schema_id, - false, - ) - .await; - tokio::time::sleep(Duration::from_millis(500)).await; - - let cred_id = create_and_write_credential( - anoncreds_issuer, - anoncreds_holder, - issuer_did, - &cred_def, - None, - ) - .await; - (schema, cred_def, cred_id) - } - - #[tokio::test] - #[ignore] - async fn test_pool_proof_self_attested_proof_validation() { - SetupProfile::run(|setup| async move { - let requested_attrs = json!([ - json!({ - "name":"address1", - "self_attest_allowed": true, - }), - json!({ - "name":"zip", - "self_attest_allowed": true, - }), - ]) - .to_string(); - let requested_predicates = json!([]).to_string(); - let revocation_details = r#"{"support_revocation":false}"#.to_string(); - let name = "Optional".to_owned(); - - let proof_req_json = ProofRequestData::create(&setup.profile.inject_anoncreds(), &name) - .await - .unwrap() - .set_requested_attributes_as_string(requested_attrs) - .unwrap() - .set_requested_predicates_as_string(requested_predicates) - .unwrap() - .set_not_revoked_interval(revocation_details) - .unwrap(); - - let proof_req_json = serde_json::to_string(&proof_req_json).unwrap(); - - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); - let prover_proof_json = anoncreds - .prover_create_proof( - &proof_req_json, - &json!({ - "self_attested_attributes":{ - "attribute_0": "my_self_attested_address", - "attribute_1": "my_self_attested_zip" - }, - "requested_attributes":{}, - "requested_predicates":{} - }) - .to_string(), - "main", - &json!({}).to_string(), - &json!({}).to_string(), - None, - ) - .await - .unwrap(); - - assert!(validate_indy_proof( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), - &prover_proof_json, - &proof_req_json - ) - .await - .unwrap()); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_pool_proof_restrictions() { - SetupProfile::run(|setup| async move { - let requested_attrs = json!([ - json!({ - "name":"address1", - "restrictions": [{ "issuer_did": "Not Here" }], - }), - json!({ - "name":"zip", - }), - json!({ - "name":"self_attest", - "self_attest_allowed": true, - }), - ]) - .to_string(); - let requested_predicates = json!([]).to_string(); - let revocation_details = r#"{"support_revocation":true}"#.to_string(); - let name = "Optional".to_owned(); - - let proof_req_json = ProofRequestData::create(&setup.profile.inject_anoncreds(), &name) - .await - .unwrap() - .set_requested_attributes_as_string(requested_attrs) - .unwrap() - .set_requested_predicates_as_string(requested_predicates) - .unwrap() - .set_not_revoked_interval(revocation_details) - .unwrap(); - - let proof_req_json = serde_json::to_string(&proof_req_json).unwrap(); - - let (schema, cred_def, cred_id) = 
create_and_store_nonrevocable_credential( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), - &setup.institution_did, - utils::constants::DEFAULT_SCHEMA_ATTRS, - ) - .await; - let cred_def_json: serde_json::Value = - serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); - let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); - - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); - let prover_proof_json = anoncreds - .prover_create_proof( - &proof_req_json, - &json!({ - "self_attested_attributes":{ - "attribute_2": "my_self_attested_val" - }, - "requested_attributes":{ - "attribute_0": {"cred_id": cred_id, "revealed": true}, - "attribute_1": {"cred_id": cred_id, "revealed": true} - }, - "requested_predicates":{} - }) - .to_string(), - "main", - &json!({ schema.schema_id: schema_json }).to_string(), - &json!({ cred_def.get_cred_def_id(): cred_def_json }).to_string(), - None, - ) - .await - .unwrap(); - assert_eq!( - validate_indy_proof( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), - &prover_proof_json, - &proof_req_json - ) - .await - .unwrap_err() - .kind(), - AriesVcxErrorKind::ProofRejected - ); - - let mut proof_req_json: serde_json::Value = - serde_json::from_str(&proof_req_json).unwrap(); - proof_req_json["requested_attributes"]["attribute_0"]["restrictions"] = json!({}); - assert!(validate_indy_proof( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), - &prover_proof_json, - &proof_req_json.to_string() - ) - .await - .unwrap()); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_pool_proof_validate_attribute() { - SetupProfile::run(|setup| async move { - let requested_attrs = json!([ - json!({ - "name":"address1", - "restrictions": [json!({ "issuer_did": setup.institution_did })] - }), - json!({ - "name":"zip", - "restrictions": [json!({ "issuer_did": setup.institution_did })] - }), - json!({ - "name":"self_attest", - "self_attest_allowed": true, - }), - ]) - .to_string(); - let requested_predicates = json!([]).to_string(); - let revocation_details = r#"{"support_revocation":true}"#.to_string(); - let name = "Optional".to_owned(); - - let proof_req_json = ProofRequestData::create(&setup.profile.inject_anoncreds(), &name) - .await - .unwrap() - .set_requested_attributes_as_string(requested_attrs) - .unwrap() - .set_requested_predicates_as_string(requested_predicates) - .unwrap() - .set_not_revoked_interval(revocation_details) - .unwrap(); - - let proof_req_json = serde_json::to_string(&proof_req_json).unwrap(); - - let (schema, cred_def, cred_id) = create_and_store_nonrevocable_credential( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), - &setup.institution_did, - utils::constants::DEFAULT_SCHEMA_ATTRS, - ) - .await; - let cred_def_json: serde_json::Value = - serde_json::from_str(cred_def.get_cred_def_json()).unwrap(); - let schema_json: serde_json::Value = serde_json::from_str(&schema.schema_json).unwrap(); - - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); - let prover_proof_json = anoncreds - .prover_create_proof( - &proof_req_json, - &json!({ - "self_attested_attributes":{ - "attribute_2": "my_self_attested_val" - }, - "requested_attributes":{ - "attribute_0": {"cred_id": cred_id, 
"revealed": true}, - "attribute_1": {"cred_id": cred_id, "revealed": true} - }, - "requested_predicates":{} - }) - .to_string(), - "main", - &json!({ schema.schema_id: schema_json }).to_string(), - &json!({ cred_def.get_cred_def_id(): cred_def_json }).to_string(), - None, - ) - .await - .unwrap(); - assert!(validate_indy_proof( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), - &prover_proof_json, - &proof_req_json - ) - .await - .unwrap()); - - let mut proof_obj: serde_json::Value = - serde_json::from_str(&prover_proof_json).unwrap(); - { - proof_obj["requested_proof"]["revealed_attrs"]["address1_1"]["raw"] = - json!("Other Value"); - let prover_proof_json = serde_json::to_string(&proof_obj).unwrap(); - - assert_eq!( - validate_indy_proof( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), - &prover_proof_json, - &proof_req_json - ) - .await - .unwrap_err() - .kind(), - AriesVcxErrorKind::InvalidProof - ); - } - { - proof_obj["requested_proof"]["revealed_attrs"]["address1_1"]["encoded"] = - json!("1111111111111111111111111111111111111111111111111111111111"); - let prover_proof_json = serde_json::to_string(&proof_obj).unwrap(); - - assert_eq!( - validate_indy_proof( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), - &prover_proof_json, - &proof_req_json - ) - .await - .unwrap_err() - .kind(), - AriesVcxErrorKind::InvalidProof - ); - } - }) - .await; - } - #[tokio::test] - #[ignore] - async fn test_pool_prover_verify_proof() { - SetupProfile::run(|setup| async move { - let (schemas, cred_defs, proof_req, proof) = create_indy_proof( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), - &setup.institution_did, - ) - .await; - - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); - let proof_validation = anoncreds - .verifier_verify_proof(&proof_req, &proof, &schemas, &cred_defs, "{}", "{}") - .await - .unwrap(); - - assert!(proof_validation); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_pool_prover_verify_proof_with_predicate_success_case() { - SetupProfile::run(|setup| async move { - let (schemas, cred_defs, proof_req, proof) = create_proof_with_predicate( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), - &setup.institution_did, - true, - ) - .await; - - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); - let proof_validation = anoncreds - .verifier_verify_proof(&proof_req, &proof, &schemas, &cred_defs, "{}", "{}") - .await - .unwrap(); - - assert!(proof_validation); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_pool_prover_verify_proof_with_predicate_fail_case() { - SetupProfile::run(|setup| async move { - let (schemas, cred_defs, proof_req, proof) = create_proof_with_predicate( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), - &setup.institution_did, - false, - ) - .await; - - let anoncreds = Arc::clone(&setup.profile).inject_anoncreds(); - anoncreds - .verifier_verify_proof(&proof_req, &proof, &schemas, &cred_defs, "{}", "{}") - .await - .unwrap_err(); - }) - .await; - } -} diff --git a/aries_vcx/src/common/proofs/verifier/verifier_internal.rs 
b/aries_vcx/src/common/proofs/verifier/verifier_internal.rs index 3c4a8d8df0..022d0be206 100644 --- a/aries_vcx/src/common/proofs/verifier/verifier_internal.rs +++ b/aries_vcx/src/common/proofs/verifier/verifier_internal.rs @@ -1,11 +1,9 @@ -use std::sync::Arc; - use aries_vcx_core::{ errors::error::AriesVcxCoreErrorKind, ledger::base_ledger::AnoncredsLedgerRead, }; use serde_json::{self, Value}; -use crate::{errors::error::prelude::*, global::settings, utils::openssl::encode}; +use crate::{errors::error::prelude::*, utils::openssl::encode}; #[derive(Debug, Deserialize, Serialize, PartialEq, Eq)] pub struct CredInfoVerifier { @@ -53,10 +51,6 @@ pub fn get_credential_info(proof: &str) -> VcxResult> { } pub fn validate_proof_revealed_attributes(proof_json: &str) -> VcxResult<()> { - if settings::indy_mocks_enabled() { - return Ok(()); - } - let proof: Value = serde_json::from_str(proof_json).map_err(|err| { AriesVcxError::from_msg( AriesVcxErrorKind::InvalidJson, @@ -99,7 +93,7 @@ pub fn validate_proof_revealed_attributes(proof_json: &str) -> VcxResult<()> { } pub async fn build_cred_defs_json_verifier( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, credential_data: &[CredInfoVerifier], ) -> VcxResult { trace!("build_cred_defs_json_verifier >>"); @@ -125,7 +119,7 @@ pub async fn build_cred_defs_json_verifier( } pub async fn build_schemas_json_verifier( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, credential_data: &[CredInfoVerifier], ) -> VcxResult { trace!("build_schemas_json_verifier >>"); @@ -152,7 +146,7 @@ pub async fn build_schemas_json_verifier( } pub async fn build_rev_reg_defs_json( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, credential_data: &[CredInfoVerifier], ) -> VcxResult { trace!("build_rev_reg_defs_json >>"); @@ -185,7 +179,7 @@ pub async fn build_rev_reg_defs_json( } pub async fn build_rev_reg_json( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, credential_data: &[CredInfoVerifier], ) -> VcxResult { trace!("build_rev_reg_json >>"); @@ -248,7 +242,7 @@ pub mod unit_tests { timestamp: None, }; let credentials = vec![cred1, cred2]; - let ledger_read: Arc = Arc::new(MockLedger {}); + let ledger_read = MockLedger; let credential_json = build_cred_defs_json_verifier(&ledger_read, &credentials) .await .unwrap(); @@ -274,7 +268,7 @@ pub mod unit_tests { rev_reg_id: None, timestamp: None, }; - let ledger_read: Arc = Arc::new(MockLedger {}); + let ledger_read = MockLedger; let credentials = vec![cred1, cred2]; let schema_json = build_schemas_json_verifier(&ledger_read, &credentials) .await @@ -301,7 +295,7 @@ pub mod unit_tests { rev_reg_id: Some(REV_REG_ID.to_string()), timestamp: None, }; - let ledger_read: Arc = Arc::new(MockLedger {}); + let ledger_read = MockLedger; let credentials = vec![cred1, cred2]; let rev_reg_defs_json = build_rev_reg_defs_json(&ledger_read, &credentials) .await @@ -328,7 +322,7 @@ pub mod unit_tests { rev_reg_id: Some("id2".to_string()), timestamp: Some(2), }; - let ledger_read: Arc = Arc::new(MockLedger {}); + let ledger_read = MockLedger; let credentials = vec![cred1, cred2]; let rev_reg_json = build_rev_reg_json(&ledger_read, &credentials) .await diff --git a/aries_vcx/src/common/signing.rs b/aries_vcx/src/common/signing.rs index 225353ef88..bd0e803dcf 100644 --- a/aries_vcx/src/common/signing.rs +++ b/aries_vcx/src/common/signing.rs @@ -1,17 +1,15 @@ -use std::sync::Arc; - use aries_vcx_core::wallet::base_wallet::BaseWallet; -use base64; +use base64::{self, engine::general_purpose, Engine}; use 
messages::msg_fields::protocols::connection::{ response::{ConnectionSignature, ResponseContent}, ConnectionData, }; use time; -use crate::{errors::error::prelude::*, global::settings}; +use crate::errors::error::prelude::*; async fn get_signature_data( - wallet: &Arc, + wallet: &impl BaseWallet, data: String, key: &str, ) -> VcxResult<(Vec, Vec)> { @@ -25,15 +23,15 @@ async fn get_signature_data( } pub async fn sign_connection_response( - wallet: &Arc, + wallet: &impl BaseWallet, key: &str, con_data: &ConnectionData, ) -> VcxResult { let con_data = json!(con_data).to_string(); let (signature, sig_data) = get_signature_data(wallet, con_data, key).await?; - let sig_data = base64::encode_config(&sig_data, base64::URL_SAFE); - let signature = base64::encode_config(&signature, base64::URL_SAFE); + let sig_data = general_purpose::URL_SAFE.encode(sig_data); + let signature = general_purpose::URL_SAFE.encode(signature); let connection_sig = ConnectionSignature::new(signature, sig_data, key.to_string()); @@ -41,31 +39,27 @@ pub async fn sign_connection_response( } pub async fn decode_signed_connection_response( - wallet: &Arc, + wallet: &impl BaseWallet, response: ResponseContent, their_vk: &str, ) -> VcxResult { - let signature = base64::decode_config( - &response.connection_sig.signature.as_bytes(), - base64::URL_SAFE, - ) - .map_err(|err| { - AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidJson, - format!("Cannot decode ConnectionResponse: {:?}", err), - ) - })?; - - let sig_data = base64::decode_config( - &response.connection_sig.sig_data.as_bytes(), - base64::URL_SAFE, - ) - .map_err(|err| { - AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidJson, - format!("Cannot decode ConnectionResponse: {:?}", err), - ) - })?; + let signature = general_purpose::URL_SAFE + .decode(response.connection_sig.signature.as_bytes()) + .map_err(|err| { + AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidJson, + format!("Cannot decode ConnectionResponse: {:?}", err), + ) + })?; + + let sig_data = general_purpose::URL_SAFE + .decode(response.connection_sig.sig_data.as_bytes()) + .map_err(|err| { + AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidJson, + format!("Cannot decode ConnectionResponse: {:?}", err), + ) + })?; if !wallet.verify(their_vk, &sig_data, &signature).await? { return Err(AriesVcxError::from_msg( @@ -90,28 +84,6 @@ pub async fn decode_signed_connection_response( Ok(connection) } -pub async fn unpack_message_to_string( - wallet: &Arc, - msg: &[u8], -) -> VcxResult { - if settings::indy_mocks_enabled() { - return Ok(String::new()); - } - - String::from_utf8(wallet.unpack_message(msg).await.map_err(|_| { - AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidMessagePack, - "Failed to unpack message", - ) - })?) 
- .map_err(|_| { - AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidMessageFormat, - "Failed to convert message to utf8 string", - ) - }) -} - // #[cfg(test)] // pub mod unit_tests { // use crate::common::test_utils::{create_trustee_key, indy_handles_to_profile}; diff --git a/aries_vcx/src/common/test_utils.rs b/aries_vcx/src/common/test_utils.rs index 1831011185..f3e5f1a5fa 100644 --- a/aries_vcx/src/common/test_utils.rs +++ b/aries_vcx/src/common/test_utils.rs @@ -1,6 +1,6 @@ #![allow(clippy::unwrap_used)] -use std::{sync::Arc, time::Duration}; +use std::time::Duration; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, @@ -27,33 +27,30 @@ use crate::{ }; pub async fn create_and_write_test_schema( - anoncreds: &Arc, - ledger_write: &Arc, + anoncreds: &impl BaseAnonCreds, + ledger_write: &impl AnoncredsLedgerWrite, submitter_did: &str, attr_list: &str, ) -> Schema { - let (schema_id, schema_json) = anoncreds - .issuer_create_schema( - submitter_did, - &generate_random_schema_name(), - &generate_random_schema_version(), - attr_list, - ) - .await - .unwrap(); - - ledger_write - .publish_schema(&schema_json, submitter_did, None) - .await - .unwrap(); - tokio::time::sleep(Duration::from_millis(1000)).await; - Schema::create_from_ledger_json(&schema_json, "", &schema_id).unwrap() + let schema = Schema::create( + anoncreds, + "source_id", + submitter_did, + &generate_random_schema_name(), + &generate_random_schema_version(), + &serde_json::from_str::>(attr_list).unwrap(), + ) + .await + .unwrap(); + let schema = schema.publish(ledger_write).await.unwrap(); + std::thread::sleep(Duration::from_millis(500)); + schema } pub async fn create_and_write_test_cred_def( - anoncreds: &Arc, - ledger_read: &Arc, - ledger_write: &Arc, + anoncreds: &impl BaseAnonCreds, + ledger_read: &impl AnoncredsLedgerRead, + ledger_write: &impl AnoncredsLedgerWrite, issuer_did: &str, schema_id: &str, revokable: bool, @@ -77,9 +74,9 @@ pub async fn create_and_write_test_cred_def( .unwrap() } -pub async fn create_and_write_test_rev_reg( - anoncreds: &Arc, - ledger_write: &Arc, +pub async fn create_and_publish_test_rev_reg( + anoncreds: &impl BaseAnonCreds, + ledger_write: &impl AnoncredsLedgerWrite, issuer_did: &str, cred_def_id: &str, ) -> RevocationRegistry { @@ -96,8 +93,8 @@ pub async fn create_and_write_test_rev_reg( } pub async fn create_and_write_credential( - anoncreds_issuer: &Arc, - anoncreds_holder: &Arc, + anoncreds_issuer: &impl BaseAnonCreds, + anoncreds_holder: &impl BaseAnonCreds, institution_did: &str, cred_def: &CredentialDef, rev_reg: Option<&RevocationRegistry>, diff --git a/aries_vcx/src/core/profile/ledger.rs b/aries_vcx/src/core/profile/ledger.rs index 62e43427ac..d952272073 100644 --- a/aries_vcx/src/core/profile/ledger.rs +++ b/aries_vcx/src/core/profile/ledger.rs @@ -16,8 +16,9 @@ use aries_vcx_core::{ }; use crate::errors::error::VcxResult; -type ArcIndyVdrLedgerRead = Arc>; -type ArcIndyVdrLedgerWrite = Arc>; +/// TODO: Rename these +pub type ArcIndyVdrLedgerRead = IndyVdrLedgerRead; +pub type ArcIndyVdrLedgerWrite = IndyVdrLedgerWrite; pub struct VcxPoolConfig { pub genesis_file_path: String, @@ -41,19 +42,14 @@ pub fn build_ledger_components( Some(cfg) => cfg, }; - let ledger_pool = Arc::new(IndyVdrLedgerPool::new( - pool_config.genesis_file_path, - indy_vdr_config, - vec![], - )?); + let ledger_pool = + IndyVdrLedgerPool::new(pool_config.genesis_file_path, indy_vdr_config, vec![])?; + let request_submitter = Arc::new(IndyVdrSubmitter::new(ledger_pool)); let ledger_read = 
indyvdr_build_ledger_read(request_submitter.clone(), cache_config)?; let ledger_write = indyvdr_build_ledger_write(wallet, request_submitter, None); - let ledger_read = Arc::new(ledger_read); - let ledger_write = Arc::new(ledger_write); - Ok((ledger_read, ledger_write)) } @@ -65,7 +61,7 @@ pub fn indyvdr_build_ledger_read( let response_cacher = Arc::new(InMemoryResponseCacher::new(cache_config)); let config_read = IndyVdrLedgerReadConfig { - request_submitter: request_submitter.clone(), + request_submitter, response_parser, response_cacher, protocol_version: ProtocolVersion::node_1_4(), diff --git a/aries_vcx/src/core/profile/mod.rs b/aries_vcx/src/core/profile/mod.rs index 42d95aa51e..563dcff7f4 100644 --- a/aries_vcx/src/core/profile/mod.rs +++ b/aries_vcx/src/core/profile/mod.rs @@ -1,23 +1,28 @@ pub mod ledger; -#[cfg(feature = "modular_libs")] +#[cfg(all(feature = "credx", feature = "vdrtools_wallet"))] pub mod modular_libs_profile; -pub mod profile; #[cfg(feature = "vdr_proxy_ledger")] pub mod vdr_proxy_profile; -#[cfg(feature = "vdrtools")] -pub mod vdrtools_profile; - -const DEFAULT_AML_LABEL: &str = "eula"; use std::sync::Arc; -use aries_vcx_core::ledger::{ - base_ledger::{IndyLedgerRead, TxnAuthrAgrmtOptions}, - indy_vdr_ledger::GetTxnAuthorAgreementData, +use aries_vcx_core::{ + anoncreds::base_anoncreds::BaseAnonCreds, + ledger::{ + base_ledger::{ + AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, + TxnAuthrAgrmtOptions, + }, + indy_vdr_ledger::GetTxnAuthorAgreementData, + }, + wallet::base_wallet::BaseWallet, }; +use async_trait::async_trait; use crate::errors::error::VcxResult; +const DEFAULT_AML_LABEL: &str = "eula"; + pub async fn prepare_taa_options( ledger_read: Arc, ) -> VcxResult> { @@ -32,3 +37,21 @@ pub async fn prepare_taa_options( Ok(None) } } + +#[async_trait] +pub trait Profile: std::fmt::Debug + Send + Sync { + type LedgerRead: IndyLedgerRead + AnoncredsLedgerRead; + type LedgerWrite: IndyLedgerWrite + AnoncredsLedgerWrite; + type Anoncreds: BaseAnonCreds; + type Wallet: BaseWallet; + + fn ledger_read(&self) -> &Self::LedgerRead; + + fn ledger_write(&self) -> &Self::LedgerWrite; + + fn anoncreds(&self) -> &Self::Anoncreds; + + fn wallet(&self) -> &Self::Wallet; + + fn update_taa_configuration(&self, taa_options: TxnAuthrAgrmtOptions) -> VcxResult<()>; +} diff --git a/aries_vcx/src/core/profile/modular_libs_profile.rs b/aries_vcx/src/core/profile/modular_libs_profile.rs index c254aa3938..c7f0f310b4 100644 --- a/aries_vcx/src/core/profile/modular_libs_profile.rs +++ b/aries_vcx/src/core/profile/modular_libs_profile.rs @@ -1,16 +1,16 @@ use std::sync::Arc; use aries_vcx_core::{ - anoncreds::{base_anoncreds::BaseAnonCreds, credx_anoncreds::IndyCredxAnonCreds}, - ledger::base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - TaaConfigurator, TxnAuthrAgrmtOptions, - }, - wallet::base_wallet::BaseWallet, + anoncreds::credx_anoncreds::IndyCredxAnonCreds, + ledger::base_ledger::{TaaConfigurator, TxnAuthrAgrmtOptions}, + wallet::indy::IndySdkWallet, }; use async_trait::async_trait; -use super::profile::Profile; +use super::{ + ledger::{ArcIndyVdrLedgerRead, ArcIndyVdrLedgerWrite}, + Profile, +}; use crate::{ core::profile::ledger::{build_ledger_components, VcxPoolConfig}, errors::error::VcxResult, @@ -19,64 +19,51 @@ use crate::{ #[allow(dead_code)] #[derive(Debug)] pub struct ModularLibsProfile { - wallet: Arc, - anoncreds: Arc, - - // ledger reads - anoncreds_ledger_read: Arc, - indy_ledger_read: Arc, - - 
// ledger writes - anoncreds_ledger_write: Arc, - indy_ledger_write: Arc, - taa_configurator: Arc, + wallet: Arc, + anoncreds: IndyCredxAnonCreds, + indy_ledger_read: ArcIndyVdrLedgerRead, + indy_ledger_write: ArcIndyVdrLedgerWrite, } impl ModularLibsProfile { - pub fn init(wallet: Arc, vcx_pool_config: VcxPoolConfig) -> VcxResult { - let anoncreds = Arc::new(IndyCredxAnonCreds::new(Arc::clone(&wallet))); + pub fn init(wallet: Arc, vcx_pool_config: VcxPoolConfig) -> VcxResult { + let anoncreds = IndyCredxAnonCreds::new(wallet.clone()); let (ledger_read, ledger_write) = build_ledger_components(wallet.clone(), vcx_pool_config)?; Ok(ModularLibsProfile { wallet, anoncreds, - anoncreds_ledger_read: ledger_read.clone(), indy_ledger_read: ledger_read, - anoncreds_ledger_write: ledger_write.clone(), - indy_ledger_write: ledger_write.clone(), - taa_configurator: ledger_write, + indy_ledger_write: ledger_write, }) } } #[async_trait] impl Profile for ModularLibsProfile { - fn inject_indy_ledger_read(&self) -> Arc { - Arc::clone(&self.indy_ledger_read) - } - - fn inject_indy_ledger_write(&self) -> Arc { - Arc::clone(&self.indy_ledger_write) - } + type LedgerRead = ArcIndyVdrLedgerRead; + type LedgerWrite = ArcIndyVdrLedgerWrite; + type Anoncreds = IndyCredxAnonCreds; + type Wallet = IndySdkWallet; - fn inject_anoncreds(&self) -> Arc { - Arc::clone(&self.anoncreds) + fn ledger_read(&self) -> &Self::LedgerRead { + &self.indy_ledger_read } - fn inject_anoncreds_ledger_read(&self) -> Arc { - Arc::clone(&self.anoncreds_ledger_read) + fn ledger_write(&self) -> &Self::LedgerWrite { + &self.indy_ledger_write } - fn inject_anoncreds_ledger_write(&self) -> Arc { - Arc::clone(&self.anoncreds_ledger_write) + fn anoncreds(&self) -> &Self::Anoncreds { + &self.anoncreds } - fn inject_wallet(&self) -> Arc { - Arc::clone(&self.wallet) + fn wallet(&self) -> &Self::Wallet { + &self.wallet } fn update_taa_configuration(&self, taa_options: TxnAuthrAgrmtOptions) -> VcxResult<()> { - self.taa_configurator + self.indy_ledger_write .set_txn_author_agreement_options(taa_options) .map_err(|e| e.into()) } diff --git a/aries_vcx/src/core/profile/profile.rs b/aries_vcx/src/core/profile/profile.rs deleted file mode 100644 index b24eabcb12..0000000000 --- a/aries_vcx/src/core/profile/profile.rs +++ /dev/null @@ -1,37 +0,0 @@ -use std::sync::Arc; - -#[cfg(feature = "migration")] -use aries_vcx_core::WalletHandle; -use aries_vcx_core::{ - anoncreds::base_anoncreds::BaseAnonCreds, - ledger::base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - TxnAuthrAgrmtOptions, - }, - wallet::base_wallet::BaseWallet, -}; -use async_trait::async_trait; - -use crate::errors::error::VcxResult; - -#[async_trait] -pub trait Profile: std::fmt::Debug + Send + Sync { - fn inject_indy_ledger_read(&self) -> Arc; - - fn inject_indy_ledger_write(&self) -> Arc; - - fn inject_anoncreds(&self) -> Arc; - - fn inject_anoncreds_ledger_read(&self) -> Arc; - - fn inject_anoncreds_ledger_write(&self) -> Arc; - - fn inject_wallet(&self) -> Arc; - - #[cfg(feature = "migration")] - fn wallet_handle(&self) -> Option { - None - } - - fn update_taa_configuration(&self, taa_options: TxnAuthrAgrmtOptions) -> VcxResult<()>; -} diff --git a/aries_vcx/src/core/profile/vdr_proxy_profile.rs b/aries_vcx/src/core/profile/vdr_proxy_profile.rs index 81d68e4721..3d3e9381b2 100644 --- a/aries_vcx/src/core/profile/vdr_proxy_profile.rs +++ b/aries_vcx/src/core/profile/vdr_proxy_profile.rs @@ -1,12 +1,9 @@ use std::{sync::Arc, time::Duration}; use 
aries_vcx_core::{ - anoncreds::{base_anoncreds::BaseAnonCreds, indy_anoncreds::IndySdkAnonCreds}, + anoncreds::credx_anoncreds::IndyCredxAnonCreds, ledger::{ - base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - TaaConfigurator, TxnAuthrAgrmtOptions, - }, + base_ledger::{TaaConfigurator, TxnAuthrAgrmtOptions}, indy_vdr_ledger::{ IndyVdrLedgerRead, IndyVdrLedgerReadConfig, IndyVdrLedgerWrite, IndyVdrLedgerWriteConfig, ProtocolVersion, @@ -15,32 +12,25 @@ use aries_vcx_core::{ request_submitter::vdr_proxy::VdrProxySubmitter, response_cacher::in_memory::{InMemoryResponseCacher, InMemoryResponseCacherConfig}, }, - wallet::{base_wallet::BaseWallet, indy::IndySdkWallet}, + wallet::indy::IndySdkWallet, ResponseParser, VdrProxyClient, }; use async_trait::async_trait; -use super::{prepare_taa_options, profile::Profile}; +use super::{prepare_taa_options, Profile}; use crate::errors::error::VcxResult; #[derive(Debug)] pub struct VdrProxyProfile { - wallet: Arc, - anoncreds: Arc, - - // ledger reads - anoncreds_ledger_read: Arc, - indy_ledger_read: Arc, - - // ledger writes - anoncreds_ledger_write: Arc, - indy_ledger_write: Arc, - taa_configurator: Arc, + wallet: Arc, + anoncreds: IndyCredxAnonCreds, + indy_ledger_read: Arc>, + indy_ledger_write: IndyVdrLedgerWrite, } impl VdrProxyProfile { pub async fn init(wallet: Arc, client: VdrProxyClient) -> VcxResult { - let anoncreds = Arc::new(IndySdkAnonCreds::new(wallet.wallet_handle)); + let anoncreds = IndyCredxAnonCreds::new(wallet.clone()); let request_signer = Arc::new(BaseWalletRequestSigner::new(wallet.clone())); let request_submitter = Arc::new(VdrProxySubmitter::new(Arc::new(client))); let response_parser = Arc::new(ResponseParser); @@ -64,48 +54,42 @@ impl VdrProxyProfile { taa_options: prepare_taa_options(ledger_read.clone()).await?, protocol_version: ProtocolVersion::node_1_4(), }; - let ledger_write = Arc::new(IndyVdrLedgerWrite::new(config_write)); + let ledger_write = IndyVdrLedgerWrite::new(config_write); Ok(VdrProxyProfile { wallet, anoncreds, - anoncreds_ledger_read: ledger_read.clone(), - anoncreds_ledger_write: ledger_write.clone(), indy_ledger_read: ledger_read, - indy_ledger_write: ledger_write.clone(), - taa_configurator: ledger_write, + indy_ledger_write: ledger_write, }) } } #[async_trait] impl Profile for VdrProxyProfile { - fn inject_indy_ledger_read(&self) -> Arc { - Arc::clone(&self.indy_ledger_read) - } - - fn inject_indy_ledger_write(&self) -> Arc { - Arc::clone(&self.indy_ledger_write) - } + type LedgerRead = IndyVdrLedgerRead; + type LedgerWrite = IndyVdrLedgerWrite; + type Anoncreds = IndyCredxAnonCreds; + type Wallet = IndySdkWallet; - fn inject_anoncreds(&self) -> Arc { - Arc::clone(&self.anoncreds) + fn ledger_read(&self) -> &Self::LedgerRead { + &self.indy_ledger_read } - fn inject_anoncreds_ledger_read(&self) -> Arc { - Arc::clone(&self.anoncreds_ledger_read) + fn ledger_write(&self) -> &Self::LedgerWrite { + &self.indy_ledger_write } - fn inject_anoncreds_ledger_write(&self) -> Arc { - Arc::clone(&self.anoncreds_ledger_write) + fn anoncreds(&self) -> &Self::Anoncreds { + &self.anoncreds } - fn inject_wallet(&self) -> Arc { - Arc::clone(&self.wallet) + fn wallet(&self) -> &Self::Wallet { + &self.wallet } fn update_taa_configuration(&self, taa_options: TxnAuthrAgrmtOptions) -> VcxResult<()> { - self.taa_configurator + self.ledger_write() .set_txn_author_agreement_options(taa_options) .map_err(|e| e.into()) } diff --git a/aries_vcx/src/core/profile/vdrtools_profile.rs 
b/aries_vcx/src/core/profile/vdrtools_profile.rs deleted file mode 100644 index 58db443883..0000000000 --- a/aries_vcx/src/core/profile/vdrtools_profile.rs +++ /dev/null @@ -1,83 +0,0 @@ -use std::sync::Arc; - -use aries_vcx_core::{ - anoncreds::{base_anoncreds::BaseAnonCreds, indy_anoncreds::IndySdkAnonCreds}, - ledger::base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - TxnAuthrAgrmtOptions, - }, - wallet::{base_wallet::BaseWallet, indy::IndySdkWallet}, -}; -use async_trait::async_trait; - -use super::profile::Profile; -use crate::errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}; - -#[derive(Debug)] -pub struct VdrtoolsProfile { - wallet: Arc, - anoncreds: Arc, - anoncreds_ledger_read: Arc, - anoncreds_ledger_write: Arc, - indy_ledger_read: Arc, - indy_ledger_write: Arc, -} - -impl VdrtoolsProfile { - pub fn init( - wallet: Arc, - anoncreds_ledger_read: Arc, - anoncreds_ledger_write: Arc, - indy_ledger_read: Arc, - indy_ledger_write: Arc, - ) -> Self { - let anoncreds = Arc::new(IndySdkAnonCreds::new(wallet.wallet_handle)); - VdrtoolsProfile { - wallet, - anoncreds, - anoncreds_ledger_read, - anoncreds_ledger_write, - indy_ledger_read, - indy_ledger_write, - } - } -} - -#[async_trait] -impl Profile for VdrtoolsProfile { - fn inject_indy_ledger_read(&self) -> Arc { - Arc::clone(&self.indy_ledger_read) - } - - fn inject_indy_ledger_write(&self) -> Arc { - Arc::clone(&self.indy_ledger_write) - } - - fn inject_anoncreds(&self) -> Arc { - Arc::clone(&self.anoncreds) - } - - fn inject_anoncreds_ledger_read(&self) -> Arc { - Arc::clone(&self.anoncreds_ledger_read) - } - - fn inject_anoncreds_ledger_write(&self) -> Arc { - Arc::clone(&self.anoncreds_ledger_write) - } - - fn inject_wallet(&self) -> Arc { - self.wallet.clone() - } - - #[cfg(feature = "migration")] - fn wallet_handle(&self) -> Option { - Some(self.wallet.wallet_handle) - } - - fn update_taa_configuration(&self, _taa_options: TxnAuthrAgrmtOptions) -> VcxResult<()> { - Err(AriesVcxError::from_msg( - AriesVcxErrorKind::ActionNotSupported, - "update_taa_configuration no implemented for VdrtoolsProfile", - )) - } -} diff --git a/aries_vcx/src/global/settings.rs b/aries_vcx/src/global/settings.rs index 8f55a3c17b..1149a6e1df 100644 --- a/aries_vcx/src/global/settings.rs +++ b/aries_vcx/src/global/settings.rs @@ -1,171 +1,9 @@ -use std::{collections::HashMap, sync::RwLock}; - -use aries_vcx_core::global::settings::{disable_indy_mocks, enable_indy_mocks}; - -use crate::errors::error::prelude::*; - -pub static CONFIG_SDK_TO_REMOTE_ROLE: &str = "sdk_to_remote_role"; -pub static CONFIG_INSTITUTION_DID: &str = "institution_did"; -pub static CONFIG_INSTITUTION_VERKEY: &str = "institution_verkey"; - -// functionally not used -pub static CONFIG_WEBHOOK_URL: &str = "webhook_url"; -pub static CONFIG_ENABLE_TEST_MODE: &str = "enable_test_mode"; -pub static CONFIG_GENESIS_PATH: &str = "genesis_path"; -pub static CONFIG_LOG_CONFIG: &str = "log_config"; -pub static CONFIG_EXPORTED_WALLET_PATH: &str = "exported_wallet_path"; -pub static CONFIG_WALLET_KEY: &str = "wallet_key"; -pub static CONFIG_WALLET_NAME: &str = "wallet_name"; -pub static CONFIG_WALLET_TYPE: &str = "wallet_type"; -pub static CONFIG_WALLET_KEY_DERIVATION: &str = "wallet_key_derivation"; -pub static CONFIG_PROTOCOL_VERSION: &str = "protocol_version"; -pub static CONFIG_TXN_AUTHOR_AGREEMENT: &str = "author_agreement"; -pub static CONFIG_POOL_CONFIG: &str = "pool_config"; -pub static CONFIG_DID_METHOD: &str = "did_method"; -pub static 
DEFAULT_PROTOCOL_VERSION: usize = 2; -pub static MAX_SUPPORTED_PROTOCOL_VERSION: usize = 2; -pub static UNINITIALIZED_WALLET_KEY: &str = ""; pub static DEFAULT_GENESIS_PATH: &str = "genesis.txn"; -pub static DEFAULT_WALLET_NAME: &str = "LIBVCX_SDK_WALLET"; -pub static DEFAULT_POOL_NAME: &str = "pool1"; pub static DEFAULT_LINK_SECRET_ALIAS: &str = "main"; pub static DEFAULT_DID: &str = "2hoqvcwupRTUNkXn6ArYzs"; -pub static DEFAULT_ROLE: &str = "0"; pub static DEFAULT_WALLET_BACKUP_KEY: &str = "backup_wallet_key"; pub static DEFAULT_WALLET_KEY: &str = "8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY"; -pub static MASK_VALUE: &str = "********"; pub static WALLET_KDF_RAW: &str = "RAW"; pub static WALLET_KDF_ARGON2I_INT: &str = "ARGON2I_INT"; pub static WALLET_KDF_ARGON2I_MOD: &str = "ARGON2I_MOD"; pub static WALLET_KDF_DEFAULT: &str = WALLET_KDF_ARGON2I_MOD; - -lazy_static! { - static ref SETTINGS: RwLock> = RwLock::new(HashMap::new()); -} - -pub fn aries_vcx_enable_indy_mocks() -> VcxResult<()> { - debug!("enable_indy_mocks >>>"); - enable_indy_mocks()?; - set_config_value(CONFIG_ENABLE_TEST_MODE, "true") -} - -pub fn aries_vcx_disable_indy_mocks() -> VcxResult<()> { - debug!("disable_indy_mocks >>>"); - disable_indy_mocks()?; - set_config_value(CONFIG_ENABLE_TEST_MODE, "false") -} - -pub fn indy_mocks_enabled() -> bool { - let config = SETTINGS.read().expect("Unable to access SETTINGS"); - - match config.get(CONFIG_ENABLE_TEST_MODE) { - None => false, - Some(value) => { - debug!("indy_mocks_enabled >>> {}", value); - value == "true" || value == "indy" - } - } -} - -pub fn get_config_value(key: &str) -> VcxResult { - trace!("get_config_value >>> key: {}", key); - - SETTINGS - .read() - .or(Err(AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidConfiguration, - "Cannot read settings", - )))? - .get(key) - .map(|v| v.to_string()) - .ok_or(AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidConfiguration, - format!("Cannot read \"{}\" from settings", key), - )) -} - -pub fn set_config_value(key: &str, value: &str) -> VcxResult<()> { - trace!("set_config_value >>> key: {}, value: {}", key, value); - SETTINGS - .write() - .or(Err(AriesVcxError::from_msg( - AriesVcxErrorKind::UnknownError, - "Cannot write settings", - )))? 
- .insert(key.to_string(), value.to_string()); - Ok(()) -} - -pub fn reset_config_values_ariesvcx() -> VcxResult<()> { - trace!("reset_config_values >>>"); - let mut config = SETTINGS.write()?; - config.clear(); - Ok(()) -} - -pub fn get_protocol_version() -> usize { - let protocol_version = match get_config_value(CONFIG_PROTOCOL_VERSION) { - Ok(ver) => ver.parse::().unwrap_or_else(|err| { - warn!( - "Can't parse value of protocol version from config ({}), use default one ({})", - err, DEFAULT_PROTOCOL_VERSION - ); - DEFAULT_PROTOCOL_VERSION - }), - Err(err) => { - info!( - "Can't fetch protocol version from config ({}), use default one ({})", - err, DEFAULT_PROTOCOL_VERSION - ); - DEFAULT_PROTOCOL_VERSION - } - }; - if protocol_version > MAX_SUPPORTED_PROTOCOL_VERSION { - error!( - "Protocol version from config {}, greater then maximal supported {}, use maximum one", - protocol_version, MAX_SUPPORTED_PROTOCOL_VERSION - ); - MAX_SUPPORTED_PROTOCOL_VERSION - } else { - protocol_version - } -} - -#[cfg(test)] -#[allow(clippy::unwrap_used)] -pub mod unit_tests { - use super::*; - use crate::utils::devsetup::SetupDefaults; - - fn _pool_config() -> String { - r#"{"timeout":40}"#.to_string() - } - - fn _mandatory_config() -> HashMap { - let mut config: HashMap = HashMap::new(); - config.insert(CONFIG_WALLET_KEY.to_string(), "password".to_string()); - config - } - - #[test] - fn test_get_and_set_values() { - let _setup = SetupDefaults::init(); - - let key = "key1".to_string(); - let value1 = "value1".to_string(); - - // Fails with invalid key - assert_eq!( - get_config_value(&key).unwrap_err().kind(), - AriesVcxErrorKind::InvalidConfiguration - ); - - set_config_value(&key, &value1).unwrap(); - assert_eq!(get_config_value(&key).unwrap(), value1); - } -} - -pub fn init_issuer_config(institution_did: &str) -> VcxResult<()> { - set_config_value(CONFIG_INSTITUTION_DID, institution_did)?; - Ok(()) -} diff --git a/aries_vcx/src/handlers/connection/mediated_connection.rs b/aries_vcx/src/handlers/connection/mediated_connection.rs index 11f7a3cf1e..896c36dbc0 100644 --- a/aries_vcx/src/handlers/connection/mediated_connection.rs +++ b/aries_vcx/src/handlers/connection/mediated_connection.rs @@ -1,5 +1,5 @@ use core::fmt; -use std::{clone::Clone, collections::HashMap, sync::Arc}; +use std::{clone::Clone, collections::HashMap}; use agency_client::{ agency_client::AgencyClient, api::downloaded_message::DownloadedMessage, MessageStatusCode, @@ -99,7 +99,7 @@ pub enum Actor { impl MediatedConnection { pub async fn create( source_id: &str, - wallet: &Arc, + wallet: &impl BaseWallet, agency_client: &AgencyClient, autohop_enabled: bool, ) -> VcxResult { @@ -118,7 +118,7 @@ impl MediatedConnection { pub async fn create_with_invite( source_id: &str, - wallet: &Arc, + wallet: &impl BaseWallet, agency_client: &AgencyClient, invitation: AnyInvitation, did_doc: AriesDidDoc, @@ -145,7 +145,7 @@ impl MediatedConnection { } pub async fn create_with_request( - wallet: &Arc, + wallet: &impl BaseWallet, request: Request, pairwise_info: PairwiseInfo, agency_client: &AgencyClient, @@ -331,7 +331,7 @@ impl MediatedConnection { pub async fn process_request( &mut self, - wallet: &Arc, + wallet: &impl BaseWallet, agency_client: &AgencyClient, request: Request, ) -> VcxResult<()> { @@ -341,7 +341,7 @@ impl MediatedConnection { ); let (connection_sm, new_cloud_agent_info) = match &self.connection_sm { SmConnection::Inviter(sm_inviter) => { - let send_message = self.send_message_closure_connection(Arc::clone(wallet)); + let 
send_message = self.send_message_closure_connection(wallet); let new_pairwise_info = PairwiseInfo::create(wallet).await?; let new_cloud_agent = CloudAgentInfo::create(agency_client, &new_pairwise_info).await?; @@ -376,12 +376,12 @@ impl MediatedConnection { Ok(()) } - pub async fn send_response(&mut self, wallet: &Arc) -> VcxResult<()> { + pub async fn send_response(&mut self, wallet: &impl BaseWallet) -> VcxResult<()> { trace!("MediatedConnection::send_response >>>"); let connection_sm = match self.connection_sm.clone() { SmConnection::Inviter(sm_inviter) => { if let InviterFullState::Requested(_) = sm_inviter.state_object() { - let send_message = self.send_message_closure_connection(Arc::clone(wallet)); + let send_message = self.send_message_closure_connection(wallet); sm_inviter.handle_send_response(send_message).await? } else { return Err(AriesVcxError::from_msg( @@ -422,18 +422,18 @@ impl MediatedConnection { // TODO:::: check usage of this method in regards to profile usage // TODO:::: check usage of this method in regards to profile usage // TODO:::: check usage of this method in regards to profile usage - pub fn update_state_with_message( - &mut self, - wallet: Arc, + pub fn update_state_with_message<'a>( + &'a mut self, + wallet: &'a impl BaseWallet, agency_client: AgencyClient, message: Option, - ) -> BoxFuture<'_, VcxResult<()>> { + ) -> BoxFuture<'a, VcxResult<()>> { Box::pin(async move { let (new_connection_sm, can_autohop) = match &self.connection_sm { SmConnection::Inviter(_) => { - self.step_inviter(&wallet, message, &agency_client).await? + self.step_inviter(wallet, message, &agency_client).await? } - SmConnection::Invitee(_) => self.step_invitee(&wallet, message).await?, + SmConnection::Invitee(_) => self.step_invitee(wallet, message).await?, }; *self = new_connection_sm; if can_autohop && self.autohop_enabled { @@ -447,7 +447,7 @@ impl MediatedConnection { pub async fn find_and_handle_message( &mut self, - wallet: &Arc, + wallet: &impl BaseWallet, agency_client: &AgencyClient, ) -> VcxResult<()> { if !self.is_in_final_state() { @@ -488,7 +488,7 @@ impl MediatedConnection { pub async fn handle_message( &mut self, message: AriesMessage, - wallet: &Arc, + wallet: &impl BaseWallet, ) -> VcxResult<()> { let did_doc = self.their_did_doc().ok_or(AriesVcxError::from_msg( AriesVcxErrorKind::NotReady, @@ -510,7 +510,7 @@ impl MediatedConnection { if ping.content.response_requested { send_message( - Arc::clone(wallet), + wallet, pw_vk.to_string(), did_doc.clone(), build_ping_response(&ping).into(), @@ -527,13 +527,7 @@ impl MediatedConnection { ); let msg = build_handshake_reuse_accepted_msg(&handshake_reuse)?; - send_message( - Arc::clone(wallet), - pw_vk.to_string(), - did_doc.clone(), - msg.into(), - ) - .await?; + send_message(wallet, pw_vk.to_string(), did_doc.clone(), msg.into()).await?; } AriesMessage::DiscoverFeatures(DiscoverFeatures::Query(query)) => { let supported_protocols = query.content.lookup(); @@ -564,7 +558,7 @@ impl MediatedConnection { pub async fn find_message_and_update_state( &mut self, - wallet: &Arc, + wallet: &impl BaseWallet, agency_client: &AgencyClient, ) -> VcxResult<()> { if self.is_in_null_state() { @@ -598,12 +592,8 @@ impl MediatedConnection { "MediatedConnection::update_state >>> handling message uid: {:?}", uid ); - self.update_state_with_message( - Arc::clone(wallet), - agency_client.clone(), - Some(message), - ) - .await?; + self.update_state_with_message(wallet, agency_client.clone(), Some(message)) + .await?; self.cloud_agent_info() 
.ok_or(AriesVcxError::from_msg( AriesVcxErrorKind::NoAgentInformation, @@ -616,7 +606,7 @@ impl MediatedConnection { trace!( "MediatedConnection::update_state >>> trying to update state without message" ); - self.update_state_with_message(Arc::clone(wallet), agency_client.clone(), None) + self.update_state_with_message(wallet, agency_client.clone(), None) .await?; } } @@ -630,7 +620,7 @@ impl MediatedConnection { async fn step_inviter( &self, - wallet: &Arc, + wallet: &impl BaseWallet, message: Option, agency_client: &AgencyClient, ) -> VcxResult<(Self, bool)> { @@ -639,8 +629,7 @@ impl MediatedConnection { let (sm_inviter, new_cloud_agent_info, can_autohop) = match message { Some(message) => match message { AriesMessage::Connection(Connection::Request(request)) => { - let send_message = - self.send_message_closure_connection(Arc::clone(wallet)); + let send_message = self.send_message_closure_connection(wallet); let new_pairwise_info = PairwiseInfo::create(wallet).await?; let new_cloud_agent = CloudAgentInfo::create(agency_client, &new_pairwise_info).await?; @@ -675,8 +664,7 @@ impl MediatedConnection { }, None => { if let InviterFullState::Requested(_) = sm_inviter.state_object() { - let send_message = - self.send_message_closure_connection(Arc::clone(wallet)); + let send_message = self.send_message_closure_connection(wallet); ( sm_inviter.handle_send_response(send_message).await?, None, @@ -705,7 +693,7 @@ impl MediatedConnection { async fn step_invitee( &self, - wallet: &Arc, + wallet: &impl BaseWallet, message: Option, ) -> VcxResult<(Self, bool)> { match self.connection_sm.clone() { @@ -729,8 +717,7 @@ impl MediatedConnection { ) } AriesMessage::Connection(Connection::Response(response)) => { - let send_message = - self.send_message_closure_connection(Arc::clone(wallet)); + let send_message = self.send_message_closure_connection(wallet); ( sm_invitee .handle_connection_response(wallet, response, send_message) @@ -744,7 +731,7 @@ impl MediatedConnection { _ => (sm_invitee, false), }, None => { - let send_message = self.send_message_closure_connection(Arc::clone(wallet)); + let send_message = self.send_message_closure_connection(wallet); (sm_invitee.handle_send_ack(send_message).await?, false) } }; @@ -773,12 +760,15 @@ impl MediatedConnection { } } - pub async fn connect( - &mut self, - wallet: &Arc, + pub async fn connect<'a, 'b>( + &'a mut self, + wallet: &'b impl BaseWallet, agency_client: &AgencyClient, - send_message: Option, - ) -> VcxResult<()> { + send_message: Option>, + ) -> VcxResult<()> + where + 'a: 'b, + { trace!( "MediatedConnection::connect >>> source_id: {}", self.source_id() @@ -790,7 +780,7 @@ impl MediatedConnection { AriesVcxErrorKind::NoAgentInformation, "Missing cloud agent info", ))?; - self.connection_sm = match &self.connection_sm { + let sm = match &self.connection_sm { SmConnection::Inviter(sm_inviter) => { SmConnection::Inviter(sm_inviter.clone().create_invitation( cloud_agent_info.routing_keys(agency_client)?, @@ -798,8 +788,9 @@ impl MediatedConnection { )?) 
} SmConnection::Invitee(sm_invitee) => { - let send_message = send_message - .unwrap_or(self.send_message_closure_connection(Arc::clone(wallet))); + let send_message = + send_message.unwrap_or(self.send_message_closure_connection(wallet)); + SmConnection::Invitee( sm_invitee .clone() @@ -812,6 +803,8 @@ impl MediatedConnection { ) } }; + + self.connection_sm = sm; Ok(()) } @@ -930,10 +923,10 @@ impl MediatedConnection { .await } - pub async fn send_message_closure( + pub async fn send_message_closure<'a>( &self, - wallet: Arc, - ) -> VcxResult { + wallet: &'a impl BaseWallet, + ) -> VcxResult> { trace!("send_message_closure >>>"); let did_doc = self.their_did_doc().ok_or(AriesVcxError::from_msg( AriesVcxErrorKind::NotReady, @@ -950,10 +943,10 @@ impl MediatedConnection { })) } - fn send_message_closure_connection( + fn send_message_closure_connection<'a>( &self, - wallet: Arc, - ) -> SendClosureConnection { + wallet: &'a impl BaseWallet, + ) -> SendClosureConnection<'a> { trace!("send_message_closure_connection >>>"); Box::new( move |message: AriesMessage, sender_vk: String, did_doc: AriesDidDoc| { @@ -988,7 +981,7 @@ impl MediatedConnection { pub async fn send_generic_message( &self, - wallet: &Arc, + wallet: &impl BaseWallet, message: &str, ) -> VcxResult { trace!( @@ -996,26 +989,26 @@ impl MediatedConnection { message ); let message = Self::build_basic_message(message); - let send_message = self.send_message_closure(Arc::clone(wallet)).await?; + let send_message = self.send_message_closure(wallet).await?; send_message(message).await.map(|_| String::new()) } pub async fn send_a2a_message( &self, - wallet: &Arc, + wallet: &impl BaseWallet, message: &AriesMessage, ) -> VcxResult { trace!( "MediatedConnection::send_a2a_message >>> message: {:?}", message ); - let send_message = self.send_message_closure(Arc::clone(wallet)).await?; + let send_message = self.send_message_closure(wallet).await?; send_message(message.clone()).await.map(|_| String::new()) } pub async fn send_ping( &mut self, - wallet: Arc, + wallet: &impl BaseWallet, comment: Option, ) -> VcxResult { let mut trust_ping = TrustPingSender::build(true, comment); @@ -1027,7 +1020,7 @@ impl MediatedConnection { pub async fn send_handshake_reuse( &self, - wallet: &Arc, + wallet: &impl BaseWallet, oob_msg: &str, ) -> VcxResult<()> { trace!("MediatedConnection::send_handshake_reuse >>>"); @@ -1049,7 +1042,7 @@ impl MediatedConnection { )); } }; - let send_message = self.send_message_closure(Arc::clone(wallet)).await?; + let send_message = self.send_message_closure(wallet).await?; send_message(build_handshake_reuse_msg(&oob).into()).await } @@ -1066,7 +1059,7 @@ impl MediatedConnection { pub async fn send_discovery_query( &self, - wallet: &Arc, + wallet: &impl BaseWallet, query: Option, comment: Option, ) -> VcxResult<()> { diff --git a/aries_vcx/src/handlers/discovery/mod.rs b/aries_vcx/src/handlers/discovery/mod.rs index 1ef01063cc..4e0433eb5e 100644 --- a/aries_vcx/src/handlers/discovery/mod.rs +++ b/aries_vcx/src/handlers/discovery/mod.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::wallet::base_wallet::BaseWallet; use chrono::Utc; use diddoc_legacy::aries::diddoc::AriesDidDoc; @@ -16,7 +14,7 @@ use uuid::Uuid; use crate::{errors::error::VcxResult, utils::send_message}; pub async fn send_discovery_query( - wallet: &Arc, + wallet: &impl BaseWallet, query: Option, comment: Option, did_doc: &AriesDidDoc, @@ -41,17 +39,11 @@ pub async fn send_discovery_query( .decorators(decorators) .build(); - send_message( - 
Arc::clone(wallet), - pw_vk.to_string(), - did_doc.clone(), - query, - ) - .await + send_message(wallet, pw_vk.to_string(), did_doc.clone(), query).await } pub async fn respond_discovery_query( - wallet: &Arc, + wallet: &impl BaseWallet, query: Query, did_doc: &AriesDidDoc, pw_vk: &str, @@ -70,11 +62,5 @@ pub async fn respond_discovery_query( .decorators(decorators) .build(); - send_message( - Arc::clone(wallet), - pw_vk.to_string(), - did_doc.clone(), - disclose, - ) - .await + send_message(wallet, pw_vk.to_string(), did_doc.clone(), disclose).await } diff --git a/aries_vcx/src/handlers/issuance/holder.rs b/aries_vcx/src/handlers/issuance/holder.rs index 8d9ff70b0a..b1beec1309 100644 --- a/aries_vcx/src/handlers/issuance/holder.rs +++ b/aries_vcx/src/handlers/issuance/holder.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, wallet::base_wallet::BaseWallet, @@ -98,8 +96,8 @@ impl Holder { pub async fn prepare_credential_request( &mut self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, my_pw_did: String, ) -> VcxResult { self.holder_sm = self @@ -143,8 +141,8 @@ impl Holder { pub async fn process_credential( &mut self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, credential: IssueCredentialV1, ) -> VcxResult<()> { self.holder_sm = self @@ -203,19 +201,19 @@ impl Holder { self.holder_sm.get_thread_id() } - pub async fn is_revokable(&self, ledger: &Arc) -> VcxResult { + pub async fn is_revokable(&self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { self.holder_sm.is_revokable(ledger).await } pub async fn is_revoked( &self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, ) -> VcxResult { self.holder_sm.is_revoked(ledger, anoncreds).await } - pub async fn delete_credential(&self, anoncreds: &Arc) -> VcxResult<()> { + pub async fn delete_credential(&self, anoncreds: &impl BaseAnonCreds) -> VcxResult<()> { self.holder_sm.delete_credential(anoncreds).await } @@ -223,20 +221,20 @@ impl Holder { Ok(self.holder_sm.credential_status()) } - pub async fn get_cred_rev_id(&self, anoncreds: &Arc) -> VcxResult { + pub async fn get_cred_rev_id(&self, anoncreds: &impl BaseAnonCreds) -> VcxResult { get_cred_rev_id(anoncreds, &self.get_cred_id()?).await } pub async fn handle_revocation_notification( &self, - ledger: &Arc, - anoncreds: &Arc, - wallet: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, + wallet: &impl BaseWallet, connection: &MediatedConnection, notification: Revoke, ) -> VcxResult<()> { if self.holder_sm.is_revokable(ledger).await? 
{ - let send_message = connection.send_message_closure(Arc::clone(wallet)).await?; + let send_message = connection.send_message_closure(wallet).await?; // TODO: Store to remember notification was received along with details RevocationNotificationReceiver::build( self.get_rev_reg_id()?, @@ -259,8 +257,8 @@ impl Holder { pub async fn process_aries_msg( &mut self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, message: AriesMessage, ) -> VcxResult<()> { let holder_sm = match message { diff --git a/aries_vcx/src/handlers/issuance/issuer.rs b/aries_vcx/src/handlers/issuance/issuer.rs index fa73b0a9c2..116323511a 100644 --- a/aries_vcx/src/handlers/issuance/issuer.rs +++ b/aries_vcx/src/handlers/issuance/issuer.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, }; @@ -145,7 +143,7 @@ impl Issuer { // from that pub async fn build_credential_offer_msg( &mut self, - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, offer_info: OfferInfo, comment: Option, ) -> VcxResult<()> { @@ -181,7 +179,7 @@ impl Issuer { Ok(()) } - pub async fn build_credential(&mut self, anoncreds: &Arc) -> VcxResult<()> { + pub async fn build_credential(&mut self, anoncreds: &impl BaseAnonCreds) -> VcxResult<()> { self.issuer_sm = self.issuer_sm.clone().build_credential(anoncreds).await?; Ok(()) } @@ -216,10 +214,7 @@ impl Issuer { )) } - pub async fn revoke_credential_local( - &self, - anoncreds: &Arc, - ) -> VcxResult<()> { + pub async fn revoke_credential_local(&self, anoncreds: &impl BaseAnonCreds) -> VcxResult<()> { let revocation_info: RevocationInfoV1 = self.issuer_sm .get_revocation_info() @@ -268,7 +263,7 @@ impl Issuer { self.issuer_sm.is_revokable() } - pub async fn is_revoked(&self, ledger: &Arc) -> VcxResult { + pub async fn is_revoked(&self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { self.issuer_sm.is_revoked(ledger).await } diff --git a/aries_vcx/src/handlers/out_of_band/receiver.rs b/aries_vcx/src/handlers/out_of_band/receiver.rs index 2593cec6c5..c0bb718537 100644 --- a/aries_vcx/src/handlers/out_of_band/receiver.rs +++ b/aries_vcx/src/handlers/out_of_band/receiver.rs @@ -1,7 +1,8 @@ -use std::{clone::Clone, str::FromStr, sync::Arc}; +use std::{clone::Clone, fmt::Display, str::FromStr}; use agency_client::agency_client::AgencyClient; use aries_vcx_core::{ledger::base_ledger::IndyLedgerRead, wallet::base_wallet::BaseWallet}; +use base64::{engine::general_purpose, Engine}; use diddoc_legacy::aries::diddoc::AriesDidDoc; use messages::{ decorators::{attachment::AttachmentType, thread::Thread}, @@ -63,7 +64,7 @@ impl OutOfBandReceiver { pub async fn connection_exists<'a>( &self, - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, connections: &'a Vec<&'a MediatedConnection>, ) -> VcxResult> { trace!("OutOfBandReceiver::connection_exists >>>"); @@ -89,7 +90,7 @@ impl OutOfBandReceiver { pub async fn nonmediated_connection_exists<'a, I, T>( &self, - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, connections: I, ) -> Option where @@ -109,7 +110,7 @@ impl OutOfBandReceiver { } async fn connection_matches_service( - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, connection: &GenericConnection, service: &OobService, ) -> bool { @@ -120,7 +121,7 @@ impl OutOfBandReceiver { } async fn did_doc_matches_service( - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, service: &OobService, did_doc: &AriesDidDoc, ) -> bool { @@ -139,7 
+140,7 @@ impl OutOfBandReceiver { } async fn did_doc_matches_resolved_service( - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, service: &OobService, did_doc: &AriesDidDoc, ) -> VcxResult { @@ -166,7 +167,7 @@ impl OutOfBandReceiver { )); }; - let Ok(bytes) = base64::decode(encoded_attach) else { + let Ok(bytes) = general_purpose::STANDARD.decode(encoded_attach) else { return Err(AriesVcxError::from_msg( AriesVcxErrorKind::SerializationError, format!("Attachment is not base 64 encoded JSON: {attach:?}"), @@ -299,7 +300,7 @@ impl OutOfBandReceiver { pub async fn build_connection( &self, - wallet: &Arc, + wallet: &impl BaseWallet, agency_client: &AgencyClient, did_doc: AriesDidDoc, autohop_enabled: bool, @@ -323,13 +324,15 @@ impl OutOfBandReceiver { self.oob.clone().into() } - pub fn to_string(&self) -> String { - json!(AriesMessage::from(self.oob.clone())).to_string() - } - pub fn from_string(oob_data: &str) -> VcxResult { Ok(Self { oob: serde_json::from_str(oob_data)?, }) } } + +impl Display for OutOfBandReceiver { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", json!(AriesMessage::from(self.oob.clone()))) + } +} diff --git a/aries_vcx/src/handlers/out_of_band/sender.rs b/aries_vcx/src/handlers/out_of_band/sender.rs index d49fbf0ce3..d849993a6f 100644 --- a/aries_vcx/src/handlers/out_of_band/sender.rs +++ b/aries_vcx/src/handlers/out_of_band/sender.rs @@ -1,3 +1,5 @@ +use std::fmt::Display; + use messages::{ msg_fields::protocols::{ cred_issuance::{v1::CredentialIssuanceV1, CredentialIssuance}, @@ -122,10 +124,6 @@ impl OutOfBandSender { self.oob.clone().into() } - pub fn to_string(&self) -> String { - json!(AriesMessage::from(self.oob.clone())).to_string() - } - pub fn from_string(oob_data: &str) -> VcxResult { Ok(Self { oob: serde_json::from_str(oob_data)?, @@ -133,6 +131,12 @@ impl OutOfBandSender { } } +impl Display for OutOfBandSender { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", json!(AriesMessage::from(self.oob.clone()))) + } +} + // #[cfg(test)] // mod unit_tests { // use crate::utils::devsetup::SetupMocks; diff --git a/aries_vcx/src/handlers/proof_presentation/prover.rs b/aries_vcx/src/handlers/proof_presentation/prover.rs index 78cb262bad..32050516d7 100644 --- a/aries_vcx/src/handlers/proof_presentation/prover.rs +++ b/aries_vcx/src/handlers/proof_presentation/prover.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, @@ -70,7 +70,7 @@ impl Prover { pub async fn retrieve_credentials( &self, - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, ) -> VcxResult { trace!("Prover::retrieve_credentials >>>"); let presentation_request = self.presentation_request_data()?; @@ -86,8 +86,8 @@ impl Prover { pub async fn generate_presentation( &mut self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, credentials: SelectedCredentials, self_attested_attrs: HashMap, ) -> VcxResult<()> { diff --git a/aries_vcx/src/handlers/proof_presentation/types.rs b/aries_vcx/src/handlers/proof_presentation/types.rs index 45064281be..05eacf9bf1 100644 --- a/aries_vcx/src/handlers/proof_presentation/types.rs +++ b/aries_vcx/src/handlers/proof_presentation/types.rs @@ -135,7 +135,7 @@ impl SelectedCredentials { with_tails_dir: Option, ) { self.credential_for_referent.insert( - referent.to_string(), + referent, 
SelectedCredentialForReferent { credential: SelectedCredentialForReferentCredential::from(retrieved_cred), tails_dir: with_tails_dir, diff --git a/aries_vcx/src/handlers/proof_presentation/verifier.rs b/aries_vcx/src/handlers/proof_presentation/verifier.rs index dbf109b0c8..1ab9c51586 100644 --- a/aries_vcx/src/handlers/proof_presentation/verifier.rs +++ b/aries_vcx/src/handlers/proof_presentation/verifier.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, }; @@ -94,8 +92,8 @@ impl Verifier { // todo: verification and sending ack should be separate apis pub async fn verify_presentation( &mut self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, presentation: Presentation, ) -> VcxResult { trace!("Verifier::verify_presentation >>>"); @@ -161,8 +159,8 @@ impl Verifier { pub async fn process_aries_msg( &mut self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, message: AriesMessage, ) -> VcxResult> { let (verifier_sm, message) = match message { diff --git a/aries_vcx/src/handlers/revocation_notification/mod.rs b/aries_vcx/src/handlers/revocation_notification/mod.rs index 1dcdfc7ad0..9499e26a10 100644 --- a/aries_vcx/src/handlers/revocation_notification/mod.rs +++ b/aries_vcx/src/handlers/revocation_notification/mod.rs @@ -15,7 +15,7 @@ pub async fn send_revocation_notification( issuer: &Issuer, ack_on: Vec, comment: Option, - send_message: SendClosure, + send_message: SendClosure<'_>, ) -> VcxResult<()> { // TODO: Check if actually revoked if issuer.is_revokable() { diff --git a/aries_vcx/src/handlers/revocation_notification/receiver.rs b/aries_vcx/src/handlers/revocation_notification/receiver.rs index 3b61a4daa7..26c648d82d 100644 --- a/aries_vcx/src/handlers/revocation_notification/receiver.rs +++ b/aries_vcx/src/handlers/revocation_notification/receiver.rs @@ -27,7 +27,7 @@ impl RevocationNotificationReceiver { pub async fn handle_revocation_notification( self, notification: Revoke, - send_message: SendClosure, + send_message: SendClosure<'_>, ) -> VcxResult { let receiver_sm = self .receiver_sm @@ -36,7 +36,7 @@ impl RevocationNotificationReceiver { Ok(Self { receiver_sm }) } - pub async fn send_ack(self, send_message: SendClosure) -> VcxResult { + pub async fn send_ack(self, send_message: SendClosure<'_>) -> VcxResult { let receiver_sm = self.receiver_sm.send_ack(send_message).await?; Ok(Self { receiver_sm }) } diff --git a/aries_vcx/src/handlers/revocation_notification/sender.rs b/aries_vcx/src/handlers/revocation_notification/sender.rs index 1cd02cd3bb..eebadefe60 100644 --- a/aries_vcx/src/handlers/revocation_notification/sender.rs +++ b/aries_vcx/src/handlers/revocation_notification/sender.rs @@ -25,7 +25,7 @@ impl RevocationNotificationSender { pub async fn send_revocation_notification( self, config: SenderConfig, - send_message: SendClosure, + send_message: SendClosure<'_>, ) -> VcxResult { let sender_sm = self.sender_sm.send(config, send_message).await?; Ok(Self { sender_sm }) diff --git a/aries_vcx/src/handlers/trust_ping/mod.rs b/aries_vcx/src/handlers/trust_ping/mod.rs index ec8377827f..872a4b33a6 100644 --- a/aries_vcx/src/handlers/trust_ping/mod.rs +++ b/aries_vcx/src/handlers/trust_ping/mod.rs @@ -36,7 +36,7 @@ impl TrustPingSender { .unwrap_or(self.ping.id.as_str()) } - pub async fn send_ping(&mut self, send_message: SendClosure) -> VcxResult<()> { + pub async fn 
send_ping(&mut self, send_message: SendClosure<'_>) -> VcxResult<()> { if self.ping_sent { return Err(AriesVcxError::from_msg( AriesVcxErrorKind::NotReady, @@ -79,7 +79,7 @@ mod unit_tests { utils::devsetup::SetupMocks, }; - pub fn _send_message() -> SendClosure { + pub fn _send_message() -> SendClosure<'static> { Box::new(|_: AriesMessage| Box::pin(async { VcxResult::Ok(()) })) } diff --git a/aries_vcx/src/handlers/util.rs b/aries_vcx/src/handlers/util.rs index 56d0d5b621..a0bae5c2a3 100644 --- a/aries_vcx/src/handlers/util.rs +++ b/aries_vcx/src/handlers/util.rs @@ -1,4 +1,6 @@ +use base64::{engine::general_purpose, Engine}; use messages::{ + decorators::attachment::{Attachment, AttachmentType}, msg_fields::protocols::{ connection::{invitation::Invitation, Connection}, cred_issuance::{v1::CredentialIssuanceV1, v2::CredentialIssuanceV2, CredentialIssuance}, @@ -19,6 +21,15 @@ use strum_macros::{AsRefStr, EnumString}; use crate::errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}; +macro_rules! get_thread_id_or_message_id { + ($msg:expr) => { + $msg.decorators + .thread + .as_ref() + .map_or($msg.id.clone(), |t| t.thid.clone()) + }; +} + macro_rules! matches_thread_id { ($msg:expr, $id:expr) => { $msg.decorators.thread.thid == $id || $msg.decorators.thread.pthid.as_deref() == Some($id) @@ -46,7 +57,7 @@ macro_rules! get_attach_as_string { }; let Some(messages::decorators::attachment::AttachmentType::Base64(encoded_attach)) = __attach else { return err_fn($attachments.get(0)); }; - let Ok(bytes) = base64::decode(encoded_attach) else { return err_fn($attachments.get(0)); }; + let Ok(bytes) = base64::engine::Engine::decode(&base64::engine::general_purpose::STANDARD, &encoded_attach) else { return err_fn($attachments.get(0)); }; let Ok(attach_string) = String::from_utf8(bytes) else { return err_fn($attachments.get(0)); }; attach_string @@ -55,8 +66,9 @@ macro_rules! get_attach_as_string { macro_rules! make_attach_from_str { ($str_attach:expr, $id:expr) => {{ - let attach_type = - messages::decorators::attachment::AttachmentType::Base64(base64::encode($str_attach)); + let attach_type = messages::decorators::attachment::AttachmentType::Base64( + base64::engine::Engine::encode(&base64::engine::general_purpose::STANDARD, $str_attach), + ); let attach_data = messages::decorators::attachment::AttachmentData::builder() .content(attach_type) .build(); @@ -70,18 +82,43 @@ macro_rules! 
make_attach_from_str { } pub(crate) use get_attach_as_string; +pub(crate) use get_thread_id_or_message_id; pub(crate) use make_attach_from_str; pub(crate) use matches_opt_thread_id; pub(crate) use matches_thread_id; use crate::global::settings; +pub fn extract_attachment_as_base64(attachment: &Attachment) -> VcxResult> { + let AttachmentType::Base64(encoded_attach) = &attachment.data.content else { + return Err(AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidMessageFormat, + format!("Message attachment is not base64 as expected: {attachment:?}"), + )); + }; + + general_purpose::URL_SAFE.decode(encoded_attach).map_err(|_| { + AriesVcxError::from_msg( + AriesVcxErrorKind::EncodeError, + format!("Message attachment is not base64 as expected: {attachment:?}"), + ) + }) +} + +pub fn get_attachment_with_id<'a>( + attachments: &'a Vec, + id: &String, +) -> VcxResult<&'a Attachment> { + attachments + .iter() + .find(|attachment| attachment.id.as_ref() == Some(id)) + .ok_or(AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidMessageFormat, + format!("Message is missing an attachment with the expected ID : {id}."), + )) +} + pub fn verify_thread_id(thread_id: &str, message: &AriesMessage) -> VcxResult<()> { - // todo: ultimately remove this - improve tests - // libvcx_core unit tests are passing in hardcoded message which have mismatching thid - if settings::indy_mocks_enabled() { - return Ok(()); - } let is_match = match message { AriesMessage::BasicMessage(msg) => matches_opt_thread_id!(msg, thread_id), AriesMessage::Connection(Connection::Invitation(msg)) => msg.id == thread_id, diff --git a/aries_vcx/src/lib.rs b/aries_vcx/src/lib.rs index 97fd1891c3..4feabd20f6 100644 --- a/aries_vcx/src/lib.rs +++ b/aries_vcx/src/lib.rs @@ -1,12 +1,5 @@ -#![allow(clippy::or_fun_call)] -#![allow(clippy::module_inception)] -#![allow(clippy::derive_partial_eq_without_eq)] -#![allow(clippy::new_without_default)] -#![allow(clippy::inherent_to_string)] #![allow(clippy::large_enum_variant)] -#![deny(clippy::unwrap_used)] -#![cfg_attr(feature = "fatal_warnings", deny(warnings))] -#![crate_name = "aries_vcx"] +#![allow(clippy::diverging_sub_expression)] //this is needed for some large json macro invocations #![recursion_limit = "128"] @@ -27,9 +20,6 @@ extern crate serde_json; #[macro_use] extern crate derive_builder; -#[cfg(test)] -extern crate async_channel; - pub extern crate aries_vcx_core; pub extern crate messages; @@ -46,10 +36,3 @@ pub mod common; pub mod core; pub mod errors; pub mod transport; - -#[cfg(test)] -pub mod test { - pub fn source_id() -> String { - String::from("test source id") - } -} diff --git a/aries_vcx/src/protocols/connection/generic/mod.rs b/aries_vcx/src/protocols/connection/generic/mod.rs index 8a7f53c52f..93bdd45c25 100644 --- a/aries_vcx/src/protocols/connection/generic/mod.rs +++ b/aries_vcx/src/protocols/connection/generic/mod.rs @@ -1,14 +1,12 @@ mod conversions; mod thin_state; -use std::sync::Arc; - use aries_vcx_core::wallet::base_wallet::BaseWallet; use diddoc_legacy::aries::diddoc::AriesDidDoc; use messages::AriesMessage; pub use self::thin_state::{State, ThinState}; -use super::{trait_bounds::BootstrapDidDoc, wrap_and_send_msg}; +use super::trait_bounds::BootstrapDidDoc; use crate::{ errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, handlers::util::AnyInvitation, @@ -174,7 +172,7 @@ impl GenericConnection { pub async fn encrypt_message( &self, - wallet: &Arc, + wallet: &impl BaseWallet, message: &AriesMessage, ) -> VcxResult { let sender_verkey = 
&self.pairwise_info().pw_vk; @@ -182,25 +180,34 @@ impl GenericConnection { AriesVcxErrorKind::NotReady, "No DidDoc present", ))?; - EncryptionEnvelope::create(wallet, message, Some(sender_verkey), did_doc).await + EncryptionEnvelope::create( + wallet, + json!(message).to_string().as_bytes(), + Some(sender_verkey), + did_doc, + ) + .await } pub async fn send_message( &self, - wallet: &Arc, + wallet: &impl BaseWallet, message: &AriesMessage, transport: &T, ) -> VcxResult<()> where T: Transport, { - let sender_verkey = &self.pairwise_info().pw_vk; let did_doc = self.their_did_doc().ok_or(AriesVcxError::from_msg( AriesVcxErrorKind::NotReady, "No DidDoc present", ))?; - wrap_and_send_msg(wallet, message, sender_verkey, did_doc, transport).await + let msg = self.encrypt_message(wallet, message).await?.0; + let service_endpoint = did_doc.get_endpoint().ok_or_else(|| { + AriesVcxError::from_msg(AriesVcxErrorKind::InvalidUrl, "No URL in DID Doc") + })?; + transport.send_message(msg, service_endpoint).await } } @@ -210,9 +217,7 @@ impl GenericConnection { mod connection_serde_tests { #![allow(clippy::unwrap_used)] - use std::sync::Arc; - - use aries_vcx_core::{ledger::base_ledger::IndyLedgerRead, wallet::mock_wallet::MockWallet}; + use aries_vcx_core::wallet::mock_wallet::MockWallet; use async_trait::async_trait; use chrono::Utc; use messages::{ @@ -377,7 +382,7 @@ mod connection_serde_tests { async fn make_initial_parts() -> (String, PairwiseInfo) { let source_id = SOURCE_ID.to_owned(); - let wallet: Arc = Arc::new(MockWallet {}); + let wallet = MockWallet; let pairwise_info = PairwiseInfo::create(&wallet).await.unwrap(); (source_id, pairwise_info) @@ -389,7 +394,7 @@ mod connection_serde_tests { } async fn make_invitee_invited() -> InviteeConnection { - let indy_ledger: Arc = Arc::new(MockLedger {}); + let indy_ledger = MockLedger; let content = InvitationContent::builder_pairwise() .label(String::new()) .recipient_keys(vec![PW_KEY.to_owned()]) @@ -422,7 +427,7 @@ mod connection_serde_tests { } async fn make_invitee_completed() -> InviteeConnection { - let wallet: Arc = Arc::new(MockWallet {}); + let wallet = MockWallet; let con = make_invitee_requested().await; let mut con_data = ConnectionData::new(PW_KEY.to_owned(), AriesDidDoc::default()); con_data.did_doc.id = PW_KEY.to_owned(); @@ -445,10 +450,7 @@ mod connection_serde_tests { .decorators(decorators) .build(); - let con = con - .handle_response(&wallet, response, &MockTransport) - .await - .unwrap(); + let con = con.handle_response(&wallet, response).await.unwrap(); con.send_message(&wallet, &con.get_ack().into(), &MockTransport) .await @@ -468,7 +470,7 @@ mod connection_serde_tests { } async fn make_inviter_requested() -> InviterConnection { - let wallet: Arc = Arc::new(MockWallet {}); + let wallet = MockWallet; let con = make_inviter_invited().await; let new_service_endpoint = SERVICE_ENDPOINT .to_owned() @@ -497,15 +499,9 @@ mod connection_serde_tests { .decorators(decorators) .build(); - con.handle_request( - &wallet, - request, - new_service_endpoint, - new_routing_keys, - &MockTransport, - ) - .await - .unwrap() + con.handle_request(&wallet, request, new_service_endpoint, new_routing_keys) + .await + .unwrap() } async fn make_inviter_completed() -> InviterConnection { diff --git a/aries_vcx/src/protocols/connection/invitee/mod.rs b/aries_vcx/src/protocols/connection/invitee/mod.rs index f6a7023ea5..352d47e3a8 100644 --- a/aries_vcx/src/protocols/connection/invitee/mod.rs +++ b/aries_vcx/src/protocols/connection/invitee/mod.rs @@ -1,7 
+1,5 @@ pub mod states; -use std::sync::Arc; - use aries_vcx_core::{ledger::base_ledger::IndyLedgerRead, wallet::base_wallet::BaseWallet}; use chrono::Utc; use diddoc_legacy::aries::diddoc::AriesDidDoc; @@ -32,7 +30,6 @@ use crate::{ errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, handlers::util::{matches_thread_id, AnyInvitation}, protocols::connection::trait_bounds::ThreadId, - transport::Transport, }; /// Convenience alias @@ -56,7 +53,7 @@ impl InviteeConnection { /// Will error out if a DidDoc could not be resolved from the [`Invitation`]. pub async fn accept_invitation( self, - indy_ledger: &Arc, + indy_ledger: &impl IndyLedgerRead, invitation: AnyInvitation, ) -> VcxResult> { trace!( @@ -169,15 +166,11 @@ impl InviteeConnection { /// * the thread ID of the response does not match the connection thread ID /// * no recipient verkeys are provided in the response. /// * decoding the signed response fails - pub async fn handle_response( + pub async fn handle_response( self, - wallet: &Arc, + wallet: &impl BaseWallet, response: Response, - transport: &T, - ) -> VcxResult> - where - T: Transport, - { + ) -> VcxResult> { let is_match = matches_thread_id!(response, self.state.thread_id()); if !is_match { @@ -197,25 +190,9 @@ impl InviteeConnection { "Cannot handle response: remote verkey not found", ))?; - let did_doc = - match decode_signed_connection_response(wallet, response.content, their_vk).await { - Ok(con_data) => Ok(con_data.did_doc), - Err(err) => { - error!("Request DidDoc validation failed! Sending ProblemReport..."); - - self.send_problem_report( - wallet, - &err, - self.thread_id(), - &self.state.did_doc, - transport, - ) - .await; - - Err(err) - } - }?; - + let did_doc = decode_signed_connection_response(wallet, response.content, their_vk) + .await? + .did_doc; let state = Completed::new(did_doc, self.state.did_doc, self.state.thread_id, None); Ok(Connection { diff --git a/aries_vcx/src/protocols/connection/inviter/mod.rs b/aries_vcx/src/protocols/connection/inviter/mod.rs index 55ab6013a6..a3b489d856 100644 --- a/aries_vcx/src/protocols/connection/inviter/mod.rs +++ b/aries_vcx/src/protocols/connection/inviter/mod.rs @@ -1,7 +1,5 @@ pub mod states; -use std::sync::Arc; - use ::uuid::Uuid; use aries_vcx_core::wallet::base_wallet::BaseWallet; use chrono::Utc; @@ -27,7 +25,6 @@ use crate::{ errors::error::VcxResult, handlers::util::{verify_thread_id, AnyInvitation}, protocols::connection::trait_bounds::ThreadId, - transport::Transport, }; pub type InviterConnection = Connection; @@ -118,7 +115,7 @@ impl InviterConnection { // but was placed here to retro-fit the previous API. 
async fn build_response_content( &self, - wallet: &Arc, + wallet: &impl BaseWallet, thread_id: String, new_pairwise_info: &PairwiseInfo, new_service_endpoint: Url, @@ -163,17 +160,13 @@ impl InviterConnection { /// invitation /// * the [`Request`]'s DidDoc is not valid /// * generating new [`PairwiseInfo`] fails - pub async fn handle_request( + pub async fn handle_request( self, - wallet: &Arc, + wallet: &impl BaseWallet, request: Request, new_service_endpoint: Url, new_routing_keys: Vec, - transport: &T, - ) -> VcxResult> - where - T: Transport, - { + ) -> VcxResult> { trace!( "Connection::process_request >>> request: {:?}, service_endpoint: {}, routing_keys: \ {:?}", @@ -185,28 +178,7 @@ impl InviterConnection { // There must be some other way to validate the thread ID other than cloning the entire // Request verify_thread_id(self.thread_id(), &request.clone().into())?; - - // If the request's DidDoc validation fails, we generate and send a ProblemReport. - // We then return early with the provided error. - if let Err(err) = request.content.connection.did_doc.validate() { - error!("Request DidDoc validation failed! Sending ProblemReport..."); - - self.send_problem_report( - wallet, - &err, - request - .decorators - .thread - .as_ref() - .map(|t| t.thid.as_str()) - .unwrap_or(request.id.as_str()), - &request.content.connection.did_doc, - transport, - ) - .await; - - Err(err)?; - } + request.content.connection.did_doc.validate()?; // Generate new pairwise info that will be used from this point on // and incorporate that into the response. diff --git a/aries_vcx/src/protocols/connection/mod.rs b/aries_vcx/src/protocols/connection/mod.rs index 5ccd2873b1..5368355451 100644 --- a/aries_vcx/src/protocols/connection/mod.rs +++ b/aries_vcx/src/protocols/connection/mod.rs @@ -6,28 +6,20 @@ pub mod pairwise_info; mod serializable; mod trait_bounds; -use std::{error::Error, sync::Arc}; - use aries_vcx_core::wallet::base_wallet::BaseWallet; -use chrono::Utc; use diddoc_legacy::aries::diddoc::AriesDidDoc; use messages::{ - decorators::{thread::Thread, timing::Timing}, - msg_fields::protocols::{ - connection::problem_report::{ - ProblemReport, ProblemReportContent, ProblemReportDecorators, - }, - discover_features::{disclose::Disclose, query::QueryContent, ProtocolDescriptor}, + msg_fields::protocols::discover_features::{ + disclose::Disclose, query::QueryContent, ProtocolDescriptor, }, AriesMessage, }; -use uuid::Uuid; pub use self::generic::{GenericConnection, State, ThinState}; use self::{ generic::GenericState, pairwise_info::PairwiseInfo, - trait_bounds::{CompletedState, HandleProblem, TheirDidDoc, ThreadId}, + trait_bounds::{CompletedState, TheirDidDoc, ThreadId}, }; use crate::{ errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, @@ -102,6 +94,21 @@ where self.state.their_did_doc() } + pub async fn encrypt_message( + &self, + wallet: &impl BaseWallet, + message: &AriesMessage, + ) -> VcxResult { + let sender_verkey = &self.pairwise_info().pw_vk; + EncryptionEnvelope::create( + wallet, + json!(message).to_string().as_bytes(), + Some(sender_verkey), + self.their_did_doc(), + ) + .await + } + pub fn remote_did(&self) -> &str { &self.their_did_doc().id } @@ -119,70 +126,18 @@ where pub async fn send_message( &self, - wallet: &Arc, + wallet: &impl BaseWallet, message: &AriesMessage, transport: &T, ) -> VcxResult<()> where T: Transport, { - let sender_verkey = &self.pairwise_info().pw_vk; - let did_doc = self.their_did_doc(); - wrap_and_send_msg(wallet, message, sender_verkey, did_doc, 
transport).await - } -} - -impl Connection -where - S: HandleProblem, -{ - fn create_problem_report(&self, err: &E, thread_id: &str) -> ProblemReport - where - E: Error, - { - let content = ProblemReportContent::builder() - .explain(err.to_string()) - .build(); - - let decorators = ProblemReportDecorators::builder() - .thread(Thread::builder().thid(thread_id.to_owned()).build()) - .timing(Timing::builder().out_time(Utc::now()).build()) - .build(); - - ProblemReport::builder() - .id(Uuid::new_v4().to_string()) - .content(content) - .decorators(decorators) - .build() - } - - async fn send_problem_report( - &self, - wallet: &Arc, - err: &E, - thread_id: &str, - did_doc: &AriesDidDoc, - transport: &T, - ) where - E: Error, - T: Transport, - { - let sender_verkey = &self.pairwise_info().pw_vk; - let problem_report = self.create_problem_report(err, thread_id); - let res = wrap_and_send_msg( - wallet, - &problem_report.into(), - sender_verkey, - did_doc, - transport, - ) - .await; - - if let Err(e) = res { - trace!("Error encountered when sending ProblemReport: {}", e); - } else { - info!("Error report sent!"); - } + let msg = self.encrypt_message(wallet, message).await?.0; + let service_endpoint = self.their_did_doc().get_endpoint().ok_or_else(|| { + AriesVcxError::from_msg(AriesVcxErrorKind::InvalidUrl, "No URL in DID Doc") + })?; + transport.send_message(msg, service_endpoint).await } } @@ -198,22 +153,3 @@ where self.state.handle_disclose(disclose) } } - -pub(crate) async fn wrap_and_send_msg( - wallet: &Arc, - message: &AriesMessage, - sender_verkey: &str, - did_doc: &AriesDidDoc, - transport: &T, -) -> VcxResult<()> -where - T: Transport, -{ - let env = EncryptionEnvelope::create(wallet, message, Some(sender_verkey), did_doc).await?; - let msg = env.0; - let service_endpoint = did_doc.get_endpoint().ok_or_else(|| { - AriesVcxError::from_msg(AriesVcxErrorKind::InvalidUrl, "No URL in DID Doc") - })?; // This, like many other things, shouldn't clone... 
- - transport.send_message(msg, service_endpoint).await -} diff --git a/aries_vcx/src/protocols/connection/pairwise_info.rs b/aries_vcx/src/protocols/connection/pairwise_info.rs index 9c56f173aa..40b1fc0791 100644 --- a/aries_vcx/src/protocols/connection/pairwise_info.rs +++ b/aries_vcx/src/protocols/connection/pairwise_info.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::wallet::base_wallet::BaseWallet; use crate::errors::error::VcxResult; @@ -11,7 +9,7 @@ pub struct PairwiseInfo { } impl PairwiseInfo { - pub async fn create(wallet: &Arc) -> VcxResult { + pub async fn create(wallet: &impl BaseWallet) -> VcxResult { let (pw_did, pw_vk) = wallet.create_and_store_my_did(None, None).await?; Ok(PairwiseInfo { pw_did, pw_vk }) } diff --git a/aries_vcx/src/protocols/issuance/holder/state_machine.rs b/aries_vcx/src/protocols/issuance/holder/state_machine.rs index ca43201503..3bba8100b4 100644 --- a/aries_vcx/src/protocols/issuance/holder/state_machine.rs +++ b/aries_vcx/src/protocols/issuance/holder/state_machine.rs @@ -1,4 +1,4 @@ -use std::{fmt, sync::Arc}; +use std::fmt; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, @@ -107,7 +107,7 @@ impl HolderSM { pub fn new(source_id: String) -> Self { HolderSM { thread_id: Uuid::new_v4().to_string(), - state: HolderFullState::Initial(InitialHolderState::new()), + state: HolderFullState::Initial(InitialHolderState), source_id, } } @@ -205,8 +205,8 @@ impl HolderSM { pub async fn prepare_credential_request<'a>( self, - ledger: &'a Arc, - anoncreds: &'a Arc, + ledger: &'a impl AnoncredsLedgerRead, + anoncreds: &'a impl BaseAnonCreds, my_pw_did: String, ) -> VcxResult { trace!("HolderSM::prepare_credential_request >>"); @@ -264,8 +264,8 @@ impl HolderSM { pub async fn receive_credential<'a>( self, - ledger: &'a Arc, - anoncreds: &'a Arc, + ledger: &'a impl AnoncredsLedgerRead, + anoncreds: &'a impl BaseAnonCreds, credential: IssueCredentialV1, ) -> VcxResult { trace!("HolderSM::receive_credential >>"); @@ -422,7 +422,7 @@ impl HolderSM { Ok(self.thread_id.clone()) } - pub async fn is_revokable(&self, ledger: &Arc) -> VcxResult { + pub async fn is_revokable(&self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { match self.state { HolderFullState::Initial(ref state) => state.is_revokable(), HolderFullState::ProposalSet(ref state) => state.is_revokable(ledger).await, @@ -434,8 +434,8 @@ impl HolderSM { pub async fn is_revoked( &self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, ) -> VcxResult { if self.is_revokable(ledger).await? 
{ let rev_reg_id = self.get_rev_reg_id()?; @@ -450,7 +450,7 @@ impl HolderSM { } } - pub async fn delete_credential(&self, anoncreds: &Arc) -> VcxResult<()> { + pub async fn delete_credential(&self, anoncreds: &impl BaseAnonCreds) -> VcxResult<()> { trace!("Holder::delete_credential"); match self.state { @@ -512,7 +512,7 @@ pub fn parse_cred_def_id_from_cred_offer(cred_offer: &str) -> VcxResult Ok(cred_def_id.to_string()) } -fn _parse_rev_reg_id_from_credential(credential: &str) -> VcxResult> { +pub fn _parse_rev_reg_id_from_credential(credential: &str) -> VcxResult> { trace!("Holder::_parse_rev_reg_id_from_credential >>>"); let parsed_credential: serde_json::Value = serde_json::from_str(credential).map_err(|err| { @@ -532,8 +532,8 @@ fn _parse_rev_reg_id_from_credential(credential: &str) -> VcxResult, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, credential: &IssueCredentialV1, req_meta: &str, cred_def_json: &str, @@ -568,8 +568,8 @@ async fn _store_credential( } pub async fn create_anoncreds_credential_request( - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, cred_def_id: &str, prover_did: &str, cred_offer: &str, @@ -586,8 +586,8 @@ pub async fn create_anoncreds_credential_request( } async fn build_credential_request_msg( - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, thread_id: String, my_pw_did: String, offer: &OfferCredentialV1, diff --git a/aries_vcx/src/protocols/issuance/holder/states/initial.rs b/aries_vcx/src/protocols/issuance/holder/states/initial.rs index 6d1eb5f80a..d0d4804286 100644 --- a/aries_vcx/src/protocols/issuance/holder/states/initial.rs +++ b/aries_vcx/src/protocols/issuance/holder/states/initial.rs @@ -1,13 +1,9 @@ use crate::errors::error::prelude::*; #[derive(Serialize, Deserialize, Debug, Clone)] -pub struct InitialHolderState {} +pub struct InitialHolderState; impl InitialHolderState { - pub fn new() -> Self { - Self {} - } - pub fn is_revokable(&self) -> VcxResult { Err(AriesVcxError::from_msg( AriesVcxErrorKind::InvalidState, diff --git a/aries_vcx/src/protocols/issuance/holder/states/offer_received.rs b/aries_vcx/src/protocols/issuance/holder/states/offer_received.rs index 3adf7af235..994c5e0afb 100644 --- a/aries_vcx/src/protocols/issuance/holder/states/offer_received.rs +++ b/aries_vcx/src/protocols/issuance/holder/states/offer_received.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::ledger::base_ledger::AnoncredsLedgerRead; use messages::msg_fields::protocols::cred_issuance::v1::offer_credential::OfferCredentialV1; @@ -37,7 +35,7 @@ impl OfferReceivedState { Ok(serde_json::Value::Object(new_map).to_string()) } - pub async fn is_revokable(&self, ledger: &Arc) -> VcxResult { + pub async fn is_revokable(&self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { let offer = self.get_attachment()?; let cred_def_id = parse_cred_def_id_from_cred_offer(&offer).map_err(|err| { diff --git a/aries_vcx/src/protocols/issuance/holder/states/proposal_set.rs b/aries_vcx/src/protocols/issuance/holder/states/proposal_set.rs index f5352bad3b..b52667733d 100644 --- a/aries_vcx/src/protocols/issuance/holder/states/proposal_set.rs +++ b/aries_vcx/src/protocols/issuance/holder/states/proposal_set.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::ledger::base_ledger::AnoncredsLedgerRead; use messages::msg_fields::protocols::cred_issuance::v1::propose_credential::ProposeCredentialV1; @@ -17,7 +15,7 
@@ impl ProposalSetState { } } - pub async fn is_revokable(&self, ledger: &Arc) -> VcxResult { + pub async fn is_revokable(&self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { is_cred_def_revokable(ledger, &self.credential_proposal.content.cred_def_id).await } } diff --git a/aries_vcx/src/protocols/issuance/issuer/state_machine.rs b/aries_vcx/src/protocols/issuance/issuer/state_machine.rs index a2b897203f..94091b20be 100644 --- a/aries_vcx/src/protocols/issuance/issuer/state_machine.rs +++ b/aries_vcx/src/protocols/issuance/issuer/state_machine.rs @@ -1,4 +1,4 @@ -use std::{fmt::Display, sync::Arc}; +use std::fmt::Display; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, @@ -275,7 +275,7 @@ impl IssuerSM { } } - pub async fn is_revoked(&self, ledger: &Arc) -> VcxResult { + pub async fn is_revoked(&self, ledger: &impl AnoncredsLedgerRead) -> VcxResult { if self.is_revokable() { let rev_reg_id = self.get_rev_reg_id()?; let rev_id = self.get_rev_id()?; @@ -416,7 +416,7 @@ impl IssuerSM { Ok(Self { state, ..self }) } - pub async fn build_credential(self, anoncreds: &Arc) -> VcxResult { + pub async fn build_credential(self, anoncreds: &impl BaseAnonCreds) -> VcxResult { let state = match self.state { IssuerFullState::RequestReceived(state_data) => { match create_credential( @@ -560,7 +560,7 @@ impl IssuerSM { } async fn create_credential( - anoncreds: &Arc, + anoncreds: &impl BaseAnonCreds, request: &RequestCredentialV1, rev_reg_id: &Option, tails_file: &Option, diff --git a/aries_vcx/src/protocols/issuance/mod.rs b/aries_vcx/src/protocols/issuance/mod.rs index 8ce13a5487..367285be32 100644 --- a/aries_vcx/src/protocols/issuance/mod.rs +++ b/aries_vcx/src/protocols/issuance/mod.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::ledger::base_ledger::AnoncredsLedgerRead; use crate::errors::error::prelude::*; @@ -8,7 +6,7 @@ pub mod holder; pub mod issuer; pub async fn is_cred_def_revokable( - ledger: &Arc, + ledger: &impl AnoncredsLedgerRead, cred_def_id: &str, ) -> VcxResult { let cred_def_json = ledger diff --git a/aries_vcx/src/protocols/issuance_v2/formats/holder/hyperledger_indy.rs b/aries_vcx/src/protocols/issuance_v2/formats/holder/hyperledger_indy.rs new file mode 100644 index 0000000000..1729b8f87e --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/formats/holder/hyperledger_indy.rs @@ -0,0 +1,240 @@ +use std::marker::PhantomData; + +use aries_vcx_core::{ + anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, +}; +use async_trait::async_trait; +use messages::msg_fields::protocols::cred_issuance::v2::{ + issue_credential::{IssueCredentialAttachmentFormatType, IssueCredentialV2}, + offer_credential::{OfferCredentialAttachmentFormatType, OfferCredentialV2}, + propose_credential::ProposeCredentialAttachmentFormatType, + request_credential::RequestCredentialAttachmentFormatType, +}; +use shared_vcx::maybe_known::MaybeKnown; + +use super::HolderCredentialIssuanceFormat; +use crate::{ + errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, + protocols::issuance::holder::state_machine::{ + _parse_rev_reg_id_from_credential, create_anoncreds_credential_request, + parse_cred_def_id_from_cred_offer, + }, +}; + +/// Structure which implements [HolderCredentialIssuanceFormat] functionality for the `hlindy/...` +/// family of issue-credential-v2 attachment formats. 
+/// +/// This implementation expects and creates attachments of the following types: +/// * [ProposeCredentialAttachmentFormatType::HyperledgerIndyCredentialFilter2_0] +/// * [RequestCredentialAttachmentFormatType::HyperledgerIndyCredentialRequest2_0] +/// * [OfferCredentialAttachmentFormatType::HyperledgerIndyCredentialAbstract2_0] +/// * [IssueCredentialAttachmentFormatType::HyperledgerIndyCredential2_0] +/// +/// This is done in accordance to the Aries RFC 0592 Spec: +/// +/// https://github.com/hyperledger/aries-rfcs/blob/b3a3942ef052039e73cd23d847f42947f8287da2/features/0592-indy-attachments/README.md +pub struct HyperledgerIndyHolderCredentialIssuanceFormat<'a, R, A> +where + R: AnoncredsLedgerRead, + A: BaseAnonCreds, +{ + _data: &'a PhantomData<()>, + _ledger_read: PhantomData, + _anoncreds: PhantomData, +} + +pub struct HyperledgerIndyCreateProposalInput { + pub cred_filter: HyperledgerIndyCredentialFilter, +} + +#[derive(Default, Clone, PartialEq, Debug, Serialize, Deserialize, Builder)] +#[builder(setter(into, strip_option), default)] +pub struct HyperledgerIndyCredentialFilter { + #[serde(skip_serializing_if = "Option::is_none")] + pub schema_issuer_did: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub schema_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub schema_version: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub schema_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub issuer_did: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub cred_def_id: Option, +} + +// Simplified cred abstract, for purpose of easy viewing for consumer +// https://github.com/hyperledger/aries-rfcs/blob/main/features/0592-indy-attachments/README.md#cred-abstract-format +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct HyperledgerIndyOfferDetails { + pub schema_id: String, + pub cred_def_id: String, +} + +pub struct HyperledgerIndyCreateRequestInput<'a, R, A> +where + R: AnoncredsLedgerRead, + A: BaseAnonCreds, +{ + pub my_pairwise_did: String, + pub ledger: &'a R, + pub anoncreds: &'a A, +} + +#[derive(Clone, Debug)] +pub struct HyperledgerIndyCreatedRequestMetadata { + credential_request_metadata: String, + credential_def_json: String, +} + +pub struct HyperledgerIndyStoreCredentialInput<'a, R, A> +where + R: AnoncredsLedgerRead, + A: BaseAnonCreds, +{ + pub ledger: &'a R, + pub anoncreds: &'a A, +} + +#[derive(Clone, Debug)] +pub struct HyperledgerIndyStoredCredentialMetadata { + pub credential_id: String, +} + +#[async_trait] +impl<'a, R, A> HolderCredentialIssuanceFormat + for HyperledgerIndyHolderCredentialIssuanceFormat<'a, R, A> +where + R: AnoncredsLedgerRead + 'a, + A: BaseAnonCreds + 'a, +{ + type CreateProposalInput = HyperledgerIndyCreateProposalInput; + + type OfferDetails = HyperledgerIndyOfferDetails; + + type CreateRequestInput = HyperledgerIndyCreateRequestInput<'a, R, A>; + type CreatedRequestMetadata = HyperledgerIndyCreatedRequestMetadata; + + type StoreCredentialInput = HyperledgerIndyStoreCredentialInput<'a, R, A>; + type StoredCredentialMetadata = HyperledgerIndyStoredCredentialMetadata; + + fn supports_request_independent_of_offer() -> bool { + false + } + + fn get_proposal_attachment_format() -> MaybeKnown { + MaybeKnown::Known(ProposeCredentialAttachmentFormatType::HyperledgerIndyCredentialFilter2_0) + } + fn get_request_attachment_format() -> MaybeKnown { + MaybeKnown::Known( + RequestCredentialAttachmentFormatType::HyperledgerIndyCredentialRequest2_0, + ) + } + fn 
get_offer_attachment_format() -> MaybeKnown { + MaybeKnown::Known(OfferCredentialAttachmentFormatType::HyperledgerIndyCredentialAbstract2_0) + } + fn get_credential_attachment_format() -> MaybeKnown { + MaybeKnown::Known(IssueCredentialAttachmentFormatType::HyperledgerIndyCredential2_0) + } + + async fn create_proposal_attachment_content( + data: &HyperledgerIndyCreateProposalInput, + ) -> VcxResult> { + let filter_bytes = serde_json::to_vec(&data.cred_filter)?; + + Ok(filter_bytes) + } + + fn extract_offer_details( + offer_message: &OfferCredentialV2, + ) -> VcxResult { + let attachment = Self::extract_offer_attachment_content(offer_message)?; + + Ok(serde_json::from_slice(&attachment)?) + } + + async fn create_request_attachment_content( + offer_message: &OfferCredentialV2, + data: &Self::CreateRequestInput, + ) -> VcxResult<(Vec, HyperledgerIndyCreatedRequestMetadata)> { + let offer_bytes = Self::extract_offer_attachment_content(&offer_message)?; + let offer_payload = String::from_utf8(offer_bytes).map_err(|_| { + AriesVcxError::from_msg( + AriesVcxErrorKind::EncodeError, + "Expected payload to be a utf8 string", + ) + })?; + + let cred_def_id = parse_cred_def_id_from_cred_offer(&offer_payload)?; + let entropy = &data.my_pairwise_did; + let ledger = data.ledger; + let anoncreds = data.anoncreds; + + let (credential_request, credential_request_metadata, _, credential_def_json) = + create_anoncreds_credential_request( + ledger, + anoncreds, + &cred_def_id, + &entropy, + &offer_payload, + ) + .await?; + + Ok(( + credential_request.into(), + HyperledgerIndyCreatedRequestMetadata { + credential_request_metadata, + credential_def_json, + }, + )) + } + + async fn create_request_attachment_content_independent_of_offer( + _: &Self::CreateRequestInput, + ) -> VcxResult<(Vec, Self::CreatedRequestMetadata)> { + Err(AriesVcxError::from_msg( + AriesVcxErrorKind::ActionNotSupported, + "Anoncreds cannot create request payload independent of an offer", + )) + } + + async fn process_and_store_credential( + issue_credential_message: &IssueCredentialV2, + user_input: &HyperledgerIndyStoreCredentialInput, + request_metadata: &HyperledgerIndyCreatedRequestMetadata, + ) -> VcxResult { + let cred_bytes = Self::extract_credential_attachment_content(&issue_credential_message)?; + let credential_payload = String::from_utf8(cred_bytes).map_err(|_| { + AriesVcxError::from_msg( + AriesVcxErrorKind::EncodeError, + "Expected payload to be a utf8 string", + ) + })?; + + let ledger = user_input.ledger; + let anoncreds = user_input.anoncreds; + + let rev_reg_id = _parse_rev_reg_id_from_credential(&credential_payload)?; + let rev_reg_def_json = if let Some(rev_reg_id) = rev_reg_id { + let json = ledger.get_rev_reg_def_json(&rev_reg_id).await?; + Some(json) + } else { + None + }; + + let cred_id = anoncreds + .prover_store_credential( + None, + &request_metadata.credential_request_metadata, + &credential_payload, + &request_metadata.credential_def_json, + rev_reg_def_json.as_deref(), + ) + .await?; + + Ok(HyperledgerIndyStoredCredentialMetadata { + credential_id: cred_id, + }) + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/formats/holder/ld_proof_vc.rs b/aries_vcx/src/protocols/issuance_v2/formats/holder/ld_proof_vc.rs new file mode 100644 index 0000000000..e9d08d1a40 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/formats/holder/ld_proof_vc.rs @@ -0,0 +1,75 @@ +use async_trait::async_trait; +use messages::msg_fields::protocols::cred_issuance::v2::{ + issue_credential::{IssueCredentialAttachmentFormatType, 
IssueCredentialV2}, + offer_credential::{OfferCredentialAttachmentFormatType, OfferCredentialV2}, + propose_credential::ProposeCredentialAttachmentFormatType, + request_credential::RequestCredentialAttachmentFormatType, +}; +use shared_vcx::maybe_known::MaybeKnown; + +use super::HolderCredentialIssuanceFormat; +use crate::errors::error::VcxResult; + +// TODO - delete, this is just a mock +pub struct LdProofHolderCredentialIssuanceFormat; + +#[async_trait] +impl HolderCredentialIssuanceFormat for LdProofHolderCredentialIssuanceFormat { + type CreateProposalInput = (); + + type OfferDetails = (); + + type CreateRequestInput = (); + type CreatedRequestMetadata = (); + + type StoreCredentialInput = (); + type StoredCredentialMetadata = (); + + fn supports_request_independent_of_offer() -> bool { + true + } + + fn get_proposal_attachment_format() -> MaybeKnown { + MaybeKnown::Known(ProposeCredentialAttachmentFormatType::AriesLdProofVcDetail1_0) + } + fn get_request_attachment_format() -> MaybeKnown { + MaybeKnown::Known(RequestCredentialAttachmentFormatType::AriesLdProofVcDetail1_0) + } + fn get_offer_attachment_format() -> MaybeKnown { + MaybeKnown::Known(OfferCredentialAttachmentFormatType::AriesLdProofVcDetail1_0) + } + fn get_credential_attachment_format() -> MaybeKnown { + MaybeKnown::Known(IssueCredentialAttachmentFormatType::AriesLdProofVc1_0) + } + + async fn create_proposal_attachment_content( + _data: &Self::CreateProposalInput, + ) -> VcxResult> { + Ok("mock".to_owned().into()) + } + + fn extract_offer_details(_: &OfferCredentialV2) -> VcxResult { + Ok(()) + } + + async fn create_request_attachment_content( + _offer_message: &OfferCredentialV2, + _data: &Self::CreateRequestInput, + ) -> VcxResult<(Vec, Self::CreatedRequestMetadata)> { + Ok(("mock".to_owned().into(), ())) + } + + async fn create_request_attachment_content_independent_of_offer( + _data: &Self::CreateRequestInput, + ) -> VcxResult<(Vec, Self::CreatedRequestMetadata)> { + Ok(("mock".to_owned().into(), ())) + } + + async fn process_and_store_credential( + _issue_credential_message: &IssueCredentialV2, + _user_input: &Self::StoreCredentialInput, + _request_metadata: &Self::CreatedRequestMetadata, + ) -> VcxResult { + Ok(()) + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/formats/holder/mod.rs b/aries_vcx/src/protocols/issuance_v2/formats/holder/mod.rs new file mode 100644 index 0000000000..1b532bc8df --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/formats/holder/mod.rs @@ -0,0 +1,166 @@ +use async_trait::async_trait; +use messages::msg_fields::protocols::cred_issuance::v2::{ + issue_credential::{IssueCredentialAttachmentFormatType, IssueCredentialV2}, + offer_credential::{OfferCredentialAttachmentFormatType, OfferCredentialV2}, + propose_credential::ProposeCredentialAttachmentFormatType, + request_credential::RequestCredentialAttachmentFormatType, +}; +use shared_vcx::maybe_known::MaybeKnown; + +use crate::{ + errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, + handlers::util::{extract_attachment_as_base64, get_attachment_with_id}, +}; + +pub mod hyperledger_indy; +pub mod ld_proof_vc; + +/// Trait representing some issue-credential-v2 format family, containing methods required by an +/// holder of this format to create attachments of this format. 
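+///
+/// As a rough sketch of how this is typically consumed (the `input` value and the sending step
+/// are placeholders left to the consumer), a caller can stay generic over the chosen format:
+///
+/// ```no_run
+/// async fn begin_with_proposal<T: HolderCredentialIssuanceFormat>(
+///     input: &T::CreateProposalInput,
+/// ) -> VcxResult<()> {
+///     let holder = HolderV2::<ProposalPrepared<T>>::with_proposal(input, None).await?;
+///     let _proposal = holder.get_proposal();
+///     // ... send the proposal message to the issuer over the consumer's transport
+///     Ok(())
+/// }
+/// ```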
+#[async_trait]
+pub trait HolderCredentialIssuanceFormat {
+    type CreateProposalInput;
+
+    type OfferDetails;
+
+    type CreateRequestInput;
+    type CreatedRequestMetadata;
+
+    type StoreCredentialInput;
+    type StoredCredentialMetadata;
+
+    fn supports_request_independent_of_offer() -> bool;
+
+    fn get_proposal_attachment_format() -> MaybeKnown<ProposeCredentialAttachmentFormatType>;
+    fn get_offer_attachment_format() -> MaybeKnown<OfferCredentialAttachmentFormatType>;
+    fn get_request_attachment_format() -> MaybeKnown<RequestCredentialAttachmentFormatType>;
+    fn get_credential_attachment_format() -> MaybeKnown<IssueCredentialAttachmentFormatType>;
+
+    async fn create_proposal_attachment_content(
+        data: &Self::CreateProposalInput,
+    ) -> VcxResult<Vec<u8>>;
+
+    fn extract_offer_attachment_content(offer_message: &OfferCredentialV2) -> VcxResult<Vec<u8>> {
+        let attachment_id = offer_message
+            .content
+            .formats
+            .iter()
+            .find_map(|format| {
+                (format.format == Self::get_offer_attachment_format()).then_some(&format.attach_id)
+            })
+            .ok_or(AriesVcxError::from_msg(
+                AriesVcxErrorKind::InvalidMessageFormat,
+                "Message does not contain an attachment with the expected format.",
+            ))?;
+
+        let attachment =
+            get_attachment_with_id(&offer_message.content.offers_attach, attachment_id)?;
+
+        // TODO - BAD, not ALWAYS base64, could be JSON
+        extract_attachment_as_base64(attachment)
+    }
+
+    fn extract_offer_details(offer_message: &OfferCredentialV2) -> VcxResult<Self::OfferDetails>;
+
+    async fn create_request_attachment_content(
+        offer_message: &OfferCredentialV2,
+        data: &Self::CreateRequestInput,
+    ) -> VcxResult<(Vec<u8>, Self::CreatedRequestMetadata)>;
+
+    async fn create_request_attachment_content_independent_of_offer(
+        data: &Self::CreateRequestInput,
+    ) -> VcxResult<(Vec<u8>, Self::CreatedRequestMetadata)>;
+
+    fn extract_credential_attachment_content(
+        issue_credential_message: &IssueCredentialV2,
+    ) -> VcxResult<Vec<u8>> {
+        let attachment_id = issue_credential_message
+            .content
+            .formats
+            .iter()
+            .find_map(|format| {
+                (format.format == Self::get_credential_attachment_format())
+                    .then_some(&format.attach_id)
+            })
+            .ok_or(AriesVcxError::from_msg(
+                AriesVcxErrorKind::InvalidMessageFormat,
+                "Message does not contain an attachment with the expected format.",
+            ))?;
+
+        let attachment = get_attachment_with_id(
+            &issue_credential_message.content.credentials_attach,
+            attachment_id,
+        )?;
+
+        // TODO - BAD, not ALWAYS base64, could be JSON
+        extract_attachment_as_base64(attachment)
+    }
+
+    async fn process_and_store_credential(
+        issue_credential_message: &IssueCredentialV2,
+        data: &Self::StoreCredentialInput,
+        request_metadata: &Self::CreatedRequestMetadata,
+    ) -> VcxResult<Self::StoredCredentialMetadata>;
+}
+
+#[cfg(test)]
+pub(crate) mod mocks {
+    use async_trait::async_trait;
+    use messages::msg_fields::protocols::cred_issuance::v2::{
+        issue_credential::{IssueCredentialAttachmentFormatType, IssueCredentialV2},
+        offer_credential::{OfferCredentialAttachmentFormatType, OfferCredentialV2},
+        propose_credential::ProposeCredentialAttachmentFormatType,
+        request_credential::RequestCredentialAttachmentFormatType,
+    };
+    use mockall::mock;
+    use shared_vcx::maybe_known::MaybeKnown;
+
+    use super::HolderCredentialIssuanceFormat;
+    use crate::errors::error::VcxResult;
+
+    mock!
{ + pub HolderCredentialIssuanceFormat {} + #[async_trait] + impl HolderCredentialIssuanceFormat for HolderCredentialIssuanceFormat { + type CreateProposalInput = String; + + type OfferDetails = String; + + type CreateRequestInput = String; + type CreatedRequestMetadata = String; + + type StoreCredentialInput = String; + type StoredCredentialMetadata = String; + + fn supports_request_independent_of_offer() -> bool; + + fn get_proposal_attachment_format() -> MaybeKnown; + fn get_offer_attachment_format() -> MaybeKnown; + fn get_request_attachment_format() -> MaybeKnown; + fn get_credential_attachment_format() -> MaybeKnown; + + async fn create_proposal_attachment_content( + data: &String, + ) -> VcxResult>; + + fn extract_offer_details( + offer_message: &OfferCredentialV2, + ) -> VcxResult; + + async fn create_request_attachment_content( + offer_message: &OfferCredentialV2, + data: &String, + ) -> VcxResult<(Vec, String)>; + + async fn create_request_attachment_content_independent_of_offer( + data: &String, + ) -> VcxResult<(Vec, String)>; + + async fn process_and_store_credential( + issue_credential_message: &IssueCredentialV2, + data: &String, + request_metadata: &String, + ) -> VcxResult; + } + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/formats/issuer/hyperledger_indy.rs b/aries_vcx/src/protocols/issuance_v2/formats/issuer/hyperledger_indy.rs new file mode 100644 index 0000000000..31e503f432 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/formats/issuer/hyperledger_indy.rs @@ -0,0 +1,201 @@ +use std::{collections::HashMap, marker::PhantomData}; + +use aries_vcx_core::anoncreds::base_anoncreds::BaseAnonCreds; +use async_trait::async_trait; +use messages::msg_fields::protocols::cred_issuance::v2::{ + issue_credential::IssueCredentialAttachmentFormatType, + offer_credential::OfferCredentialAttachmentFormatType, + propose_credential::{ProposeCredentialAttachmentFormatType, ProposeCredentialV2}, + request_credential::{RequestCredentialAttachmentFormatType, RequestCredentialV2}, +}; +use shared_vcx::maybe_known::MaybeKnown; + +use super::IssuerCredentialIssuanceFormat; +use crate::{ + errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, + protocols::issuance_v2::formats::holder::hyperledger_indy::HyperledgerIndyCredentialFilter, + utils::openssl::encode, +}; + +/// Structure which implements [IssuerCredentialIssuanceFormat] functionality for the `hlindy/...` +/// family of issue-credential-v2 attachment formats. 
+/// +/// This implementation expects and creates attachments of the following types: +/// * [ProposeCredentialAttachmentFormatType::HyperledgerIndyCredentialFilter2_0] +/// * [RequestCredentialAttachmentFormatType::HyperledgerIndyCredentialRequest2_0] +/// * [OfferCredentialAttachmentFormatType::HyperledgerIndyCredentialAbstract2_0] +/// * [IssueCredentialAttachmentFormatType::HyperledgerIndyCredential2_0] +/// +/// This is done in accordance to the Aries RFC 0592 Spec: +/// +/// https://github.com/hyperledger/aries-rfcs/blob/b3a3942ef052039e73cd23d847f42947f8287da2/features/0592-indy-attachments/README.md + +pub struct HyperledgerIndyIssuerCredentialIssuanceFormat<'a, A> +where + A: BaseAnonCreds, +{ + _marker: &'a PhantomData<()>, + _anoncreds: PhantomData, +} + +pub struct HyperledgerIndyCreateOfferInput<'a, A> { + pub anoncreds: &'a A, + pub cred_def_id: String, +} + +#[derive(Clone)] +pub struct HyperledgerIndyCreatedOfferMetadata { + pub offer_json: String, +} + +pub struct HyperledgerIndyCreateCredentialInput<'a, A> { + pub anoncreds: &'a A, + pub credential_attributes: HashMap, + pub revocation_info: Option, +} + +#[derive(Clone)] +pub struct HyperledgerIndyCreateCredentialRevocationInfoInput { + pub registry_id: String, + pub tails_directory: String, +} + +#[derive(Clone)] +pub struct HyperledgerIndyCreatedCredentialMetadata { + pub credential_revocation_id: Option, +} + +#[async_trait] +impl<'a, A> IssuerCredentialIssuanceFormat for HyperledgerIndyIssuerCredentialIssuanceFormat<'a, A> +where + A: BaseAnonCreds + 'a, +{ + type ProposalDetails = HyperledgerIndyCredentialFilter; + + type CreateOfferInput = HyperledgerIndyCreateOfferInput<'a, A>; + type CreatedOfferMetadata = HyperledgerIndyCreatedOfferMetadata; + + type CreateCredentialInput = HyperledgerIndyCreateCredentialInput<'a, A>; + type CreatedCredentialMetadata = HyperledgerIndyCreatedCredentialMetadata; + + fn supports_request_independent_of_offer() -> bool { + false + } + + fn get_proposal_attachment_format() -> MaybeKnown { + MaybeKnown::Known(ProposeCredentialAttachmentFormatType::HyperledgerIndyCredentialFilter2_0) + } + + fn get_request_attachment_format() -> MaybeKnown { + MaybeKnown::Known( + RequestCredentialAttachmentFormatType::HyperledgerIndyCredentialRequest2_0, + ) + } + fn get_offer_attachment_format() -> MaybeKnown { + MaybeKnown::Known(OfferCredentialAttachmentFormatType::HyperledgerIndyCredentialAbstract2_0) + } + fn get_credential_attachment_format() -> MaybeKnown { + MaybeKnown::Known(IssueCredentialAttachmentFormatType::HyperledgerIndyCredential2_0) + } + + fn extract_proposal_details( + proposal_message: &ProposeCredentialV2, + ) -> VcxResult { + let attachment = Self::extract_proposal_attachment_content(proposal_message)?; + + Ok(serde_json::from_slice(&attachment)?) 
+ } + + async fn create_offer_attachment_content( + data: &HyperledgerIndyCreateOfferInput, + ) -> VcxResult<(Vec, HyperledgerIndyCreatedOfferMetadata)> { + let cred_offer = data + .anoncreds + .issuer_create_credential_offer(&data.cred_def_id) + .await?; + + Ok(( + cred_offer.clone().into_bytes(), + HyperledgerIndyCreatedOfferMetadata { + offer_json: cred_offer, + }, + )) + } + + async fn create_credential_attachment_content( + offer_metadata: &HyperledgerIndyCreatedOfferMetadata, + request_message: &RequestCredentialV2, + data: &HyperledgerIndyCreateCredentialInput, + ) -> VcxResult<(Vec, HyperledgerIndyCreatedCredentialMetadata)> { + let offer = &offer_metadata.offer_json; + + let request_bytes = Self::extract_request_attachment_content(&request_message)?; + let request_payload = String::from_utf8(request_bytes).map_err(|_| { + AriesVcxError::from_msg( + AriesVcxErrorKind::EncodeError, + "Expected payload to be a utf8 string", + ) + })?; + + let encoded_credential_attributes = encode_attributes(&data.credential_attributes)?; + let encoded_credential_attributes_json = + serde_json::to_string(&encoded_credential_attributes)?; + + let (rev_reg_id, tails_dir) = data.revocation_info.as_ref().map_or((None, None), |info| { + ( + Some(info.registry_id.to_owned()), + Some(info.tails_directory.to_owned()), + ) + }); + + let (credential, cred_rev_id, _) = data + .anoncreds + .issuer_create_credential( + offer, + &request_payload, + &encoded_credential_attributes_json, + rev_reg_id, + tails_dir, + ) + .await?; + + let metadata = HyperledgerIndyCreatedCredentialMetadata { + credential_revocation_id: cred_rev_id, + }; + + Ok((credential.into_bytes(), metadata)) + } + + async fn create_credential_attachment_content_independent_of_offer( + _: &RequestCredentialV2, + _: &Self::CreateCredentialInput, + ) -> VcxResult<(Vec, HyperledgerIndyCreatedCredentialMetadata)> { + return Err(AriesVcxError::from_msg( + AriesVcxErrorKind::ActionNotSupported, + "Creating a credential independent of an offer is unsupported for this format", + )); + } +} + +fn encode_attributes( + attributes: &HashMap, +) -> VcxResult> { + let mut encoded = HashMap::::new(); + for (k, v) in attributes.into_iter() { + encoded.insert( + k.to_owned(), + RawAndEncoded { + raw: v.to_owned(), + encoded: encode(&v)?, + }, + ); + } + + Ok(encoded) +} + +#[derive(Serialize)] +struct RawAndEncoded { + raw: String, + encoded: String, +} diff --git a/aries_vcx/src/protocols/issuance_v2/formats/issuer/ld_proof_vc.rs b/aries_vcx/src/protocols/issuance_v2/formats/issuer/ld_proof_vc.rs new file mode 100644 index 0000000000..18b2533ceb --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/formats/issuer/ld_proof_vc.rs @@ -0,0 +1,68 @@ +// TODO - delete, this is a mock + +use async_trait::async_trait; +use messages::msg_fields::protocols::cred_issuance::v2::{ + issue_credential::IssueCredentialAttachmentFormatType, + offer_credential::OfferCredentialAttachmentFormatType, + propose_credential::{ProposeCredentialAttachmentFormatType, ProposeCredentialV2}, + request_credential::{RequestCredentialAttachmentFormatType, RequestCredentialV2}, +}; +use shared_vcx::maybe_known::MaybeKnown; + +use super::IssuerCredentialIssuanceFormat; +use crate::errors::error::VcxResult; + +pub struct LdProofIssuerCredentialIssuanceFormat; + +#[async_trait] +impl IssuerCredentialIssuanceFormat for LdProofIssuerCredentialIssuanceFormat { + type ProposalDetails = (); + + type CreateOfferInput = (); + type CreatedOfferMetadata = (); + + type CreateCredentialInput = (); + type 
CreatedCredentialMetadata = (); + + fn supports_request_independent_of_offer() -> bool { + true + } + + fn get_proposal_attachment_format() -> MaybeKnown { + MaybeKnown::Known(ProposeCredentialAttachmentFormatType::AriesLdProofVcDetail1_0) + } + fn get_request_attachment_format() -> MaybeKnown { + MaybeKnown::Known(RequestCredentialAttachmentFormatType::AriesLdProofVcDetail1_0) + } + fn get_offer_attachment_format() -> MaybeKnown { + MaybeKnown::Known(OfferCredentialAttachmentFormatType::AriesLdProofVcDetail1_0) + } + fn get_credential_attachment_format() -> MaybeKnown { + MaybeKnown::Known(IssueCredentialAttachmentFormatType::AriesLdProofVc1_0) + } + + fn extract_proposal_details(_: &ProposeCredentialV2) -> VcxResult { + Ok(()) + } + + async fn create_offer_attachment_content( + _: &Self::CreateOfferInput, + ) -> VcxResult<(Vec, ())> { + Ok(("mock data".into(), ())) + } + + async fn create_credential_attachment_content( + _offer_metadata: &(), + _request_message: &RequestCredentialV2, + _data: &Self::CreateCredentialInput, + ) -> VcxResult<(Vec, ())> { + Ok(("mock data".into(), ())) + } + + async fn create_credential_attachment_content_independent_of_offer( + _request_message: &RequestCredentialV2, + _data: &Self::CreateCredentialInput, + ) -> VcxResult<(Vec, ())> { + Ok(("mock data".into(), ())) + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/formats/issuer/mod.rs b/aries_vcx/src/protocols/issuance_v2/formats/issuer/mod.rs new file mode 100644 index 0000000000..41fc474f98 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/formats/issuer/mod.rs @@ -0,0 +1,101 @@ +pub mod hyperledger_indy; +pub mod ld_proof_vc; + +use async_trait::async_trait; +use messages::msg_fields::protocols::cred_issuance::v2::{ + issue_credential::IssueCredentialAttachmentFormatType, + offer_credential::OfferCredentialAttachmentFormatType, + propose_credential::{ProposeCredentialAttachmentFormatType, ProposeCredentialV2}, + request_credential::{RequestCredentialAttachmentFormatType, RequestCredentialV2}, +}; +use shared_vcx::maybe_known::MaybeKnown; + +use crate::{ + errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, + handlers::util::{extract_attachment_as_base64, get_attachment_with_id}, +}; + +/// Trait representing some issue-credential-v2 format family, containing methods required by an +/// issuer of this format to create attachments of this format. 
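+///
+/// As a rough sketch of how an issuer-side consumer can stay generic over the chosen format
+/// (the incoming `proposal` and the follow-up steps are placeholders):
+///
+/// ```no_run
+/// fn on_proposal<T: IssuerCredentialIssuanceFormat>(proposal: ProposeCredentialV2) -> VcxResult<()> {
+///     let issuer = IssuerV2::<ProposalReceived<T>>::from_proposal(proposal);
+///     let (_details, _preview) = issuer.get_proposal_details()?;
+///     // ... decide whether to respond with an offer or a problem report
+///     Ok(())
+/// }
+/// ```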
+#[async_trait]
+pub trait IssuerCredentialIssuanceFormat {
+    type ProposalDetails;
+
+    type CreateOfferInput;
+    type CreatedOfferMetadata;
+
+    type CreateCredentialInput;
+    type CreatedCredentialMetadata;
+
+    fn supports_request_independent_of_offer() -> bool;
+
+    fn get_proposal_attachment_format() -> MaybeKnown<ProposeCredentialAttachmentFormatType>;
+    fn get_offer_attachment_format() -> MaybeKnown<OfferCredentialAttachmentFormatType>;
+    fn get_request_attachment_format() -> MaybeKnown<RequestCredentialAttachmentFormatType>;
+    fn get_credential_attachment_format() -> MaybeKnown<IssueCredentialAttachmentFormatType>;
+
+    fn extract_proposal_attachment_content(
+        proposal_message: &ProposeCredentialV2,
+    ) -> VcxResult<Vec<u8>> {
+        let attachment_id = proposal_message
+            .content
+            .formats
+            .iter()
+            .find_map(|format| {
+                (format.format == Self::get_proposal_attachment_format())
+                    .then_some(&format.attach_id)
+            })
+            .ok_or(AriesVcxError::from_msg(
+                AriesVcxErrorKind::InvalidMessageFormat,
+                "Message does not contain an attachment with the expected format.",
+            ))?;
+
+        let attachment =
+            get_attachment_with_id(&proposal_message.content.filters_attach, attachment_id)?;
+
+        // TODO - BAD, not ALWAYS base64, could be JSON
+        extract_attachment_as_base64(attachment)
+    }
+
+    fn extract_proposal_details(
+        proposal_message: &ProposeCredentialV2,
+    ) -> VcxResult<Self::ProposalDetails>;
+
+    async fn create_offer_attachment_content(
+        data: &Self::CreateOfferInput,
+    ) -> VcxResult<(Vec<u8>, Self::CreatedOfferMetadata)>;
+
+    fn extract_request_attachment_content(
+        request_message: &RequestCredentialV2,
+    ) -> VcxResult<Vec<u8>> {
+        let attachment_id = request_message
+            .content
+            .formats
+            .iter()
+            .find_map(|format| {
+                (format.format == Self::get_request_attachment_format())
+                    .then_some(&format.attach_id)
+            })
+            .ok_or(AriesVcxError::from_msg(
+                AriesVcxErrorKind::InvalidMessageFormat,
+                "Message does not contain an attachment with the expected format.",
+            ))?;
+
+        let attachment =
+            get_attachment_with_id(&request_message.content.requests_attach, attachment_id)?;
+
+        // TODO - BAD, not ALWAYS base64, could be JSON
+        extract_attachment_as_base64(attachment)
+    }
+
+    async fn create_credential_attachment_content(
+        offer_metadata: &Self::CreatedOfferMetadata,
+        request_message: &RequestCredentialV2,
+        data: &Self::CreateCredentialInput,
+    ) -> VcxResult<(Vec<u8>, Self::CreatedCredentialMetadata)>;
+
+    async fn create_credential_attachment_content_independent_of_offer(
+        request_message: &RequestCredentialV2,
+        data: &Self::CreateCredentialInput,
+    ) -> VcxResult<(Vec<u8>, Self::CreatedCredentialMetadata)>;
+}
diff --git a/aries_vcx_core/src/anoncreds/indy/credentials/mod.rs b/aries_vcx/src/protocols/issuance_v2/formats/mod.rs
similarity index 100%
rename from aries_vcx_core/src/anoncreds/indy/credentials/mod.rs
rename to aries_vcx/src/protocols/issuance_v2/formats/mod.rs
diff --git a/aries_vcx/src/protocols/issuance_v2/holder/mod.rs b/aries_vcx/src/protocols/issuance_v2/holder/mod.rs
new file mode 100644
index 0000000000..0f0b49895e
--- /dev/null
+++ b/aries_vcx/src/protocols/issuance_v2/holder/mod.rs
@@ -0,0 +1,563 @@
+pub mod states;
+
+use std::{error::Error, marker::PhantomData};
+
+use ::messages::decorators::attachment::{Attachment, AttachmentData, AttachmentType};
+use messages::{
+    decorators::thread::Thread,
+    misc::MimeType,
+    msg_fields::protocols::{
+        cred_issuance::v2::{
+            ack::AckCredentialV2,
+            issue_credential::IssueCredentialV2,
+            offer_credential::OfferCredentialV2,
+            problem_report::CredIssuanceProblemReportV2,
+            propose_credential::{
+                ProposeCredentialV2, ProposeCredentialV2Content, ProposeCredentialV2Decorators,
+            },
+            request_credential::{
+                RequestCredentialV2,
RequestCredentialV2Content, RequestCredentialV2Decorators, + }, + AttachmentFormatSpecifier, CredentialPreviewV2, + }, + notification::ack::{AckContent, AckDecorators, AckStatus}, + report_problem::{Description, ProblemReportContent, ProblemReportDecorators}, + }, +}; +use uuid::Uuid; + +use self::states::{ + complete::Complete, credential_received::CredentialReceived, failed::Failed, + offer_received::OfferReceived, proposal_prepared::ProposalPrepared, + request_prepared::RequestPrepared, +}; +use super::{ + formats::holder::HolderCredentialIssuanceFormat, unmatched_thread_id_error, RecoveredSMError, + VcxSMTransitionResult, +}; +use crate::{ + errors::error::VcxResult, + handlers::util::{get_thread_id_or_message_id, matches_thread_id}, +}; + +fn create_proposal_message_from_attachment( + attachment_data: Vec, + preview: Option, + thread_id: Option, +) -> ProposeCredentialV2 { + let attachment_content = AttachmentType::Base64(base64::encode(&attachment_data)); + let attach_id = Uuid::new_v4().to_string(); + let attachment = Attachment::builder() + .id(attach_id.clone()) + .mime_type(MimeType::Json) + .data( + AttachmentData::builder() + .content(attachment_content) + .build(), + ) + .build(); + + let content = ProposeCredentialV2Content::builder() + .formats(vec![AttachmentFormatSpecifier::builder() + .attach_id(attach_id) + .format(T::get_proposal_attachment_format()) + .build()]) + .filters_attach(vec![attachment]) + .credential_preview(preview) + .build(); + + let decorators = ProposeCredentialV2Decorators::builder() + .thread(thread_id.map(|id| Thread::builder().thid(id).build())) + .build(); + + ProposeCredentialV2::builder() + .id(Uuid::new_v4().to_string()) + .content(content) + .decorators(decorators) + .build() +} + +fn create_request_message_from_attachment( + attachment_data: Vec, + thread_id: Option, +) -> RequestCredentialV2 { + let attachment_content = AttachmentType::Base64(base64::encode(&attachment_data)); + let attach_id = uuid::Uuid::new_v4().to_string(); + let attachment = Attachment::builder() + .id(attach_id.clone()) + .mime_type(MimeType::Json) + .data( + AttachmentData::builder() + .content(attachment_content) + .build(), + ) + .build(); + + let content = RequestCredentialV2Content::builder() + .formats(vec![AttachmentFormatSpecifier::builder() + .attach_id(attach_id) + .format(T::get_request_attachment_format()) + .build()]) + .requests_attach(vec![attachment]) + .build(); + + let decorators = RequestCredentialV2Decorators::builder() + .thread(thread_id.map(|id| Thread::builder().thid(id).build())) + .build(); + + RequestCredentialV2::builder() + .id(Uuid::new_v4().to_string()) + .content(content) + .decorators(decorators) + .build() +} + +/// Represents a type-state machine which walks through issue-credential-v2 from the Holder +/// perspective. https://github.com/hyperledger/aries-rfcs/blob/main/features/0453-issue-credential-v2/README.md +/// +/// States in the HolderV2 APIs require knowledge of the credential format being used. As such, this +/// API only supports usage of a single credential format being used throughout a single protocol +/// flow. +/// +/// To indicate which credential format should be used by [HolderV2], an implementation of +/// [HolderCredentialIssuanceFormat] should be used as the generic argument when required. +/// +/// For instance, the following will bootstrap a [HolderV2] into the [ProposalPrepared] state, +/// with the `HyperledgerIndyHolderCredentialIssuanceFormat` format. 
+/// +/// ```no_run +/// let holder = +/// HolderV2::>::with_proposal( +/// &proposal_input, +/// Some(proposal_preview.clone()), +/// ) +/// .await +/// .unwrap(); +/// ``` +/// +/// For more information about formats, see [HolderCredentialIssuanceFormat] documentation. +pub struct HolderV2 { + state: S, + thread_id: String, +} + +impl HolderV2 { + pub fn from_parts(thread_id: String, state: S) -> Self { + Self { state, thread_id } + } + + pub fn into_parts(self) -> (String, S) { + (self.thread_id, self.state) + } + + /// Get the thread ID that is being used for this protocol instance. + pub fn get_thread_id(&self) -> &str { + &self.thread_id + } + + pub fn get_state(&self) -> &S { + &self.state + } +} + +impl HolderV2> { + /// Initiate a new [HolderV2] by preparing a proposal message from the provided input for + /// creating a proposal with the choosen [HolderCredentialIssuanceFormat]. + /// + /// Additionally, a [CredentialPreviewV2] can be provided to attach more proposal information + /// in the proposal message payload. + pub async fn with_proposal( + input_data: &T::CreateProposalInput, + preview: Option, + ) -> VcxResult { + let attachment_data = T::create_proposal_attachment_content(input_data).await?; + let proposal = create_proposal_message_from_attachment::(attachment_data, preview, None); + + Ok(HolderV2 { + thread_id: get_thread_id_or_message_id!(proposal), + state: ProposalPrepared { + proposal, + _marker: PhantomData, + }, + }) + } + + /// Get the prepared proposal message which should be sent to the issuer. + pub fn get_proposal(&self) -> &ProposeCredentialV2 { + &self.state.proposal + } + + /// Receive an incoming [OfferCredentialV2] message for this protocol. On success, the + /// [HolderV2] transitions into the [OfferReceived] state. + /// + /// This API should only be used for offers which are in response to an ongoing [HolderV2] + /// protocol thread. New offers should be received via [HolderV2::from_offer]. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub fn receive_offer( + self, + offer: OfferCredentialV2, + ) -> VcxSMTransitionResult>, Self> { + let is_match = offer + .decorators + .thread + .as_ref() + .map_or(false, |t| t.thid == self.thread_id); + if !is_match { + return Err(RecoveredSMError { + error: unmatched_thread_id_error(offer.into(), &self.thread_id), + state_machine: self, + }); + } + + let new_state = OfferReceived { + offer, + _marker: PhantomData, + }; + + Ok(HolderV2 { + state: new_state, + thread_id: self.thread_id, + }) + } +} + +impl HolderV2> { + /// Initialize a [HolderV2] protocol from a new incoming [OfferCredentialV2] message. + /// + /// The [HolderCredentialIssuanceFormat] used during initialization should be suitable for + /// the attachments within the [OfferCredentialV2] message, or else the [HolderV2] will not + /// be able to transition forward without failure. + /// + /// This API should only be used for offers which are initializing a NEW issue-credential-v2 + /// thread. [OfferCredentialV2] messages which are in response to an ongoing protocol thread + /// should be handled via [HolderV2::receive_offer]. 
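+    ///
+    /// As a rough sketch (the incoming `offer` and what is done with the details are
+    /// placeholders):
+    ///
+    /// ```no_run
+    /// fn on_new_offer<T: HolderCredentialIssuanceFormat>(offer: OfferCredentialV2) -> VcxResult<()> {
+    ///     let holder = HolderV2::<OfferReceived<T>>::from_offer(offer);
+    ///     let (_details, _preview) = holder.get_offer_details()?;
+    ///     // ... inspect the details, then prepare a request or a counter-proposal
+    ///     Ok(())
+    /// }
+    /// ```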
+ pub fn from_offer(offer: OfferCredentialV2) -> Self { + Self { + thread_id: get_thread_id_or_message_id!(offer), + state: OfferReceived { + offer, + _marker: PhantomData, + }, + } + } + + /// Get the details and credential preview of the offer that was received. The returned + /// [HolderCredentialIssuanceFormat::OfferDetails] data will contain data specific to the + /// format being used. + pub fn get_offer_details(&self) -> VcxResult<(T::OfferDetails, &CredentialPreviewV2)> { + let details = T::extract_offer_details(&self.state.offer)?; + let preview = &self.state.offer.content.credential_preview; + + Ok((details, preview)) + } + + /// Respond to an offer by preparing a new proposal. This API can be used repeatedly to + /// negotiate the offer with the issuer until an agreement is reached. + /// + /// A proposal is prepared in the format of [HolderCredentialIssuanceFormat], using the provided + /// input data to create it. Additionally, a [CredentialPreviewV2] can be attached to give + /// further details to the issuer about the proposal. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub async fn prepare_proposal( + self, + input_data: &T::CreateProposalInput, + preview: Option, + ) -> VcxSMTransitionResult>, Self> { + let attachment_data = match T::create_proposal_attachment_content(input_data).await { + Ok(msg) => msg, + Err(error) => { + return Err(RecoveredSMError { + error, + state_machine: self, + }) + } + }; + let proposal = create_proposal_message_from_attachment::( + attachment_data, + preview, + Some(self.thread_id.clone()), + ); + + Ok(HolderV2 { + state: ProposalPrepared { + proposal, + _marker: PhantomData, + }, + thread_id: self.thread_id, + }) + } + + /// Respond to an offer by preparing a request (to accept the offer). The request is prepared in + /// the format of [HolderCredentialIssuanceFormat] using the input data to create it. If the + /// request is successfully prepared, the [HolderV2] will transition to [RequestPrepared] where + /// the request message can be sent. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub async fn prepare_credential_request( + self, + input_data: &T::CreateRequestInput, + ) -> VcxSMTransitionResult>, Self> { + let offer_message = &self.state.offer; + + let (attachment_data, output_metadata) = + match T::create_request_attachment_content(offer_message, input_data).await { + Ok((data, meta)) => (data, meta), + Err(error) => { + return Err(RecoveredSMError { + error, + state_machine: self, + }) + } + }; + + let request = create_request_message_from_attachment::( + attachment_data, + Some(self.thread_id.clone()), + ); + + let new_state = RequestPrepared { + request_preparation_metadata: output_metadata, + request, + }; + + Ok(HolderV2 { + state: new_state, + thread_id: self.thread_id, + }) + } +} + +impl HolderV2> { + /// Initialize a [HolderV2] by preparing a request. This API should only be used to create + /// standalone requests that are not in response to an ongoing protocol thread (i.e. in + /// response to an offer). 
+ /// + /// To create a request in response to an ongoing protocol thread, the + /// [HolderV2::prepare_credential_request] method should be used. + /// + /// The request is prepared in the [HolderCredentialIssuanceFormat] using the input data to + /// create it. Note that the [HolderCredentialIssuanceFormat] MUST support standalone request + /// creation for this function to succeed, some formats (such as hlindy or anoncreds) do not + /// support this. + pub async fn with_request( + input_data: &T::CreateRequestInput, + ) -> VcxResult>> { + let (attachment_data, output_metadata) = + T::create_request_attachment_content_independent_of_offer(input_data).await?; + + let request = create_request_message_from_attachment::(attachment_data, None); + + let thread_id = get_thread_id_or_message_id!(request); + + let new_state = RequestPrepared { + request_preparation_metadata: output_metadata, + request, + }; + + Ok(HolderV2 { + thread_id, + state: new_state, + }) + } + + /// Get the prepared request message which should be sent to the issuer. + pub fn get_request(&self) -> &RequestCredentialV2 { + &self.state.request + } + + /// Receive a credential in response to a request message that was sent to the issuer. + /// The received credential is processed and stored in accordance to the + /// [HolderCredentialIssuanceFormat] being used. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub async fn receive_credential( + self, + credential: IssueCredentialV2, + input_data: &T::StoreCredentialInput, + ) -> VcxSMTransitionResult>, Self> { + let is_match = matches_thread_id!(credential, self.thread_id.as_str()); + if !is_match { + return Err(RecoveredSMError { + error: unmatched_thread_id_error(credential.into(), &self.thread_id), + state_machine: self, + }); + } + let credential_received_metadata = match T::process_and_store_credential( + &credential, + input_data, + &self.state.request_preparation_metadata, + ) + .await + { + Ok(data) => data, + Err(error) => { + return Err(RecoveredSMError { + error, + state_machine: self, + }) + } + }; + + let new_state = CredentialReceived { + credential, + stored_credential_metadata: credential_received_metadata, + }; + Ok(HolderV2 { + state: new_state, + thread_id: self.thread_id, + }) + } +} + +impl HolderV2> { + /// Get details about the credential that was received and stored. + /// The details are specific to the [HolderCredentialIssuanceFormat] being used. + pub fn get_stored_credential_metadata(&self) -> &T::StoredCredentialMetadata { + &self.state.stored_credential_metadata + } + + // TODO - consider enum variants for (HolderV2, HoldverV2) + /// Transition into the [Complete] state, by preparing an Ack message, only if required. 
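+    ///
+    /// For example (a sketch only, assuming `holder` has just received and stored a credential;
+    /// sending the ack is left to the consumer):
+    ///
+    /// ```no_run
+    /// let holder = holder.prepare_ack_if_required();
+    /// if let Some(_ack) = holder.get_ack() {
+    ///     // the issuer asked for an acknowledgement; send the ack message back to them
+    /// }
+    /// ```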
+ pub fn prepare_ack_if_required(self) -> HolderV2> { + let should_ack = self.state.credential.decorators.please_ack.is_some(); + + let ack = if should_ack { + Some( + AckCredentialV2::builder() + .id(uuid::Uuid::new_v4().to_string()) + .content(AckContent::builder().status(AckStatus::Ok).build()) + .decorators( + AckDecorators::builder() + .thread(Thread::builder().thid(self.thread_id.clone()).build()) + .build(), + ) + .build(), + ) + } else { + None + }; + HolderV2 { + state: Complete { + ack, + _marker: PhantomData, + }, + thread_id: self.thread_id, + } + } +} + +impl HolderV2> { + /// Get the prepared Ack message in response to the received credential, ready to be sent to the + /// issuer. If an acknowledgement is not required by the issuer, then [None] is returned and + /// nothing is required to be sent. + pub fn get_ack(&self) -> Option<&AckCredentialV2> { + self.state.ack.as_ref() + } +} + +impl HolderV2 { + /// Get the prepared [CredIssuanceProblemReportV2] to be sent to the issuer to report a failure. + pub fn get_problem_report(&self) -> &CredIssuanceProblemReportV2 { + &self.state.problem_report + } +} + +impl HolderV2 { + /// Transition into the [Failed] state by preparing a problem report message for the issuer. + /// The problem report message is generated by using details from the provided [Error]. + pub fn prepare_problem_report_with_error(self, err: &E) -> HolderV2 + where + E: Error, + { + let content = ProblemReportContent::builder() + .description(Description::builder().code(err.to_string()).build()) + .build(); + + let decorators = ProblemReportDecorators::builder() + .thread(Thread::builder().thid(self.thread_id.clone()).build()) + .build(); + + let report = CredIssuanceProblemReportV2::builder() + .id(Uuid::new_v4().to_string()) + .content(content) + .decorators(decorators) + .build(); + + let new_state = Failed { + problem_report: report, + }; + + HolderV2 { + state: new_state, + thread_id: self.thread_id, + } + } +} + +#[cfg(test)] +mod tests { + use base64::{engine::general_purpose, Engine}; + use messages::decorators::attachment::AttachmentType; + use shared_vcx::maybe_known::MaybeKnown; + + use crate::protocols::issuance_v2::{ + formats::holder::mocks::MockHolderCredentialIssuanceFormat, + holder::{states::proposal_prepared::ProposalPrepared, HolderV2}, + }; + + #[tokio::test] + async fn test_with_proposal_creates_message_with_attachments() { + // note synchronization issues. 
might need to just set this once globally and use constant + // data + let ctx = MockHolderCredentialIssuanceFormat::create_proposal_attachment_content_context(); + + ctx.expect() + .returning(|_| Ok(String::from("data").into_bytes())); + + let ctx2 = MockHolderCredentialIssuanceFormat::get_proposal_attachment_format_context(); + ctx2.expect() + .returning(|| MaybeKnown::Unknown(String::from("format"))); + + let holder = + HolderV2::>::with_proposal( + &String::from("in"), + None, + ) + .await + .unwrap(); + + let proposal = holder.get_proposal(); + + let formats = proposal.content.formats.clone(); + let attachments = proposal.content.filters_attach.clone(); + + assert_eq!(formats.len(), 1); + assert_eq!(attachments.len(), 1); + + assert_eq!(formats[0].attach_id, attachments[0].id.clone().unwrap()); + assert_eq!( + formats[0].format, + MaybeKnown::Unknown(String::from("format")) + ); + + let AttachmentType::Base64(b64_content) = attachments[0].data.content.clone() else { + panic!("wrong attachment type") + }; + + let decoded = general_purpose::URL_SAFE.decode(&b64_content).unwrap(); + + assert_eq!(String::from_utf8(decoded).unwrap(), String::from("data")); + } + + // TODO - unit test all when we're happy with the layout +} diff --git a/aries_vcx/src/protocols/issuance_v2/holder/states/complete.rs b/aries_vcx/src/protocols/issuance_v2/holder/states/complete.rs new file mode 100644 index 0000000000..f8f1b3ffd8 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/holder/states/complete.rs @@ -0,0 +1,23 @@ +use std::marker::PhantomData; + +use messages::msg_fields::protocols::cred_issuance::v2::ack::AckCredentialV2; + +use crate::protocols::issuance_v2::formats::holder::HolderCredentialIssuanceFormat; + +pub struct Complete { + pub(crate) ack: Option, + pub(crate) _marker: PhantomData, +} + +impl Complete { + pub fn new(ack: Option) -> Self { + Self { + ack, + _marker: PhantomData, + } + } + + pub fn get_ack(&self) -> Option<&AckCredentialV2> { + self.ack.as_ref() + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/holder/states/credential_received.rs b/aries_vcx/src/protocols/issuance_v2/holder/states/credential_received.rs new file mode 100644 index 0000000000..0ded5061e3 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/holder/states/credential_received.rs @@ -0,0 +1,29 @@ +use messages::msg_fields::protocols::cred_issuance::v2::issue_credential::IssueCredentialV2; + +use crate::protocols::issuance_v2::formats::holder::HolderCredentialIssuanceFormat; + +pub struct CredentialReceived { + #[allow(dead_code)] // `credential` may become used in future + pub(crate) credential: IssueCredentialV2, + pub(crate) stored_credential_metadata: T::StoredCredentialMetadata, +} + +impl CredentialReceived { + pub fn new( + credential: IssueCredentialV2, + stored_credential_metadata: T::StoredCredentialMetadata, + ) -> Self { + Self { + credential, + stored_credential_metadata, + } + } + + pub fn get_credential(&self) -> &IssueCredentialV2 { + &self.credential + } + + pub fn get_stored_credential_metadata(&self) -> &T::StoredCredentialMetadata { + &self.stored_credential_metadata + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/holder/states/failed.rs b/aries_vcx/src/protocols/issuance_v2/holder/states/failed.rs new file mode 100644 index 0000000000..bf8b9c18a9 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/holder/states/failed.rs @@ -0,0 +1,15 @@ +use messages::msg_fields::protocols::cred_issuance::v2::problem_report::CredIssuanceProblemReportV2; + +pub struct Failed { + 
pub(crate) problem_report: CredIssuanceProblemReportV2, +} + +impl Failed { + pub fn new(problem_report: CredIssuanceProblemReportV2) -> Self { + Self { problem_report } + } + + pub fn get_problem_report(&self) -> &CredIssuanceProblemReportV2 { + &self.problem_report + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/holder/states/mod.rs b/aries_vcx/src/protocols/issuance_v2/holder/states/mod.rs new file mode 100644 index 0000000000..a6677e9b29 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/holder/states/mod.rs @@ -0,0 +1,6 @@ +pub mod complete; +pub mod credential_received; +pub mod failed; +pub mod offer_received; +pub mod proposal_prepared; +pub mod request_prepared; diff --git a/aries_vcx/src/protocols/issuance_v2/holder/states/offer_received.rs b/aries_vcx/src/protocols/issuance_v2/holder/states/offer_received.rs new file mode 100644 index 0000000000..76e10a5af6 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/holder/states/offer_received.rs @@ -0,0 +1,23 @@ +use std::marker::PhantomData; + +use messages::msg_fields::protocols::cred_issuance::v2::offer_credential::OfferCredentialV2; + +use crate::protocols::issuance_v2::formats::holder::HolderCredentialIssuanceFormat; + +pub struct OfferReceived { + pub(crate) offer: OfferCredentialV2, + pub(crate) _marker: PhantomData, +} + +impl OfferReceived { + pub fn new(offer: OfferCredentialV2) -> Self { + Self { + offer, + _marker: PhantomData, + } + } + + pub fn get_offer(&self) -> &OfferCredentialV2 { + &self.offer + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/holder/states/proposal_prepared.rs b/aries_vcx/src/protocols/issuance_v2/holder/states/proposal_prepared.rs new file mode 100644 index 0000000000..fe6ba97558 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/holder/states/proposal_prepared.rs @@ -0,0 +1,23 @@ +use std::marker::PhantomData; + +use messages::msg_fields::protocols::cred_issuance::v2::propose_credential::ProposeCredentialV2; + +use crate::protocols::issuance_v2::formats::holder::HolderCredentialIssuanceFormat; + +pub struct ProposalPrepared { + pub(crate) proposal: ProposeCredentialV2, + pub(crate) _marker: PhantomData, +} + +impl ProposalPrepared { + pub fn new(proposal: ProposeCredentialV2) -> Self { + Self { + proposal, + _marker: PhantomData, + } + } + + pub fn get_proposal(&self) -> &ProposeCredentialV2 { + &self.proposal + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/holder/states/request_prepared.rs b/aries_vcx/src/protocols/issuance_v2/holder/states/request_prepared.rs new file mode 100644 index 0000000000..5bf4a1a65e --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/holder/states/request_prepared.rs @@ -0,0 +1,28 @@ +use messages::msg_fields::protocols::cred_issuance::v2::request_credential::RequestCredentialV2; + +use crate::protocols::issuance_v2::formats::holder::HolderCredentialIssuanceFormat; + +pub struct RequestPrepared { + pub(crate) request: RequestCredentialV2, + pub(crate) request_preparation_metadata: T::CreatedRequestMetadata, +} + +impl RequestPrepared { + pub fn new( + request: RequestCredentialV2, + request_preparation_metadata: T::CreatedRequestMetadata, + ) -> Self { + Self { + request, + request_preparation_metadata, + } + } + + pub fn get_request(&self) -> &RequestCredentialV2 { + &self.request + } + + pub fn get_request_preparation_metadata(&self) -> &T::CreatedRequestMetadata { + &self.request_preparation_metadata + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/mod.rs b/aries_vcx/src/protocols/issuance_v2/issuer/mod.rs new 
file mode 100644 index 0000000000..2db7b0a544 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/mod.rs @@ -0,0 +1,571 @@ +pub mod states; + +use std::{error::Error, marker::PhantomData}; + +use messages::{ + decorators::{ + attachment::{Attachment, AttachmentData, AttachmentType}, + please_ack::{AckOn, PleaseAck}, + thread::Thread, + }, + misc::MimeType, + msg_fields::protocols::{ + cred_issuance::v2::{ + ack::AckCredentialV2, + issue_credential::{ + IssueCredentialV2, IssueCredentialV2Content, IssueCredentialV2Decorators, + }, + offer_credential::{ + OfferCredentialV2, OfferCredentialV2Content, OfferCredentialV2Decorators, + }, + problem_report::CredIssuanceProblemReportV2, + propose_credential::ProposeCredentialV2, + request_credential::RequestCredentialV2, + AttachmentFormatSpecifier, CredentialPreviewV2, + }, + report_problem::{Description, ProblemReportContent, ProblemReportDecorators}, + }, +}; +use uuid::Uuid; + +use self::states::{ + complete::Complete, credential_prepared::CredentialPrepared, failed::Failed, + offer_prepared::OfferPrepared, proposal_received::ProposalReceived, + request_received::RequestReceived, +}; +use super::{ + formats::issuer::IssuerCredentialIssuanceFormat, unmatched_thread_id_error, + VcxSMTransitionResult, +}; +use crate::{ + errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, + handlers::util::{get_thread_id_or_message_id, matches_thread_id}, + protocols::issuance_v2::RecoveredSMError, +}; + +fn create_offer_message_from_attachment( + attachment_data: Vec, + preview: CredentialPreviewV2, + replacement_id: Option, + thread_id: Option, +) -> OfferCredentialV2 { + let attachment_content = AttachmentType::Base64(base64::encode(&attachment_data)); + let attach_id = Uuid::new_v4().to_string(); + let attachment = Attachment::builder() + .id(attach_id.clone()) + .mime_type(MimeType::Json) + .data( + AttachmentData::builder() + .content(attachment_content) + .build(), + ) + .build(); + + let content = OfferCredentialV2Content::builder() + .credential_preview(preview) + .formats(vec![AttachmentFormatSpecifier::builder() + .attach_id(attach_id) + .format(T::get_offer_attachment_format()) + .build()]) + .offers_attach(vec![attachment]) + .replacement_id(replacement_id) + .build(); + + let decorators = OfferCredentialV2Decorators::builder() + .thread(thread_id.map(|id| Thread::builder().thid(id).build())) + .build(); + + OfferCredentialV2::builder() + .id(Uuid::new_v4().to_string()) + .content(content) + .decorators(decorators) + .build() +} + +fn create_credential_message_from_attachment( + attachment_data: Vec, + please_ack: bool, + thread_id: String, + replacement_id: Option, +) -> IssueCredentialV2 { + let attachment_content = AttachmentType::Base64(base64::encode(&attachment_data)); + let attach_id = Uuid::new_v4().to_string(); + let attachment = Attachment::builder() + .id(attach_id.clone()) + .mime_type(MimeType::Json) + .data( + AttachmentData::builder() + .content(attachment_content) + .build(), + ) + .build(); + + let content = IssueCredentialV2Content::builder() + .formats(vec![AttachmentFormatSpecifier::builder() + .attach_id(attach_id) + .format(T::get_credential_attachment_format()) + .build()]) + .credentials_attach(vec![attachment]) + .replacement_id(replacement_id) + .build(); + + let decorators = IssueCredentialV2Decorators::builder() + .thread(Thread::builder().thid(thread_id).build()) + .please_ack(please_ack.then_some(PleaseAck::builder().on(vec![AckOn::Outcome]).build())) + .build(); + + IssueCredentialV2::builder() + 
+        .id(Uuid::new_v4().to_string())
+        .content(content)
+        .decorators(decorators)
+        .build()
+}
+
+/// Represents a type-state machine which walks through issue-credential-v2 from the Issuer
+/// perspective. https://github.com/hyperledger/aries-rfcs/blob/main/features/0453-issue-credential-v2/README.md
+///
+/// States in the [IssuerV2] APIs require knowledge of the credential format being used. As such,
+/// this API only supports usage of a single credential format being used throughout a single
+/// protocol flow.
+///
+/// To indicate which credential format should be used by [IssuerV2], an implementation of
+/// [IssuerCredentialIssuanceFormat] should be used as the generic argument when required.
+///
+/// For instance, the following will bootstrap an [IssuerV2] into the [OfferPrepared] state,
+/// with the `HyperledgerIndyIssuerCredentialIssuanceFormat` format.
+///
+/// ```no_run
+/// let issuer =
+///     IssuerV2::>::with_offer(
+///         &offer_data,
+///         offer_preview,
+///         None,
+///     )
+///     .await
+///     .unwrap();
+/// ```
+///
+/// For more information about formats, see [IssuerCredentialIssuanceFormat] documentation.
+pub struct IssuerV2<S> {
+    state: S,
+    thread_id: String,
+}
+
+impl<S> IssuerV2<S> {
+    pub fn from_parts(thread_id: String, state: S) -> Self {
+        Self { state, thread_id }
+    }
+
+    pub fn into_parts(self) -> (String, S) {
+        (self.thread_id, self.state)
+    }
+
+    /// Get the thread ID that is being used for this protocol instance.
+    pub fn get_thread_id(&self) -> &str {
+        &self.thread_id
+    }
+
+    pub fn get_state(&self) -> &S {
+        &self.state
+    }
+}
+
+impl<T: IssuerCredentialIssuanceFormat> IssuerV2<ProposalReceived<T>> {
+    /// Initialize a new [IssuerV2] by receiving an incoming [ProposeCredentialV2] message from a
+    /// holder.
+    ///
+    /// The [IssuerCredentialIssuanceFormat] used during initialization should be suitable
+    /// for the attachments within the [ProposeCredentialV2] message, or else the [IssuerV2] will
+    /// not be able to transition forward without failure.
+    ///
+    /// This API should only be used for standalone proposals that aren't a part of an existing
+    /// protocol thread. Proposals in response to an ongoing thread should be handled via
+    /// [IssuerV2::receive_proposal].
+    pub fn from_proposal(proposal: ProposeCredentialV2) -> Self {
+        IssuerV2 {
+            thread_id: get_thread_id_or_message_id!(proposal),
+            state: ProposalReceived {
+                proposal,
+                _marker: PhantomData,
+            },
+        }
+    }
+
+    /// Get the details and credential preview (if any) of the proposal that was received. The
+    /// returned [IssuerCredentialIssuanceFormat::ProposalDetails] data will contain data
+    /// specific to the format being used.
+    pub fn get_proposal_details(
+        &self,
+    ) -> VcxResult<(T::ProposalDetails, Option<&CredentialPreviewV2>)> {
+        let details = T::extract_proposal_details(&self.state.proposal)?;
+        let preview = self.state.proposal.content.credential_preview.as_ref();
+
+        Ok((details, preview))
+    }
+
+    /// Respond to a proposal by preparing a new offer. This API can be used repeatedly to negotiate
+    /// the offer with the holder until an agreement is reached.
+    ///
+    /// An offer is prepared in the format of [IssuerCredentialIssuanceFormat], using the provided
+    /// input data to create it. Additionally, a [CredentialPreviewV2] is attached to give further
+    /// details to the holder about the offer.
+    ///
+    /// In the event of failure, an error is returned which contains the reason for failure
+    /// and the state machine before any transitions.
Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub async fn prepare_offer( + self, + input_data: &T::CreateOfferInput, + preview: CredentialPreviewV2, + replacement_id: Option, + ) -> VcxSMTransitionResult>, Self> { + let (attachment_data, offer_metadata) = + match T::create_offer_attachment_content(input_data).await { + Ok(data) => data, + Err(error) => { + return Err(RecoveredSMError { + error, + state_machine: self, + }) + } + }; + + let offer = create_offer_message_from_attachment::( + attachment_data, + preview, + replacement_id, + Some(self.thread_id.clone()), + ); + + let new_state = OfferPrepared { + offer_metadata, + offer, + }; + + Ok(IssuerV2 { + state: new_state, + thread_id: self.thread_id, + }) + } +} + +impl IssuerV2> { + /// Initiate a new [IssuerV2] by preparing a offer message from the provided input for + /// creating a offer with the choosen [IssuerCredentialIssuanceFormat]. + /// + /// Additionally, a [CredentialPreviewV2] is provided to attach more credential information + /// in the offer message payload. + pub async fn with_offer( + input_data: &T::CreateOfferInput, + preview: CredentialPreviewV2, + replacement_id: Option, + ) -> VcxResult { + let (attachment_data, offer_metadata) = + T::create_offer_attachment_content(input_data).await?; + + let offer = create_offer_message_from_attachment::( + attachment_data, + preview, + replacement_id, + None, + ); + + let thread_id = get_thread_id_or_message_id!(offer); + + let new_state = OfferPrepared { + offer_metadata, + offer, + }; + + Ok(IssuerV2 { + state: new_state, + thread_id, + }) + } + + /// Get the prepared offer message which should be sent to the holder. + pub fn get_offer(&self) -> &OfferCredentialV2 { + &self.state.offer + } + + /// Receive an incoming [ProposeCredentialV2] message for this protocol. On success, the + /// [IssuerV2] transitions into the [ProposalReceived] state. + /// + /// This API should only be used for proposals which are in response to an ongoing [IssuerV2] + /// protocol thread. New proposals should be received via [IssuerV2::from_proposal]. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub fn receive_proposal( + self, + proposal: ProposeCredentialV2, + ) -> VcxSMTransitionResult>, Self> { + let is_match = proposal + .decorators + .thread + .as_ref() + .map_or(false, |t| t.thid == self.thread_id); + if !is_match { + return Err(RecoveredSMError { + error: unmatched_thread_id_error(proposal.into(), &self.thread_id), + state_machine: self, + }); + } + + let new_state = ProposalReceived { + proposal, + _marker: PhantomData, + }; + + Ok(IssuerV2 { + state: new_state, + thread_id: self.thread_id, + }) + } + + /// Receive a request in response to an offer that was sent to the holder. + /// + /// This API should only be used for requests that are in response to an ongoing [IssuerV2] + /// protocol thread. To receive new standalone requests, [IssuerV2::from_request] should be + /// used. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. 
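+    ///
+    /// A rough sketch of the recovery path (assuming the consumer treats the failure as terminal
+    /// and that the recovered error can be passed to [IssuerV2::prepare_problem_report_with_error];
+    /// sending the report is left to the consumer):
+    ///
+    /// ```no_run
+    /// match issuer.receive_request(request) {
+    ///     Ok(issuer) => {
+    ///         // continue towards `prepare_credential`
+    ///     }
+    ///     Err(recovered) => {
+    ///         // e.g. a thread ID mismatch; recover the state machine and report the problem
+    ///         let failed = recovered
+    ///             .state_machine
+    ///             .prepare_problem_report_with_error(&recovered.error);
+    ///         let _report = failed.get_problem_report();
+    ///     }
+    /// }
+    /// ```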
+ pub fn receive_request( + self, + request: RequestCredentialV2, + ) -> VcxSMTransitionResult>, Self> { + let is_match = request + .decorators + .thread + .as_ref() + .map_or(false, |t| t.thid == self.thread_id); + if !is_match { + return Err(RecoveredSMError { + error: unmatched_thread_id_error(request.into(), &self.thread_id), + state_machine: self, + }); + } + + let new_state = RequestReceived { + from_offer_metadata: Some(self.state.offer_metadata), + request, + }; + + Ok(IssuerV2 { + state: new_state, + thread_id: self.thread_id, + }) + } +} + +impl IssuerV2> { + /// Initialize an [IssuerV2] by receiving a standalone request message from a holder. This API + /// should only be used for standalone requests not in response to an ongoing protocol thread. + /// + /// To receive a request in response to an ongoing protocol thread, the + /// [IssuerV2::receive_request] method should be used. + /// + /// The request should contain an attachment in the suitable [IssuerCredentialIssuanceFormat] + /// format, and the [IssuerCredentialIssuanceFormat] MUST support receiving standalone requests + /// for this function to succeed. Some formats (such as hlindy or anoncreds) do not + /// support this. + pub fn from_request(request: RequestCredentialV2) -> VcxResult { + if !T::supports_request_independent_of_offer() { + return Err(AriesVcxError::from_msg( + AriesVcxErrorKind::ActionNotSupported, + "Receiving a request independent of an offer is unsupported for this format", + )); + } + + let thread_id = get_thread_id_or_message_id!(request); + + let new_state = RequestReceived { + from_offer_metadata: None, + request, + }; + + Ok(Self { + state: new_state, + thread_id, + }) + } + + /// Prepare a credential message in response to a received request. The prepared credential will + /// be in the [IssuerCredentialIssuanceFormat] format, and will be created using the associated + /// input data. + /// + /// Additionally other flags can be attached to the prepared message for the holder. Notably: + /// * `please_ack` - whether the holder should acknowledge that they receive the credential + /// * `replacement_id` - a unique ID which can be used across credential issuances to indicate + /// that this credential should effectively 'replace' the last credential that this issuer + /// issued to them with the same `replacement_id`. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. 
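+    ///
+    /// For example (a sketch only; `cred_data` stands in for this format's
+    /// [IssuerCredentialIssuanceFormat::CreateCredentialInput], and `issuer` is assumed to have
+    /// just received a request):
+    ///
+    /// ```no_run
+    /// if let Ok(issuer) = issuer.prepare_credential(&cred_data, Some(true), None).await {
+    ///     // send `issuer.get_credential()` to the holder, then wait for their ack
+    ///     assert!(issuer.is_expecting_ack());
+    /// }
+    /// ```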
+ pub async fn prepare_credential( + self, + input_data: &T::CreateCredentialInput, + please_ack: Option, // defaults to false + replacement_id: Option, + ) -> VcxSMTransitionResult>, Self> { + let request = &self.state.request; + + let res = match &self.state.from_offer_metadata { + Some(offer) => { + T::create_credential_attachment_content(offer, request, input_data).await + } + None => { + T::create_credential_attachment_content_independent_of_offer(request, input_data) + .await + } + }; + + let (attachment_data, cred_metadata) = match res { + Ok(data) => data, + Err(error) => { + return Err(RecoveredSMError { + error, + state_machine: self, + }) + } + }; + + let please_ack = please_ack.unwrap_or(false); + let credential = create_credential_message_from_attachment::( + attachment_data, + please_ack, + self.thread_id.clone(), + replacement_id, + ); + + let new_state = CredentialPrepared { + from_offer_metadata: self.state.from_offer_metadata, + credential_metadata: cred_metadata, + credential, + please_ack, + }; + + Ok(IssuerV2 { + state: new_state, + thread_id: self.thread_id, + }) + } +} + +impl IssuerV2> { + /// Get the prepared credential message which should be sent to the holder. + pub fn get_credential(&self) -> &IssueCredentialV2 { + &self.state.credential + } + + /// Get details about the credential that was prepared. + /// The details are specific to the [IssuerCredentialIssuanceFormat] being used. + pub fn get_credential_creation_metadata(&self) -> &T::CreatedCredentialMetadata { + &self.state.credential_metadata + } + + /// Whether or not this [IssuerV2] is expecting an Ack message to complete. + pub fn is_expecting_ack(&self) -> bool { + self.state.please_ack + } + + /// Transition into a completed state without receiving an ack message from the holder. + /// + /// In the case where the [IssuerV2] was expecting an ack, this method will fail. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub fn complete_without_ack(self) -> VcxSMTransitionResult>, Self> { + if self.is_expecting_ack() { + return Err(RecoveredSMError { + error: AriesVcxError::from_msg( + AriesVcxErrorKind::ActionNotSupported, + "Cannot transition until ACK is received", + ), + state_machine: self, + }); + } + + let new_state = Complete { + ack: None, + _marker: PhantomData, + }; + + Ok(IssuerV2 { + state: new_state, + thread_id: self.thread_id, + }) + } + + /// Transition into a completed state by receiving an incoming ack message from the holder. + /// + /// In the event of failure, an error is returned which contains the reason for failure + /// and the state machine before any transitions. Consumers should decide whether the failure + /// is terminal, in which case they should prepare a problem report. + pub fn complete_with_ack( + self, + ack: AckCredentialV2, + ) -> VcxSMTransitionResult>, Self> { + let is_match = matches_thread_id!(ack, self.thread_id.as_str()); + if !is_match { + return Err(RecoveredSMError { + error: unmatched_thread_id_error(ack.into(), &self.thread_id), + state_machine: self, + }); + } + + let new_state = Complete { + ack: Some(ack), + _marker: PhantomData, + }; + + Ok(IssuerV2 { + state: new_state, + thread_id: self.thread_id, + }) + } +} + +impl IssuerV2 { + /// Get the prepared [CredIssuanceProblemReportV2] to be sent to the holder to report a failure. 
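One possible way to wind the protocol up, again as a hedged sketch under the same assumptions (the wrapper function and its return shape are not part of the changeset):

    // Hypothetical completion handling for an issuer holding a prepared credential.
    fn finish_with_ack<F: IssuerCredentialIssuanceFormat>(
        issuer: IssuerV2<CredentialPrepared<F>>,
        ack: AckCredentialV2,
    ) -> Result<IssuerV2<Complete<F>>, IssuerV2<Failed>> {
        match issuer.complete_with_ack(ack) {
            Ok(done) => Ok(done),
            // e.g. a thread-id mismatch: the machine comes back untouched; if the consumer
            // deems this terminal, it can move to Failed and send the prepared problem report.
            Err(recovered) => Err(recovered
                .state_machine
                .prepare_problem_report_with_error(&recovered.error)),
        }
    }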
+ pub fn get_problem_report(&self) -> &CredIssuanceProblemReportV2 { + &self.state.problem_report + } +} + +impl IssuerV2 { + /// Transition into the [Failed] state by preparing a problem report message for the holder. + /// The problem report message is generated by using details from the provided [Error]. + pub fn prepare_problem_report_with_error(self, err: &E) -> IssuerV2 + where + E: Error, + { + let content = ProblemReportContent::builder() + .description(Description::builder().code(err.to_string()).build()) + .build(); + + let decorators = ProblemReportDecorators::builder() + .thread(Thread::builder().thid(self.thread_id.clone()).build()) + .build(); + + let report = CredIssuanceProblemReportV2::builder() + .id(Uuid::new_v4().to_string()) + .content(content) + .decorators(decorators) + .build(); + + let new_state = Failed { + problem_report: report, + }; + + IssuerV2 { + state: new_state, + thread_id: self.thread_id, + } + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/states/complete.rs b/aries_vcx/src/protocols/issuance_v2/issuer/states/complete.rs new file mode 100644 index 0000000000..648d393095 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/states/complete.rs @@ -0,0 +1,23 @@ +use std::marker::PhantomData; + +use messages::msg_fields::protocols::cred_issuance::v2::ack::AckCredentialV2; + +use crate::protocols::issuance_v2::formats::issuer::IssuerCredentialIssuanceFormat; + +pub struct Complete { + pub(crate) ack: Option, + pub(crate) _marker: PhantomData, +} + +impl Complete { + pub fn new(ack: Option) -> Self { + Self { + ack, + _marker: PhantomData, + } + } + + pub fn get_ack(&self) -> Option<&AckCredentialV2> { + self.ack.as_ref() + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/states/credential_prepared.rs b/aries_vcx/src/protocols/issuance_v2/issuer/states/credential_prepared.rs new file mode 100644 index 0000000000..6e7a47a3ce --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/states/credential_prepared.rs @@ -0,0 +1,42 @@ +use messages::msg_fields::protocols::cred_issuance::v2::issue_credential::IssueCredentialV2; + +use crate::protocols::issuance_v2::formats::issuer::IssuerCredentialIssuanceFormat; + +pub struct CredentialPrepared { + pub(crate) from_offer_metadata: Option, + pub(crate) credential_metadata: T::CreatedCredentialMetadata, + pub(crate) credential: IssueCredentialV2, + pub(crate) please_ack: bool, +} + +impl CredentialPrepared { + pub fn new( + from_offer_metadata: Option, + credential_metadata: T::CreatedCredentialMetadata, + credential: IssueCredentialV2, + please_ack: bool, + ) -> Self { + Self { + from_offer_metadata, + credential_metadata, + credential, + please_ack, + } + } + + pub fn get_from_offer_metadata(&self) -> Option<&T::CreatedOfferMetadata> { + self.from_offer_metadata.as_ref() + } + + pub fn get_credential_metadata(&self) -> &T::CreatedCredentialMetadata { + &self.credential_metadata + } + + pub fn get_credential(&self) -> &IssueCredentialV2 { + &self.credential + } + + pub fn get_please_ack(&self) -> bool { + self.please_ack + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/states/failed.rs b/aries_vcx/src/protocols/issuance_v2/issuer/states/failed.rs new file mode 100644 index 0000000000..bf8b9c18a9 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/states/failed.rs @@ -0,0 +1,15 @@ +use messages::msg_fields::protocols::cred_issuance::v2::problem_report::CredIssuanceProblemReportV2; + +pub struct Failed { + pub(crate) problem_report: 
CredIssuanceProblemReportV2, +} + +impl Failed { + pub fn new(problem_report: CredIssuanceProblemReportV2) -> Self { + Self { problem_report } + } + + pub fn get_problem_report(&self) -> &CredIssuanceProblemReportV2 { + &self.problem_report + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/states/mod.rs b/aries_vcx/src/protocols/issuance_v2/issuer/states/mod.rs new file mode 100644 index 0000000000..79b173bcdf --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/states/mod.rs @@ -0,0 +1,6 @@ +pub mod complete; +pub mod credential_prepared; +pub mod failed; +pub mod offer_prepared; +pub mod proposal_received; +pub mod request_received; diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/states/offer_prepared.rs b/aries_vcx/src/protocols/issuance_v2/issuer/states/offer_prepared.rs new file mode 100644 index 0000000000..d8511fe45e --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/states/offer_prepared.rs @@ -0,0 +1,25 @@ +use messages::msg_fields::protocols::cred_issuance::v2::offer_credential::OfferCredentialV2; + +use crate::protocols::issuance_v2::formats::issuer::IssuerCredentialIssuanceFormat; + +pub struct OfferPrepared { + pub(crate) offer_metadata: T::CreatedOfferMetadata, + pub(crate) offer: OfferCredentialV2, +} + +impl OfferPrepared { + pub fn new(offer_metadata: T::CreatedOfferMetadata, offer: OfferCredentialV2) -> Self { + Self { + offer_metadata, + offer, + } + } + + pub fn get_offer_metadata(&self) -> &T::CreatedOfferMetadata { + &self.offer_metadata + } + + pub fn get_offer(&self) -> &OfferCredentialV2 { + &self.offer + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/states/proposal_received.rs b/aries_vcx/src/protocols/issuance_v2/issuer/states/proposal_received.rs new file mode 100644 index 0000000000..989c518c0b --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/states/proposal_received.rs @@ -0,0 +1,23 @@ +use std::marker::PhantomData; + +use messages::msg_fields::protocols::cred_issuance::v2::propose_credential::ProposeCredentialV2; + +use crate::protocols::issuance_v2::formats::issuer::IssuerCredentialIssuanceFormat; + +pub struct ProposalReceived { + pub(crate) proposal: ProposeCredentialV2, + pub(crate) _marker: PhantomData, +} + +impl ProposalReceived { + pub fn new(proposal: ProposeCredentialV2) -> Self { + Self { + proposal, + _marker: PhantomData, + } + } + + pub fn get_proposal(&self) -> &ProposeCredentialV2 { + &self.proposal + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/issuer/states/request_received.rs b/aries_vcx/src/protocols/issuance_v2/issuer/states/request_received.rs new file mode 100644 index 0000000000..8399dcc1c4 --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/issuer/states/request_received.rs @@ -0,0 +1,28 @@ +use messages::msg_fields::protocols::cred_issuance::v2::request_credential::RequestCredentialV2; + +use crate::protocols::issuance_v2::formats::issuer::IssuerCredentialIssuanceFormat; + +pub struct RequestReceived { + pub(crate) from_offer_metadata: Option, + pub(crate) request: RequestCredentialV2, +} + +impl RequestReceived { + pub fn new( + from_offer_metadata: Option, + request: RequestCredentialV2, + ) -> Self { + Self { + from_offer_metadata, + request, + } + } + + pub fn get_from_offer_metadata(&self) -> Option<&T::CreatedOfferMetadata> { + self.from_offer_metadata.as_ref() + } + + pub fn get_request(&self) -> &RequestCredentialV2 { + &self.request + } +} diff --git a/aries_vcx/src/protocols/issuance_v2/mod.rs b/aries_vcx/src/protocols/issuance_v2/mod.rs 
new file mode 100644 index 0000000000..4b083ab43e --- /dev/null +++ b/aries_vcx/src/protocols/issuance_v2/mod.rs @@ -0,0 +1,35 @@ +use messages::AriesMessage; + +use crate::errors::error::{AriesVcxError, AriesVcxErrorKind}; + +pub mod formats; +pub mod holder; +pub mod issuer; + +// TODO - better name? +pub struct RecoveredSMError { + pub error: AriesVcxError, + pub state_machine: T, +} + +impl std::fmt::Debug for RecoveredSMError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RecoveredSMError") + .field("error", &self.error) + .finish() + } +} + +// TODO - impl Error for RecoveredSMError? + +type VcxSMTransitionResult = Result>; + +fn unmatched_thread_id_error(msg: AriesMessage, expected_thid: &str) -> AriesVcxError { + AriesVcxError::from_msg( + AriesVcxErrorKind::InvalidJson, + format!( + "Cannot handle message {:?}: thread id does not match, expected {:?}", + msg, expected_thid + ), + ) +} diff --git a/aries_vcx/src/protocols/mediated_connection/invitee/state_machine.rs b/aries_vcx/src/protocols/mediated_connection/invitee/state_machine.rs index bf5b7f5663..18676f57d9 100644 --- a/aries_vcx/src/protocols/mediated_connection/invitee/state_machine.rs +++ b/aries_vcx/src/protocols/mediated_connection/invitee/state_machine.rs @@ -1,4 +1,4 @@ -use std::{clone::Clone, collections::HashMap, sync::Arc}; +use std::{clone::Clone, collections::HashMap}; use aries_vcx_core::wallet::base_wallet::BaseWallet; use chrono::Utc; @@ -340,7 +340,7 @@ impl SmConnectionInvitee { self, routing_keys: Vec, service_endpoint: Url, - send_message: SendClosureConnection, + send_message: SendClosureConnection<'_>, ) -> VcxResult { let (state, thread_id) = match self.state { InviteeFullState::Invited(ref state) => { @@ -372,9 +372,9 @@ impl SmConnectionInvitee { pub async fn handle_connection_response( self, - wallet: &Arc, + wallet: &impl BaseWallet, response: Response, - send_message: SendClosureConnection, + send_message: SendClosureConnection<'_>, ) -> VcxResult { verify_thread_id(&self.get_thread_id(), &response.clone().into())?; @@ -456,7 +456,7 @@ impl SmConnectionInvitee { Ok(Self { state, ..self }) } - pub async fn handle_send_ack(self, send_message: SendClosureConnection) -> VcxResult { + pub async fn handle_send_ack(self, send_message: SendClosureConnection<'_>) -> VcxResult { let state = match self.state { InviteeFullState::Responded(ref state) => { let sender_vk = self.pairwise_info().pw_vk.clone(); diff --git a/aries_vcx/src/protocols/mediated_connection/inviter/state_machine.rs b/aries_vcx/src/protocols/mediated_connection/inviter/state_machine.rs index 12162d183c..ec1a61f65d 100644 --- a/aries_vcx/src/protocols/mediated_connection/inviter/state_machine.rs +++ b/aries_vcx/src/protocols/mediated_connection/inviter/state_machine.rs @@ -1,4 +1,4 @@ -use std::{clone::Clone, collections::HashMap, sync::Arc}; +use std::{clone::Clone, collections::HashMap}; use aries_vcx_core::wallet::base_wallet::BaseWallet; use chrono::Utc; @@ -241,12 +241,12 @@ impl SmConnectionInviter { pub async fn handle_connection_request<'a>( self, - wallet: &'a Arc, + wallet: &'a impl BaseWallet, request: Request, new_pairwise_info: &'a PairwiseInfo, new_routing_keys: Vec, new_service_endpoint: Url, - send_message: SendClosureConnection, + send_message: SendClosureConnection<'_>, ) -> VcxResult { if !matches!(self.state, InviterFullState::Initial(_)) { verify_thread_id(&self.get_thread_id(), &request.clone().into())?; @@ -324,7 +324,7 @@ impl SmConnectionInviter { pub async fn 
handle_send_response( self, - send_message: SendClosureConnection, + send_message: SendClosureConnection<'_>, ) -> VcxResult { let state = match self.state { InviterFullState::Requested(state) => { @@ -368,7 +368,7 @@ impl SmConnectionInviter { async fn build_response( &self, - wallet: &Arc, + wallet: &impl BaseWallet, thread_id: String, new_pairwise_info: &PairwiseInfo, new_routing_keys: Vec, diff --git a/aries_vcx/src/protocols/mod.rs b/aries_vcx/src/protocols/mod.rs index ef020becf9..853ba26f27 100644 --- a/aries_vcx/src/protocols/mod.rs +++ b/aries_vcx/src/protocols/mod.rs @@ -7,16 +7,18 @@ use crate::errors::error::VcxResult; pub mod common; pub mod connection; pub mod issuance; +pub mod issuance_v2; pub mod mediated_connection; pub mod oob; pub mod proof_presentation; pub mod revocation_notification; pub mod trustping; -pub type SendClosure = - Box BoxFuture<'static, VcxResult<()>> + Send + Sync>; -pub type SendClosureConnection = Box< - dyn FnOnce(AriesMessage, String, AriesDidDoc) -> BoxFuture<'static, VcxResult<()>> +pub type SendClosure<'a> = + Box BoxFuture<'a, VcxResult<()>> + Send + Sync + 'a>; +pub type SendClosureConnection<'a> = Box< + dyn FnOnce(AriesMessage, String, AriesDidDoc) -> BoxFuture<'a, VcxResult<()>> + Send - + Sync, + + Sync + + 'a, >; diff --git a/aries_vcx/src/protocols/proof_presentation/prover/state_machine.rs b/aries_vcx/src/protocols/proof_presentation/prover/state_machine.rs index a0a3bc830a..07318b0cd0 100644 --- a/aries_vcx/src/protocols/proof_presentation/prover/state_machine.rs +++ b/aries_vcx/src/protocols/proof_presentation/prover/state_machine.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, fmt, sync::Arc}; +use std::{collections::HashMap, fmt}; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, @@ -229,8 +229,8 @@ impl ProverSM { pub async fn generate_presentation( self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, credentials: SelectedCredentials, self_attested_attrs: HashMap, ) -> VcxResult { diff --git a/aries_vcx/src/protocols/proof_presentation/prover/states/presentation_request_received.rs b/aries_vcx/src/protocols/proof_presentation/prover/states/presentation_request_received.rs index 70fe4125d8..ae9fcd4cd9 100644 --- a/aries_vcx/src/protocols/proof_presentation/prover/states/presentation_request_received.rs +++ b/aries_vcx/src/protocols/proof_presentation/prover/states/presentation_request_received.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, @@ -13,7 +13,7 @@ use messages::msg_fields::protocols::{ use uuid::Uuid; use crate::{ - common::proofs::prover::prover::generate_indy_proof, + common::proofs::prover::generate_indy_proof, errors::error::prelude::*, handlers::{ proof_presentation::types::SelectedCredentials, @@ -56,8 +56,8 @@ impl PresentationRequestReceived { pub async fn build_presentation( &self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, credentials: &SelectedCredentials, self_attested_attrs: &HashMap, ) -> VcxResult { diff --git a/aries_vcx/src/protocols/proof_presentation/verifier/state_machine.rs b/aries_vcx/src/protocols/proof_presentation/verifier/state_machine.rs index a3a0789ac5..513cfc4584 100644 --- a/aries_vcx/src/protocols/proof_presentation/verifier/state_machine.rs +++ 
b/aries_vcx/src/protocols/proof_presentation/verifier/state_machine.rs @@ -1,4 +1,4 @@ -use std::{fmt::Display, sync::Arc}; +use std::fmt::Display; use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, @@ -255,8 +255,8 @@ impl VerifierSM { pub async fn verify_presentation<'a>( self, - ledger: &'a Arc, - anoncreds: &'a Arc, + ledger: &'a impl AnoncredsLedgerRead, + anoncreds: &'a impl BaseAnonCreds, presentation: Presentation, ) -> VcxResult { verify_thread_id( @@ -282,9 +282,7 @@ impl VerifierSM { (state, presentation, PresentationVerificationStatus::Invalid) .into(), ), - _ => { - VerifierFullState::Finished((state, problem_report.clone()).into()) - } + _ => VerifierFullState::Finished((state, problem_report).into()), } } } diff --git a/aries_vcx/src/protocols/proof_presentation/verifier/states/presentation_request_sent.rs b/aries_vcx/src/protocols/proof_presentation/verifier/states/presentation_request_sent.rs index f7f3036c47..3b18743f14 100644 --- a/aries_vcx/src/protocols/proof_presentation/verifier/states/presentation_request_sent.rs +++ b/aries_vcx/src/protocols/proof_presentation/verifier/states/presentation_request_sent.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, ledger::base_ledger::AnoncredsLedgerRead, }; @@ -9,9 +7,8 @@ use messages::msg_fields::protocols::{ }; use crate::{ - common::proofs::verifier::verifier::validate_indy_proof, + common::proofs::verifier::validate_indy_proof, errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, - global::settings, handlers::util::{get_attach_as_string, matches_thread_id, Status}, protocols::proof_presentation::verifier::{ states::finished::FinishedState, verification_status::PresentationVerificationStatus, @@ -26,12 +23,12 @@ pub struct PresentationRequestSentState { impl PresentationRequestSentState { pub async fn verify_presentation( &self, - ledger: &Arc, - anoncreds: &Arc, + ledger: &impl AnoncredsLedgerRead, + anoncreds: &impl BaseAnonCreds, presentation: &Presentation, thread_id: &str, ) -> VcxResult<()> { - if !settings::indy_mocks_enabled() && !matches_thread_id!(presentation, thread_id) { + if !matches_thread_id!(presentation, thread_id) { return Err(AriesVcxError::from_msg( AriesVcxErrorKind::InvalidJson, format!( diff --git a/aries_vcx/src/protocols/revocation_notification/mod.rs b/aries_vcx/src/protocols/revocation_notification/mod.rs index b327fa9f4d..9252096adb 100644 --- a/aries_vcx/src/protocols/revocation_notification/mod.rs +++ b/aries_vcx/src/protocols/revocation_notification/mod.rs @@ -14,7 +14,7 @@ pub mod test_utils { use crate::{errors::error::VcxResult, protocols::SendClosure, utils::constants::REV_REG_ID}; - pub fn _send_message() -> SendClosure { + pub fn _send_message() -> SendClosure<'static> { Box::new(|_: AriesMessage| Box::pin(async { VcxResult::Ok(()) })) } diff --git a/aries_vcx/src/protocols/revocation_notification/receiver/state_machine.rs b/aries_vcx/src/protocols/revocation_notification/receiver/state_machine.rs index f3adf33f9f..28f0d23a3a 100644 --- a/aries_vcx/src/protocols/revocation_notification/receiver/state_machine.rs +++ b/aries_vcx/src/protocols/revocation_notification/receiver/state_machine.rs @@ -39,7 +39,7 @@ pub enum ReceiverFullState { impl RevocationNotificationReceiverSM { pub fn create(rev_reg_id: String, cred_rev_id: String) -> Self { Self { - state: ReceiverFullState::Initial(InitialState::new()), + state: ReceiverFullState::Initial(InitialState), rev_reg_id, 
cred_rev_id, } @@ -70,7 +70,7 @@ impl RevocationNotificationReceiverSM { pub async fn handle_revocation_notification( self, notification: Revoke, - send_message: SendClosure, + send_message: SendClosure<'_>, ) -> VcxResult { let state = match self.state { ReceiverFullState::Initial(_) => { @@ -129,7 +129,7 @@ impl RevocationNotificationReceiverSM { Ok(Self { state, ..self }) } - pub async fn send_ack(self, send_message: SendClosure) -> VcxResult { + pub async fn send_ack(self, send_message: SendClosure<'_>) -> VcxResult { let state = match self.state { ReceiverFullState::NotificationReceived(_) | ReceiverFullState::Finished(_) => { let notification = self.get_notification()?; @@ -250,7 +250,7 @@ pub mod test_utils { RevocationNotificationReceiverSM::create(_rev_reg_id(), _cred_rev_id()) } - pub fn _send_message_but_fail() -> SendClosure { + pub fn _send_message_but_fail() -> SendClosure<'static> { Box::new(|_: AriesMessage| { Box::pin(async { Err(AriesVcxError::from_msg( diff --git a/aries_vcx/src/protocols/revocation_notification/receiver/states/initial.rs b/aries_vcx/src/protocols/revocation_notification/receiver/states/initial.rs index 57c1641a6a..1f8325ee03 100644 --- a/aries_vcx/src/protocols/revocation_notification/receiver/states/initial.rs +++ b/aries_vcx/src/protocols/revocation_notification/receiver/states/initial.rs @@ -1,8 +1,2 @@ #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -pub struct InitialState {} - -impl InitialState { - pub fn new() -> Self { - Self {} - } -} +pub struct InitialState; diff --git a/aries_vcx/src/protocols/revocation_notification/sender/state_machine.rs b/aries_vcx/src/protocols/revocation_notification/sender/state_machine.rs index dac51a9579..b515714f4c 100644 --- a/aries_vcx/src/protocols/revocation_notification/sender/state_machine.rs +++ b/aries_vcx/src/protocols/revocation_notification/sender/state_machine.rs @@ -42,7 +42,7 @@ pub struct SenderConfig { impl RevocationNotificationSenderSM { pub fn create() -> Self { Self { - state: SenderFullState::Initial(InitialState::new()), + state: SenderFullState::Initial(InitialState), } } @@ -68,7 +68,11 @@ impl RevocationNotificationSenderSM { } } - pub async fn send(self, config: SenderConfig, send_message: SendClosure) -> VcxResult { + pub async fn send( + self, + config: SenderConfig, + send_message: SendClosure<'_>, + ) -> VcxResult { let state = match self.state { SenderFullState::Initial(_) | SenderFullState::NotificationSent(_) => { let SenderConfig { diff --git a/aries_vcx/src/protocols/revocation_notification/sender/states/initial.rs b/aries_vcx/src/protocols/revocation_notification/sender/states/initial.rs index 57c1641a6a..1f8325ee03 100644 --- a/aries_vcx/src/protocols/revocation_notification/sender/states/initial.rs +++ b/aries_vcx/src/protocols/revocation_notification/sender/states/initial.rs @@ -1,8 +1,2 @@ #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -pub struct InitialState {} - -impl InitialState { - pub fn new() -> Self { - Self {} - } -} +pub struct InitialState; diff --git a/aries_vcx/src/utils/devsetup.rs b/aries_vcx/src/utils/devsetup.rs index c455e132be..1e01db48a6 100644 --- a/aries_vcx/src/utils/devsetup.rs +++ b/aries_vcx/src/utils/devsetup.rs @@ -8,14 +8,7 @@ use std::{ use agency_client::testing::mocking::{enable_agency_mocks, AgencyMockDecrypted}; use aries_vcx_core::{ - global::settings::{ - disable_indy_mocks as disable_indy_mocks_core, enable_indy_mocks as enable_indy_mocks_core, - reset_config_values_ariesvcxcore, - }, - ledger::{ - 
base_ledger::{AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite}, - indy::pool::test_utils::{create_testpool_genesis_txn_file, get_temp_file_path}, - }, + ledger::indy::pool::test_utils::{create_testpool_genesis_txn_file, get_temp_file_path}, wallet::indy::{ did_mocks::DidMocks, wallet::{create_and_open_wallet, create_and_store_my_did}, @@ -25,39 +18,16 @@ use aries_vcx_core::{ }; use chrono::{DateTime, Duration, Utc}; -#[cfg(feature = "modular_libs")] +#[cfg(all(feature = "credx", feature = "vdrtools_wallet"))] use crate::core::profile::modular_libs_profile::ModularLibsProfile; -#[cfg(feature = "vdrtools")] -use crate::core::profile::vdrtools_profile::VdrtoolsProfile; +#[cfg(feature = "vdr_proxy_ledger")] +use crate::core::profile::vdr_proxy_profile::VdrProxyProfile; use crate::{ - core::profile::{ - ledger::{build_ledger_components, VcxPoolConfig}, - profile::Profile, - }, - global::{ - settings, - settings::{ - aries_vcx_disable_indy_mocks, aries_vcx_enable_indy_mocks, init_issuer_config, - reset_config_values_ariesvcx, set_config_value, CONFIG_INSTITUTION_DID, DEFAULT_DID, - }, - }, - utils::{ - constants::{POOL1_TXN, TRUSTEE_SEED}, - file::write_file, - test_logger::LibvcxDefaultLogger, - }, + core::profile::Profile, + global::settings, + utils::{constants::POOL1_TXN, file::write_file, test_logger::LibvcxDefaultLogger}, }; -#[macro_export] -macro_rules! assert_match { - ($pattern:pat, $var:expr) => { - assert!(match $var { - $pattern => true, - _ => false, - }) - }; -} - lazy_static! { static ref TEST_LOGGING_INIT: Once = Once::new(); } @@ -68,25 +38,20 @@ pub fn init_test_logging() { }) } -pub fn create_new_seed() -> String { - let x = rand::random::(); - format!("{x:032}") -} - pub struct SetupEmpty; pub struct SetupDefaults; -pub struct SetupMocks {} +pub struct SetupMocks; pub const AGENCY_ENDPOINT: &str = "http://localhost:8080"; pub const AGENCY_DID: &str = "VsKV7grR1BUE29mG2Fm2kX"; pub const AGENCY_VERKEY: &str = "Hezce2UWMZ3wUhVkh2LfKSs8nDzWwzs2Win7EzNN3YaR"; #[derive(Clone)] -pub struct SetupProfile { +pub struct SetupProfile { pub institution_did: String, - pub profile: Arc, + pub profile: P, pub genesis_file_path: String, } @@ -98,10 +63,6 @@ pub fn reset_global_state() { warn!("reset_global_state >>"); AgencyMockDecrypted::clear_mocks(); DidMocks::clear_mocks(); - aries_vcx_disable_indy_mocks().unwrap(); - disable_indy_mocks_core().unwrap(); - reset_config_values_ariesvcx().unwrap(); - reset_config_values_ariesvcxcore().unwrap() } impl SetupEmpty { @@ -134,9 +95,6 @@ impl SetupMocks { pub fn init() -> SetupMocks { init_test_logging(); enable_agency_mocks(); - aries_vcx_enable_indy_mocks().unwrap(); - enable_indy_mocks_core().unwrap(); - set_config_value(CONFIG_INSTITUTION_DID, DEFAULT_DID).unwrap(); SetupMocks {} } } @@ -164,123 +122,129 @@ pub async fn dev_setup_wallet_indy(key_seed: &str) -> (String, WalletHandle) { let (did, _vk) = create_and_store_my_did(wallet_handle, Some(key_seed), None) .await .unwrap(); - // todo: can we remove following line completely? 
- init_issuer_config(&did).unwrap(); - (did, wallet_handle) -} - -#[cfg(feature = "vdrtools")] -pub fn dev_build_profile_vdrtools( - genesis_file_path: String, - wallet: Arc, -) -> Arc { - info!("dev_build_profile_vdrtools >>"); - let vcx_pool_config = VcxPoolConfig { - genesis_file_path: genesis_file_path.clone(), - indy_vdr_config: None, - response_cache_config: None, - }; - let (ledger_read, ledger_write) = - build_ledger_components(wallet.clone(), vcx_pool_config).unwrap(); - let anoncreds_ledger_read: Arc = ledger_read.clone(); - let anoncreds_ledger_write: Arc = ledger_write.clone(); - let indy_ledger_read: Arc = ledger_read.clone(); - let indy_ledger_write: Arc = ledger_write.clone(); - Arc::new(VdrtoolsProfile::init( - wallet.clone(), - anoncreds_ledger_read, - anoncreds_ledger_write, - indy_ledger_read, - indy_ledger_write, - )) + (did, wallet_handle) } -#[cfg(feature = "modular_libs")] +#[cfg(all(feature = "credx", feature = "vdrtools_wallet"))] pub fn dev_build_profile_modular( genesis_file_path: String, wallet: Arc, -) -> Arc { +) -> ModularLibsProfile { info!("dev_build_profile_modular >>"); - let vcx_pool_config = VcxPoolConfig { - genesis_file_path: genesis_file_path.clone(), + let vcx_pool_config = crate::core::profile::ledger::VcxPoolConfig { + genesis_file_path, indy_vdr_config: None, response_cache_config: None, }; - Arc::new(ModularLibsProfile::init(wallet, vcx_pool_config).unwrap()) + ModularLibsProfile::init(wallet, vcx_pool_config).unwrap() } #[cfg(feature = "vdr_proxy_ledger")] -pub async fn dev_build_profile_vdr_proxy_ledger(wallet: Arc) -> Arc { +pub async fn dev_build_profile_vdr_proxy_ledger(wallet: Arc) -> VdrProxyProfile { use std::env; use aries_vcx_core::VdrProxyClient; - use crate::core::profile::vdr_proxy_profile::VdrProxyProfile; info!("dev_build_profile_vdr_proxy_ledger >>"); let client_url = env::var("VDR_PROXY_CLIENT_URL").unwrap_or_else(|_| "http://127.0.0.1:3030".to_string()); let client = VdrProxyClient::new(&client_url).unwrap(); - Arc::new(VdrProxyProfile::init(wallet, client).await.unwrap()) + VdrProxyProfile::init(wallet, client).await.unwrap() } #[allow(unreachable_code)] +#[allow(unused_variables)] pub async fn dev_build_featured_profile( genesis_file_path: String, wallet: Arc, -) -> Arc { - // In case of migration test setup, we are starting with vdrtools, then we migrate - #[cfg(feature = "migration")] - return { - info!("SetupProfile >> using indy profile"); - dev_build_profile_vdrtools(genesis_file_path, wallet) - }; - #[cfg(feature = "modular_libs")] - return { - info!("SetupProfile >> using modular profile"); - dev_build_profile_modular(genesis_file_path, wallet) - }; +) -> impl Profile { #[cfg(feature = "vdr_proxy_ledger")] return { info!("SetupProfile >> using vdr proxy profile"); dev_build_profile_vdr_proxy_ledger(wallet).await }; - #[cfg(feature = "vdrtools")] + + #[cfg(all( + feature = "credx", + feature = "vdrtools_wallet", + not(feature = "vdr_proxy_ledger") + ))] return { - info!("SetupProfile >> using indy profile"); - dev_build_profile_vdrtools(genesis_file_path, wallet) + info!("SetupProfile >> using modular profile"); + dev_build_profile_modular(genesis_file_path, wallet) }; -} -impl SetupProfile { - pub async fn run(f: impl FnOnce(Self) -> F) - where - F: Future, - { - init_test_logging(); - - let genesis_file_path = get_temp_file_path(POOL1_TXN).to_str().unwrap().to_string(); - create_testpool_genesis_txn_file(&genesis_file_path); + #[cfg(not(any( + all(feature = "credx", feature = "vdrtools_wallet"), + feature = 
"vdr_proxy_ledger" + )))] + super::mockdata::profile::mock_profile::MockProfile +} - let (public_did, wallet_handle) = dev_setup_wallet_indy(TRUSTEE_SEED).await; - let wallet = Arc::new(IndySdkWallet::new(wallet_handle)); - let profile = dev_build_featured_profile(genesis_file_path.clone(), wallet).await; +#[macro_export] +macro_rules! run_setup { + ($func:expr) => {{ + use aries_vcx_core::anoncreds::base_anoncreds::BaseAnonCreds; + use $crate::core::profile::Profile; + + $crate::utils::devsetup::init_test_logging(); + + let genesis_file_path = aries_vcx_core::ledger::indy::pool::test_utils::get_temp_file_path( + $crate::utils::constants::POOL1_TXN, + ) + .to_str() + .unwrap() + .to_string(); + aries_vcx_core::ledger::indy::pool::test_utils::create_testpool_genesis_txn_file( + &genesis_file_path, + ); + + let (public_did, wallet_handle) = + $crate::utils::devsetup::dev_setup_wallet_indy($crate::utils::constants::TRUSTEE_SEED) + .await; + let wallet = std::sync::Arc::new(aries_vcx_core::wallet::indy::IndySdkWallet::new( + wallet_handle, + )); + let profile = + $crate::utils::devsetup::dev_build_featured_profile(genesis_file_path.clone(), wallet) + .await; profile - .inject_anoncreds() - .prover_create_link_secret(settings::DEFAULT_LINK_SECRET_ALIAS) + .anoncreds() + .prover_create_link_secret(aries_vcx_core::global::settings::DEFAULT_LINK_SECRET_ALIAS) .await .unwrap(); + $crate::utils::devsetup::SetupProfile::new( + public_did.to_string(), + profile, + genesis_file_path, + ) + .await + .run($func) + }}; +} + +impl
<P> SetupProfile<P>
+where + P: Profile, +{ + pub async fn new(institution_did: String, profile: P, genesis_file_path: String) -> Self { debug!("genesis_file_path: {}", genesis_file_path); - let setup = SetupProfile { - institution_did: public_did.to_string(), + SetupProfile { + institution_did, profile, genesis_file_path, - }; + } + } - f(setup).await; + pub async fn run(self, f: impl FnOnce(Self) -> F) + where + F: Future, + { + f(self).await; reset_global_state(); } } diff --git a/aries_vcx/src/utils/encryption_envelope.rs b/aries_vcx/src/utils/encryption_envelope.rs index f5e32a85d6..d62907ae7a 100644 --- a/aries_vcx/src/utils/encryption_envelope.rs +++ b/aries_vcx/src/utils/encryption_envelope.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use agency_client::testing::mocking::AgencyMockDecrypted; use aries_vcx_core::wallet::base_wallet::BaseWallet; use diddoc_legacy::aries::diddoc::AriesDidDoc; @@ -10,15 +8,17 @@ use messages::{ }; use uuid::Uuid; -use crate::{errors::error::prelude::*, global::settings, utils::constants}; +use crate::{errors::error::prelude::*, utils::constants}; #[derive(Debug)] pub struct EncryptionEnvelope(pub Vec); impl EncryptionEnvelope { + /// Create an Encryption Envelope from a plaintext AriesMessage encoded as sequence of bytes. + /// If did_doc includes routing_keys, then also wrap in appropriate layers of forward message. pub async fn create( - wallet: &Arc, - message: &AriesMessage, + wallet: &impl BaseWallet, + message: &[u8], pw_verkey: Option<&str>, did_doc: &AriesDidDoc, ) -> VcxResult { @@ -29,10 +29,6 @@ impl EncryptionEnvelope { did_doc ); - if settings::indy_mocks_enabled() { - return Ok(EncryptionEnvelope(vec![])); - } - EncryptionEnvelope::encrypt_for_pairwise(wallet, message, pw_verkey, did_doc) .and_then(|message| async move { EncryptionEnvelope::wrap_into_forward_messages(wallet, message, did_doc).await @@ -42,13 +38,11 @@ impl EncryptionEnvelope { } async fn encrypt_for_pairwise( - wallet: &Arc, - message: &AriesMessage, + wallet: &impl BaseWallet, + message: &[u8], pw_verkey: Option<&str>, did_doc: &AriesDidDoc, ) -> VcxResult> { - let message = json!(message).to_string(); - let receiver_keys = json!(did_doc.recipient_keys()?).to_string(); debug!( @@ -57,13 +51,13 @@ impl EncryptionEnvelope { ); wallet - .pack_message(pw_verkey, &receiver_keys, message.as_bytes()) + .pack_message(pw_verkey, &receiver_keys, message) .await .map_err(|err| err.into()) } async fn wrap_into_forward_messages( - wallet: &Arc, + wallet: &impl BaseWallet, mut message: Vec, did_doc: &AriesDidDoc, ) -> VcxResult> { @@ -88,7 +82,7 @@ impl EncryptionEnvelope { } async fn wrap_into_forward( - wallet: &Arc, + wallet: &impl BaseWallet, message: Vec, to: &str, routing_key: &str, @@ -113,7 +107,7 @@ impl EncryptionEnvelope { } async fn _unpack_a2a_message( - wallet: &Arc, + wallet: &impl BaseWallet, payload: Vec, ) -> VcxResult<(String, Option)> { trace!( @@ -123,30 +117,16 @@ impl EncryptionEnvelope { let unpacked_msg = wallet.unpack_message(&payload).await?; - let msg_value: serde_json::Value = serde_json::from_slice(unpacked_msg.as_slice()) - .map_err(|err| { - AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidJson, - format!("Cannot deserialize message: {}", err), - ) - })?; + let sender_vk = unpacked_msg.sender_verkey; - let sender_vk = msg_value["sender_verkey"].as_str().map(String::from); - - let msg_string = msg_value["message"] - .as_str() - .ok_or(AriesVcxError::from_msg( - AriesVcxErrorKind::InvalidJson, - "Cannot find `message` field", - ))? 
- .to_string(); + let msg_string = unpacked_msg.message; Ok((msg_string, sender_vk)) } // todo: we should use auth_unpack wherever possible pub async fn anon_unpack( - wallet: &Arc, + wallet: &impl BaseWallet, payload: Vec, ) -> VcxResult<(AriesMessage, Option)> { trace!( @@ -172,7 +152,7 @@ impl EncryptionEnvelope { } pub async fn auth_unpack( - wallet: &Arc, + wallet: &impl BaseWallet, payload: Vec, expected_vk: &str, ) -> VcxResult { diff --git a/aries_vcx/src/utils/mockdata/profile/mock_profile.rs b/aries_vcx/src/utils/mockdata/profile/mock_profile.rs index 88aa616ba0..357a9421d4 100644 --- a/aries_vcx/src/utils/mockdata/profile/mock_profile.rs +++ b/aries_vcx/src/utils/mockdata/profile/mock_profile.rs @@ -1,17 +1,8 @@ -use std::sync::Arc; - -use aries_vcx_core::{ - anoncreds::base_anoncreds::BaseAnonCreds, - ledger::base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - TxnAuthrAgrmtOptions, - }, - wallet::{base_wallet::BaseWallet, mock_wallet::MockWallet}, -}; +use aries_vcx_core::{ledger::base_ledger::TxnAuthrAgrmtOptions, wallet::mock_wallet::MockWallet}; use async_trait::async_trait; use super::{mock_anoncreds::MockAnoncreds, mock_ledger::MockLedger}; -use crate::{core::profile::profile::Profile, errors::error::VcxResult}; +use crate::{core::profile::Profile, errors::error::VcxResult}; /// Implementation of a [Profile] which uses [MockLedger], [MockAnoncreds] and [MockWallet] to /// return mock data for all Profile methods. Only for unit testing purposes @@ -20,28 +11,25 @@ pub struct MockProfile; #[async_trait] impl Profile for MockProfile { - fn inject_indy_ledger_read(&self) -> Arc { - Arc::new(MockLedger {}) - } - - fn inject_indy_ledger_write(&self) -> Arc { - Arc::new(MockLedger {}) - } + type LedgerRead = MockLedger; + type LedgerWrite = MockLedger; + type Anoncreds = MockAnoncreds; + type Wallet = MockWallet; - fn inject_anoncreds(&self) -> Arc { - Arc::new(MockAnoncreds {}) + fn ledger_read(&self) -> &Self::LedgerRead { + &MockLedger } - fn inject_anoncreds_ledger_read(&self) -> Arc { - Arc::new(MockLedger {}) + fn ledger_write(&self) -> &Self::LedgerWrite { + &MockLedger } - fn inject_anoncreds_ledger_write(&self) -> Arc { - Arc::new(MockLedger {}) + fn anoncreds(&self) -> &Self::Anoncreds { + &MockAnoncreds } - fn inject_wallet(&self) -> Arc { - Arc::new(MockWallet {}) + fn wallet(&self) -> &Self::Wallet { + &MockWallet } fn update_taa_configuration(&self, _taa_options: TxnAuthrAgrmtOptions) -> VcxResult<()> { diff --git a/aries_vcx/src/utils/mod.rs b/aries_vcx/src/utils/mod.rs index 71007aa636..f6586aa221 100644 --- a/aries_vcx/src/utils/mod.rs +++ b/aries_vcx/src/utils/mod.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::wallet::base_wallet::BaseWallet; use diddoc_legacy::aries::diddoc::AriesDidDoc; use messages::AriesMessage; @@ -10,7 +8,7 @@ use crate::{ }; #[macro_use] -#[cfg(feature = "vdrtools")] +#[cfg(feature = "vdrtools_wallet")] pub mod devsetup; #[cfg(debug_assertions)] @@ -34,7 +32,6 @@ pub mod constants; pub mod file; pub mod mockdata; pub mod openssl; -#[cfg(feature = "vdrtools")] pub mod provision; pub mod qualifier; pub mod random; @@ -48,7 +45,7 @@ pub mod serialization; pub mod validation; pub async fn send_message( - wallet: Arc, + wallet: &impl BaseWallet, sender_verkey: String, did_doc: AriesDidDoc, message: AriesMessage, @@ -58,8 +55,14 @@ pub async fn send_message( message, &did_doc ); - let EncryptionEnvelope(envelope) = - EncryptionEnvelope::create(&wallet, &message, Some(&sender_verkey), 
&did_doc).await?; + + let EncryptionEnvelope(envelope) = EncryptionEnvelope::create( + wallet, + json!(message).to_string().as_bytes(), + Some(&sender_verkey), + &did_doc, + ) + .await?; // TODO: Extract from agency client agency_client::httpclient::post_message( @@ -73,7 +76,7 @@ pub async fn send_message( } pub async fn send_message_anonymously( - wallet: Arc, + wallet: &impl BaseWallet, did_doc: &AriesDidDoc, message: &AriesMessage, ) -> VcxResult<()> { @@ -83,7 +86,8 @@ pub async fn send_message_anonymously( &did_doc ); let EncryptionEnvelope(envelope) = - EncryptionEnvelope::create(&wallet, message, None, did_doc).await?; + EncryptionEnvelope::create(wallet, json!(message).to_string().as_bytes(), None, did_doc) + .await?; agency_client::httpclient::post_message( envelope, diff --git a/aries_vcx/src/utils/openssl.rs b/aries_vcx/src/utils/openssl.rs index 904d9acab7..10865562d9 100644 --- a/aries_vcx/src/utils/openssl.rs +++ b/aries_vcx/src/utils/openssl.rs @@ -3,6 +3,7 @@ use sha2::{Digest, Sha256}; use crate::errors::error::prelude::*; +// TODO - does not have to be a result... pub fn encode(s: &str) -> VcxResult { match s.parse::() { Ok(val) => Ok(val.to_string()), diff --git a/aries_vcx/src/utils/provision.rs b/aries_vcx/src/utils/provision.rs index b5e8f6d331..85af15080d 100644 --- a/aries_vcx/src/utils/provision.rs +++ b/aries_vcx/src/utils/provision.rs @@ -12,7 +12,7 @@ use crate::errors::error::prelude::*; pub async fn provision_cloud_agent( client: &mut AgencyClient, - wallet: Arc, + wallet: Arc, provision_config: &AgentProvisionConfig, ) -> VcxResult { let seed = provision_config.agent_seed.as_deref(); diff --git a/aries_vcx/src/utils/random.rs b/aries_vcx/src/utils/random.rs index 053665d6ae..5f70a2d076 100644 --- a/aries_vcx/src/utils/random.rs +++ b/aries_vcx/src/utils/random.rs @@ -1,24 +1,33 @@ use rand::{distributions::Alphanumeric, Rng}; pub fn generate_random_schema_name() -> String { - rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(25) - .collect::() + String::from_utf8( + rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(25) + .collect(), + ) + .unwrap() } pub fn generate_random_name() -> String { - rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(25) - .collect::() + String::from_utf8( + rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(25) + .collect(), + ) + .unwrap() } pub fn generate_random_seed() -> String { - rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(32) - .collect::() + String::from_utf8( + rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(32) + .collect(), + ) + .unwrap() } pub fn generate_random_schema_version() -> String { diff --git a/aries_vcx/tests/test_connection.rs b/aries_vcx/tests/test_connection.rs index 0599bfa18e..15be5b549b 100644 --- a/aries_vcx/tests/test_connection.rs +++ b/aries_vcx/tests/test_connection.rs @@ -6,6 +6,7 @@ pub mod utils; use aries_vcx::{ common::ledger::transactions::write_endpoint_legacy, + core::profile::Profile, protocols::{connection::GenericConnection, mediated_connection::pairwise_info::PairwiseInfo}, utils::{devsetup::*, encryption_envelope::EncryptionEnvelope}, }; @@ -48,13 +49,13 @@ fn build_basic_message(content: String) -> BasicMessage { .build() } -async fn decrypt_message( - consumer: &TestAgent, +async fn decrypt_message( + consumer: &TestAgent

, received: Vec, consumer_to_institution: &GenericConnection, ) -> AriesMessage { EncryptionEnvelope::auth_unpack( - &consumer.profile.inject_wallet(), + consumer.profile.wallet(), received, &consumer_to_institution.remote_vk().unwrap(), ) @@ -62,30 +63,28 @@ async fn decrypt_message( .unwrap() } -async fn send_and_receive_message( - consumer: &TestAgent, - insitution: &TestAgent, +async fn send_and_receive_message( + consumer: &TestAgent, + insitution: &TestAgent, institution_to_consumer: &GenericConnection, consumer_to_institution: &GenericConnection, message: &AriesMessage, ) -> AriesMessage { let encrypted_message = institution_to_consumer - .encrypt_message(&insitution.profile.inject_wallet(), message) + .encrypt_message(insitution.profile.wallet(), message) .await .unwrap() .0; decrypt_message(consumer, encrypted_message, consumer_to_institution).await } -async fn create_service(faber: &TestAgent) { - let pairwise_info = PairwiseInfo::create(&faber.profile.inject_wallet()) - .await - .unwrap(); +async fn create_service(faber: &TestAgent

) { + let pairwise_info = PairwiseInfo::create(faber.profile.wallet()).await.unwrap(); let service = AriesService::create() .set_service_endpoint("http://dummy.org".parse().unwrap()) .set_recipient_keys(vec![pairwise_info.pw_vk.clone()]); write_endpoint_legacy( - &faber.profile.inject_indy_ledger_write(), + faber.profile.ledger_write(), &faber.institution_did, &service, ) diff --git a/aries_vcx/tests/test_credential_issuance.rs b/aries_vcx/tests/test_credential_issuance.rs index 4eb25875b0..6f8b85da6c 100644 --- a/aries_vcx/tests/test_credential_issuance.rs +++ b/aries_vcx/tests/test_credential_issuance.rs @@ -11,8 +11,6 @@ use aries_vcx::{ utils::devsetup::*, }; -#[cfg(feature = "migration")] -use crate::utils::migration::Migratable; use crate::utils::{ scenarios::{ accept_credential_proposal, accept_offer, create_address_schema_creddef_revreg, @@ -45,11 +43,6 @@ async fn test_agency_pool_double_issuance_issuer_is_verifier() { ) .await; - // NOTE: Credx-anoncreds-implementation-generated presentation is not compatible with - // vdrtools anoncreds implementation as the presentation fails to deserialize - // #[cfg(feature = "migration")] - // consumer.migrate().await; - let verifier = exchange_proof( &mut institution, &mut consumer, @@ -63,9 +56,6 @@ async fn test_agency_pool_double_issuance_issuer_is_verifier() { PresentationVerificationStatus::Valid ); - #[cfg(feature = "migration")] - institution.migrate().await; - let verifier = exchange_proof( &mut institution, &mut consumer, @@ -84,6 +74,7 @@ async fn test_agency_pool_double_issuance_issuer_is_verifier() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_two_creds_one_rev_reg() { SetupPoolDirectory::run(|setup| async move { let mut issuer = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -103,9 +94,6 @@ async fn test_agency_pool_two_creds_one_rev_reg() { ) .await; - #[cfg(feature = "migration")] - issuer.migrate().await; - let _credential_handle2 = exchange_credential( &mut consumer, &mut issuer, @@ -116,9 +104,6 @@ async fn test_agency_pool_two_creds_one_rev_reg() { ) .await; - #[cfg(feature = "migration")] - verifier.migrate().await; - let verifier_handler = exchange_proof( &mut verifier, &mut consumer, @@ -132,9 +117,6 @@ async fn test_agency_pool_two_creds_one_rev_reg() { PresentationVerificationStatus::Valid ); - #[cfg(feature = "migration")] - consumer.migrate().await; - let verifier_handler = exchange_proof( &mut verifier, &mut consumer, @@ -153,6 +135,7 @@ async fn test_agency_pool_two_creds_one_rev_reg() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_credential_exchange_via_proposal() { SetupPoolDirectory::run(|setup| async move { let mut institution = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -164,9 +147,6 @@ async fn test_agency_pool_credential_exchange_via_proposal() { ) .await; - #[cfg(feature = "migration")] - institution.migrate().await; - exchange_credential_with_proposal( &mut consumer, &mut institution, @@ -183,6 +163,7 @@ async fn test_agency_pool_credential_exchange_via_proposal() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_credential_exchange_via_proposal_failed() { SetupPoolDirectory::run(|setup| async move { let mut institution = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -199,9 +180,6 @@ async fn test_agency_pool_credential_exchange_via_proposal_failed() { let mut holder = create_holder_from_proposal(cred_proposal.clone()); let mut issuer = 
create_issuer_from_proposal(cred_proposal.clone()); - #[cfg(feature = "migration")] - institution.migrate().await; - let cred_offer = accept_credential_proposal( &mut institution, &mut issuer, @@ -224,6 +202,7 @@ async fn test_agency_pool_credential_exchange_via_proposal_failed() { // TODO: Maybe duplicates test_agency_pool_credential_exchange_via_proposal #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_credential_exchange_via_proposal_with_negotiation() { SetupPoolDirectory::run(|setup| async move { let mut institution = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -235,17 +214,11 @@ async fn test_agency_pool_credential_exchange_via_proposal_with_negotiation() { ) .await; - #[cfg(feature = "migration")] - institution.migrate().await; - let cred_proposal = create_credential_proposal(&schema.schema_id, &cred_def.get_cred_def_id(), "comment"); let mut holder = create_holder_from_proposal(cred_proposal.clone()); let mut issuer = create_issuer_from_proposal(cred_proposal.clone()); - #[cfg(feature = "migration")] - consumer.migrate().await; - let cred_proposal_1 = create_credential_proposal(&schema.schema_id, &cred_def.get_cred_def_id(), "comment"); let cred_offer_1 = accept_credential_proposal( diff --git a/aries_vcx/tests/test_credential_issuance_v2.rs b/aries_vcx/tests/test_credential_issuance_v2.rs new file mode 100644 index 0000000000..20722ce0db --- /dev/null +++ b/aries_vcx/tests/test_credential_issuance_v2.rs @@ -0,0 +1,409 @@ +use std::{collections::HashMap, sync::Arc, time::Duration}; + +use agency_client::httpclient::post_message; +use aries_vcx::{ + common::test_utils::{create_and_write_test_cred_def, create_and_write_test_schema}, + core::profile::{ledger::VcxPoolConfig, modular_libs_profile::ModularLibsProfile, Profile}, + errors::error::VcxResult, + global::settings, + protocols::{ + connection::Connection, + issuance_v2::{ + formats::{ + holder::hyperledger_indy::{ + HyperledgerIndyCreateProposalInput, HyperledgerIndyCreateRequestInput, + HyperledgerIndyCredentialFilterBuilder, + HyperledgerIndyHolderCredentialIssuanceFormat, + HyperledgerIndyStoreCredentialInput, + }, + issuer::hyperledger_indy::{ + HyperledgerIndyCreateCredentialInput, HyperledgerIndyCreateOfferInput, + HyperledgerIndyIssuerCredentialIssuanceFormat, + }, + }, + holder::{ + states::{offer_received::OfferReceived, proposal_prepared::ProposalPrepared}, + HolderV2, + }, + issuer::{states::proposal_received::ProposalReceived, IssuerV2}, + }, + mediated_connection::pairwise_info::PairwiseInfo, + }, + run_setup, + transport::Transport, +}; +use aries_vcx_core::{ + anoncreds::base_anoncreds::BaseAnonCreds, + wallet::{ + base_wallet::BaseWallet, + indy::{wallet::create_and_open_wallet, IndySdkWallet, WalletConfigBuilder}, + }, +}; +use async_trait::async_trait; +use messages::msg_fields::protocols::{ + connection::response::Response, + cred_issuance::{ + common::CredentialAttr, + v2::{ + issue_credential::IssueCredentialV2, offer_credential::OfferCredentialV2, + CredentialPreviewV2, + }, + }, +}; +use serde::de::DeserializeOwned; +use serde_json::{json, Value}; +use url::Url; + +#[tokio::test] +#[ignore] +async fn test_hlindy_non_revocable_credential_issuance_v2_from_proposal() { + run_setup!(|setup| async move { + let anoncreds = setup.profile.anoncreds(); + let ledger_read = setup.profile.ledger_read(); + + let schema = create_and_write_test_schema( + anoncreds, + setup.profile.ledger_write(), + &setup.institution_did, + 
aries_vcx::utils::constants::DEFAULT_SCHEMA_ATTRS, + ) + .await; + let cred_def = create_and_write_test_cred_def( + anoncreds, + ledger_read, + setup.profile.ledger_write(), + &setup.institution_did, + &schema.schema_id, + false, + ) + .await; + + let proposal_input = HyperledgerIndyCreateProposalInput { + cred_filter: HyperledgerIndyCredentialFilterBuilder::default() + .cred_def_id(cred_def.get_cred_def_id()) + .build() + .unwrap(), + }; + let proposal_preview = CredentialPreviewV2::new(vec![CredentialAttr::builder() + .name(String::from("address")) + .value(String::from("123 Main St")) + .build()]); + let holder = HolderV2::< + ProposalPrepared>, + >::with_proposal(&proposal_input, Some(proposal_preview.clone())) + .await + .unwrap(); + + let proposal_msg = holder.get_proposal().clone(); + + let issuer = IssuerV2::< + ProposalReceived>, + >::from_proposal(proposal_msg); + + // issuer checks details of the proposal + let (received_filter, received_proposal_preview) = issuer.get_proposal_details().unwrap(); + assert_eq!(received_filter, proposal_input.cred_filter); + assert_eq!(received_proposal_preview.unwrap(), &proposal_preview); + + let offer_data = HyperledgerIndyCreateOfferInput { + anoncreds: anoncreds, + cred_def_id: cred_def.get_cred_def_id(), + }; + let offer_preview = CredentialPreviewV2::new(vec![ + CredentialAttr::builder() + .name(String::from("address1")) + .value(String::from("123 Main St")) + .build(), + CredentialAttr::builder() + .name(String::from("address2")) + .value(String::from("Suite 3")) + .build(), + CredentialAttr::builder() + .name(String::from("city")) + .value(String::from("Draper")) + .build(), + CredentialAttr::builder() + .name(String::from("state")) + .value(String::from("UT")) + .build(), + CredentialAttr::builder() + .name(String::from("zip")) + .value(String::from("84000")) + .build(), + ]); + let issuer = issuer + .prepare_offer(&offer_data, offer_preview.clone(), None) + .await + .unwrap(); + + let offer_msg = issuer.get_offer().clone(); + + let holder = holder.receive_offer(offer_msg).unwrap(); + + // holder checks details of the offer + let (received_offer_details, received_offer_preview) = holder.get_offer_details().unwrap(); + assert_eq!( + received_offer_details.cred_def_id, + cred_def.get_cred_def_id() + ); + assert_eq!(received_offer_details.schema_id, cred_def.get_schema_id()); + assert_eq!(received_offer_preview, &offer_preview); + + // usually this would be the DID from the connection, but does not really matter + let pw = PairwiseInfo::create(setup.profile.wallet()).await.unwrap(); + let request_input = HyperledgerIndyCreateRequestInput { + my_pairwise_did: pw.pw_did, + ledger: ledger_read, + anoncreds: anoncreds, + }; + + let holder = holder + .prepare_credential_request(&request_input) + .await + .unwrap(); + + let request_msg = holder.get_request().clone(); + + let issuer = issuer.receive_request(request_msg).unwrap(); + + let cred_data = HyperledgerIndyCreateCredentialInput { + anoncreds: anoncreds, + credential_attributes: HashMap::from([ + (String::from("address1"), String::from("123 Main St")), + ( + String::from("address2"), + String::from( + "Suite + 3", + ), + ), + (String::from("city"), String::from("Draper")), + (String::from("state"), String::from("UT")), + (String::from("zip"), String::from("84000")), + ]), + revocation_info: None, + }; + + let issuer = issuer + .prepare_credential(&cred_data, Some(true), None) + .await + .unwrap(); + let issuer_cred_metadata = issuer.get_credential_creation_metadata().clone(); + + let 
cred_msg = issuer.get_credential().clone(); + + let receive_input = HyperledgerIndyStoreCredentialInput { + ledger: ledger_read, + anoncreds: anoncreds, + }; + + let holder = holder + .receive_credential(cred_msg, &receive_input) + .await + .unwrap(); + let holder_cred_metadata = holder.get_stored_credential_metadata().clone(); + let holder = holder.prepare_ack_if_required(); + + let ack_msg = holder.get_ack().unwrap().clone(); + + let _issuer = issuer.complete_with_ack(ack_msg); + + // check final states + assert!(issuer_cred_metadata.credential_revocation_id.is_none()); + + let holder_cred_id = holder_cred_metadata.credential_id; + let cred = anoncreds + .prover_get_credential(&holder_cred_id) + .await + .unwrap(); + assert!(!cred.is_empty()); + }) + .await +} + +// TODO -DELETE BELOW +#[tokio::test] +#[ignore] +async fn manual_test_holder_against_acapy() { + let relay_external_endpoint = + String::from("https://fa5b-203-123-120-210.ngrok-free.app/send_user_message/user-123"); + let relay_internal_endpoint = + String::from("https://fa5b-203-123-120-210.ngrok-free.app/pop_user_message/user-123"); + + fn fix_malformed_thread_decorator(msg: &mut Value) { + // remove thread decorator if it is empty (acapy sends it empty) + let Some(thread) = msg.get_mut("~thread") else { + return; + }; + + if thread.as_object().unwrap().is_empty() { + thread.take(); + } + } + + async fn get_next_aries_msg( + relay: &str, + wallet: &impl BaseWallet, + ) -> VcxResult { + let enc_bytes = reqwest::get(relay) + .await + .unwrap() + .bytes() + .await + .unwrap() + .to_vec(); + + let unpacked = wallet.unpack_message(&enc_bytes).await?; + let mut msg = serde_json::from_str(&unpacked.message)?; + fix_malformed_thread_decorator(&mut msg); + Ok(serde_json::from_value(msg)?) + } + + async fn await_next_aries_msg(relay: &str, wallet: &impl BaseWallet) -> T { + loop { + match get_next_aries_msg(relay, wallet).await { + Ok(data) => return data, + Err(e) => println!("failed to fetch msg, trying again: {e:?}"), + } + + std::thread::sleep(Duration::from_millis(500)) + } + } + + let config_wallet = WalletConfigBuilder::default() + .wallet_name("wallet1") + .wallet_key(settings::DEFAULT_WALLET_KEY) + .wallet_key_derivation(settings::WALLET_KDF_RAW) + .build() + .unwrap(); + + let wh = create_and_open_wallet(&config_wallet).await.unwrap(); + let wallet = Arc::new(IndySdkWallet::new(wh)); + let vcx_pool_config = VcxPoolConfig { + genesis_file_path: String::from("/Users/gmulhearne/Documents/dev/rust/aries-vcx/testnet"), + indy_vdr_config: None, + response_cache_config: None, + }; + let profile = ModularLibsProfile::init(wallet, vcx_pool_config).unwrap(); + let wallet = profile.wallet(); + let indy_read = profile.ledger_read(); + let anoncreds_read = profile.ledger_read(); + let anoncreds = profile.anoncreds(); + + anoncreds + .prover_create_link_secret(settings::DEFAULT_LINK_SECRET_ALIAS) + .await + .ok(); + + let pairwise_info = PairwiseInfo::create(wallet).await.unwrap(); + let inviter = Connection::new_invitee(String::from("Mr Vcx"), pairwise_info.clone()); + + // acccept invite + let invitation_json = json!( + { + "@type": "did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/connections/1.0/invitation", + "@id": "ade68e30-6880-47e7-9dae-b5588e41b815", + "label": "Bob3", + "recipientKeys": [ + "Ab2L1WaK5rhTqZFCb2RHyjQjVzygf6xAo3jAayH1r8XM" + ], + "serviceEndpoint": "http://localhost:8200" + } + ); + let invitation = serde_json::from_value(invitation_json).unwrap(); + + let inviter = inviter + .accept_invitation(indy_read, invitation) + .await 
+ .unwrap(); + + let inviter = inviter + .prepare_request(relay_external_endpoint.parse().unwrap(), vec![]) + .await + .unwrap(); + let request_msg = inviter.get_request().clone(); + inviter + .send_message(wallet, &request_msg.into(), &HttpClient) + .await + .unwrap(); + + // get and accept response + let response = await_next_aries_msg::(&relay_internal_endpoint, wallet).await; + let conn = inviter + .handle_response(wallet, response.try_into().unwrap()) + .await + .unwrap(); + + // send back an ack + conn.send_message(wallet, &conn.get_ack().into(), &HttpClient) + .await + .unwrap(); + + println!("CONN ESTABLISHED"); + + // start the credential fun :) + + // get offer + println!("WAITING FOR CRED OFFER, GO DO IT"); + + let offer = await_next_aries_msg::(&relay_internal_endpoint, wallet).await; + println!("{offer:?}"); + println!("{}", serde_json::to_string(&offer).unwrap()); + + let holder = + HolderV2::>>::from_offer( + offer, + ); + + println!("{:?}", holder.get_offer_details().unwrap()); + + // send request + + let holder = holder + .prepare_credential_request(&HyperledgerIndyCreateRequestInput { + my_pairwise_did: pairwise_info.pw_did, + ledger: anoncreds_read, + anoncreds: anoncreds, + }) + .await + .unwrap(); + + let msg = holder.get_request().to_owned().into(); + conn.send_message(wallet, &msg, &HttpClient).await.unwrap(); + + // get cred + let cred = await_next_aries_msg::(&relay_internal_endpoint, wallet).await; + println!("{cred:?}"); + println!("{}", serde_json::to_string(&cred).unwrap()); + + let holder = holder + .receive_credential( + cred, + &HyperledgerIndyStoreCredentialInput { + ledger: anoncreds_read, + anoncreds: anoncreds, + }, + ) + .await + .unwrap(); + + println!("{:?}", holder.get_stored_credential_metadata()); + + // check cred made in wallet! 
+ let stored_cred = anoncreds + .prover_get_credential(&holder.get_stored_credential_metadata().credential_id) + .await + .unwrap(); + + println!("{stored_cred}"); +} + +// TODO - DELETE ME, for acapy test +pub struct HttpClient; +#[async_trait] +impl Transport for HttpClient { + async fn send_message(&self, msg: Vec<u8>, service_endpoint: Url) -> VcxResult<()> { + post_message(msg, service_endpoint).await?; + Ok(()) + } +} diff --git a/aries_vcx/tests/test_credential_retrieval.rs b/aries_vcx/tests/test_credential_retrieval.rs index 0e767dfa99..5deba6aca2 100644 --- a/aries_vcx/tests/test_credential_retrieval.rs +++ b/aries_vcx/tests/test_credential_retrieval.rs @@ -1,3 +1,5 @@ +#![allow(clippy::diverging_sub_expression)] + #[macro_use] extern crate log; #[macro_use] @@ -19,8 +21,10 @@ use aries_vcx::{ proof_presentation::{prover::Prover, types::RetrievedCredentials}, util::AttachmentId, }, - utils::{constants::DEFAULT_SCHEMA_ATTRS, devsetup::SetupProfile}, + run_setup, + utils::constants::DEFAULT_SCHEMA_ATTRS, }; +use base64::{engine::general_purpose, Engine}; use messages::{ decorators::attachment::{Attachment, AttachmentData, AttachmentType}, misc::MimeType, @@ -29,14 +33,11 @@ use messages::{ }, }; -#[cfg(feature = "migration")] -use crate::utils::migration::Migratable; - #[tokio::test] #[ignore] // TODO: This should be a unit test async fn test_agency_pool_retrieve_credentials_empty() { - SetupProfile::run(|mut setup| async move { + run_setup!(|setup| async move { // create skeleton proof request attachment data let mut req = json!({ "nonce":"123432421212", @@ -49,7 +50,9 @@ async fn test_agency_pool_retrieve_credentials_empty() { let pres_req_data: PresentationRequestData = serde_json::from_str(&req.to_string()).unwrap(); - let attach_type = AttachmentType::Base64(base64::encode(&json!(pres_req_data).to_string())); + let attach_type = AttachmentType::Base64( + general_purpose::STANDARD.encode(json!(pres_req_data).to_string()), + ); let attach_data = AttachmentData::builder().content(attach_type).build(); let attach = Attachment::builder() .data(attach_data) @@ -69,11 +72,8 @@ async fn test_agency_pool_retrieve_credentials_empty() { .build(); let proof: Prover = Prover::create_from_request("1", proof_req).unwrap(); - #[cfg(feature = "migration")] - setup.migrate().await; - let retrieved_creds = proof - .retrieve_credentials(&setup.profile.inject_anoncreds()) + .retrieve_credentials(setup.profile.anoncreds()) .await .unwrap(); assert_eq!( @@ -87,7 +87,9 @@ async fn test_agency_pool_retrieve_credentials_empty() { let pres_req_data: PresentationRequestData = serde_json::from_str(&req.to_string()).unwrap(); - let attach_type = AttachmentType::Base64(base64::encode(&json!(pres_req_data).to_string())); + let attach_type = AttachmentType::Base64( + general_purpose::STANDARD.encode(json!(pres_req_data).to_string()), + ); let attach_data = AttachmentData::builder().content(attach_type).build(); let attach = Attachment::builder() .data(attach_data) @@ -109,7 +111,7 @@ async fn test_agency_pool_retrieve_credentials_empty() { let proof: Prover = Prover::create_from_request("2", proof_req).unwrap(); let retrieved_creds = proof - .retrieve_credentials(&setup.profile.inject_anoncreds()) + .retrieve_credentials(setup.profile.anoncreds()) .await .unwrap(); assert_eq!( @@ -130,26 +132,26 @@ async fn test_agency_pool_retrieve_credentials_empty() { #[ignore] // TODO: This should be a unit test async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() { - SetupProfile::run(|mut setup|
async move { + run_setup!(|setup| async move { let schema = create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, DEFAULT_SCHEMA_ATTRS, ) .await; let cred_def = create_and_write_test_cred_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &schema.schema_id, true, ) .await; create_and_write_credential( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &cred_def, None, @@ -173,7 +175,9 @@ async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() serde_json::from_str(&req.to_string()).unwrap(); let id = "test_id".to_owned(); - let attach_type = AttachmentType::Base64(base64::encode(&json!(pres_req_data).to_string())); + let attach_type = AttachmentType::Base64( + general_purpose::STANDARD.encode(json!(pres_req_data).to_string()), + ); let attach_data = AttachmentData::builder().content(attach_type).build(); let attach = Attachment::builder() .data(attach_data) @@ -193,7 +197,7 @@ async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() // All lower case let retrieved_creds = proof - .retrieve_credentials(&setup.profile.inject_anoncreds()) + .retrieve_credentials(setup.profile.anoncreds()) .await .unwrap(); assert_eq!( @@ -209,7 +213,9 @@ async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() serde_json::from_str(&req.to_string()).unwrap(); let id = "test_id".to_owned(); - let attach_type = AttachmentType::Base64(base64::encode(&json!(pres_req_data).to_string())); + let attach_type = AttachmentType::Base64( + general_purpose::STANDARD.encode(json!(pres_req_data).to_string()), + ); let attach_data = AttachmentData::builder().content(attach_type).build(); let attach = Attachment::builder() .data(attach_data) @@ -221,16 +227,13 @@ async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() .request_presentations_attach(vec![attach]) .build(); - #[cfg(feature = "migration")] - setup.migrate().await; - let proof_req = RequestPresentation::builder() .id(id) .content(content) .build(); let proof: Prover = Prover::create_from_request("2", proof_req).unwrap(); let retrieved_creds2 = proof - .retrieve_credentials(&setup.profile.inject_anoncreds()) + .retrieve_credentials(setup.profile.anoncreds()) .await .unwrap(); assert_eq!( @@ -246,7 +249,9 @@ async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() serde_json::from_str(&req.to_string()).unwrap(); let id = "test_id".to_owned(); - let attach_type = AttachmentType::Base64(base64::encode(&json!(pres_req_data).to_string())); + let attach_type = AttachmentType::Base64( + general_purpose::STANDARD.encode(json!(pres_req_data).to_string()), + ); let attach_data = AttachmentData::builder().content(attach_type).build(); let attach = Attachment::builder() .data(attach_data) @@ -264,7 +269,7 @@ async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() .build(); let proof: Prover = Prover::create_from_request("1", proof_req).unwrap(); let retrieved_creds3 = proof - .retrieve_credentials(&setup.profile.inject_anoncreds()) + .retrieve_credentials(setup.profile.anoncreds()) .await 
.unwrap(); assert_eq!( @@ -278,9 +283,10 @@ async fn test_agency_pool_case_for_proof_req_doesnt_matter_for_retrieve_creds() } // todo: credx implementation does not support checking credential value in respect to predicate -#[cfg(not(feature = "modular_libs"))] +#[cfg(not(feature = "credx"))] #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_it_should_fail_to_select_credentials_for_predicate() { use aries_vcx::utils::devsetup::SetupPoolDirectory; use utils::{ @@ -299,9 +305,6 @@ async fn test_agency_pool_it_should_fail_to_select_credentials_for_predicate() { issue_address_credential(&mut consumer, &mut institution).await; - #[cfg(feature = "migration")] - institution.migrate().await; - let requested_preds_string = serde_json::to_string(&json!([{ "name": "zip", "p_type": ">=", @@ -314,9 +317,6 @@ async fn test_agency_pool_it_should_fail_to_select_credentials_for_predicate() { .await; let mut verifier = create_verifier_from_request_data(presentation_request_data).await; - #[cfg(feature = "migration")] - consumer.migrate().await; - let presentation_request = verifier.get_presentation_request_msg().unwrap(); let mut prover = create_prover_from_request(presentation_request.clone()).await; let selected_credentials = diff --git a/aries_vcx/tests/test_mysql_wallet.rs b/aries_vcx/tests/test_mysql_wallet.rs index 42d32e5d15..2853e6c7ef 100644 --- a/aries_vcx/tests/test_mysql_wallet.rs +++ b/aries_vcx/tests/test_mysql_wallet.rs @@ -3,10 +3,7 @@ extern crate serde_json; #[cfg(test)] mod dbtests { - use aries_vcx::{ - global::{settings, settings::init_issuer_config}, - utils::test_logger::LibvcxDefaultLogger, - }; + use aries_vcx::{global::settings, utils::test_logger::LibvcxDefaultLogger}; use aries_vcx_core::wallet::{ base_wallet::BaseWallet, indy::{ @@ -45,10 +42,10 @@ mod dbtests { .unwrap(); let wallet_handle = create_and_open_wallet(&config_wallet).await.unwrap(); - let config_issuer = wallet_configure_issuer(wallet_handle, enterprise_seed) + let _config_issuer = wallet_configure_issuer(wallet_handle, enterprise_seed) .await .unwrap(); - init_issuer_config(&config_issuer.institution_did).unwrap(); + let (_, _) = IndySdkWallet::new(wallet_handle) .create_and_store_my_did(None, None) .await diff --git a/aries_vcx/tests/test_pool.rs b/aries_vcx/tests/test_pool.rs index f24041a753..e39bce0e4d 100644 --- a/aries_vcx/tests/test_pool.rs +++ b/aries_vcx/tests/test_pool.rs @@ -1,11 +1,12 @@ +#![allow(clippy::diverging_sub_expression)] + #[macro_use] extern crate log; #[macro_use] extern crate serde_json; pub mod utils; - -use std::{sync::Arc, thread, time::Duration}; +use std::{thread, time::Duration}; use aries_vcx::{ common::{ @@ -24,14 +25,16 @@ use aries_vcx::{ revocation_registry_delta::RevocationRegistryDelta, }, test_utils::{ - create_and_write_test_cred_def, create_and_write_test_rev_reg, + create_and_publish_test_rev_reg, create_and_write_test_cred_def, create_and_write_test_schema, }, }, + core::profile::Profile, errors::error::AriesVcxErrorKind, + run_setup, utils::{ - constants::DEFAULT_SCHEMA_ATTRS, - devsetup::{SetupPoolDirectory, SetupProfile}, + constants::{DEFAULT_SCHEMA_ATTRS, TEST_TAILS_URL}, + devsetup::SetupPoolDirectory, }, }; use aries_vcx_core::{ @@ -40,12 +43,10 @@ use aries_vcx_core::{ base_ledger::{AnoncredsLedgerRead, AnoncredsLedgerWrite}, indy::pool::test_utils::get_temp_file_path, }, - wallet::indy::wallet::get_verkey_from_wallet, + wallet::{base_wallet::BaseWallet, indy::wallet::get_verkey_from_wallet}, }; use 
diddoc_legacy::aries::service::AriesService; -#[cfg(feature = "migration")] -use crate::utils::migration::Migratable; use crate::utils::{ scenarios::attr_names_address_list, test_agent::{create_test_agent, create_test_agent_trustee}, @@ -53,9 +54,9 @@ use crate::utils::{ // TODO: Deduplicate with create_and_store_revocable_credential_def async fn create_and_store_nonrevocable_credential_def( - anoncreds: &Arc, - ledger_read: &Arc, - ledger_write: &Arc, + anoncreds: &impl BaseAnonCreds, + ledger_read: &impl AnoncredsLedgerRead, + ledger_write: &impl AnoncredsLedgerWrite, issuer_did: &str, attr_list: &str, ) -> (String, String, String, String, CredentialDef) { @@ -84,9 +85,9 @@ async fn create_and_store_nonrevocable_credential_def( // TODO: Deduplicate with create_and_store_nonrevocable_credential_def async fn create_and_store_revocable_credential_def( - anoncreds: &Arc, - ledger_read: &Arc, - ledger_write: &Arc, + anoncreds: &impl BaseAnonCreds, + ledger_read: &impl AnoncredsLedgerRead, + ledger_write: &impl AnoncredsLedgerWrite, issuer_did: &str, attr_list: &str, ) -> (Schema, CredentialDef, RevocationRegistry) { @@ -100,46 +101,42 @@ async fn create_and_store_revocable_credential_def( true, ) .await; - let rev_reg = create_and_write_test_rev_reg( + let rev_reg = create_and_publish_test_rev_reg( anoncreds, ledger_write, issuer_did, &cred_def.get_cred_def_id(), ) .await; - tokio::time::sleep(Duration::from_millis(1000)).await; + (schema, cred_def, rev_reg) } #[tokio::test] #[ignore] async fn test_pool_rotate_verkey() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let (did, verkey) = add_new_did( - &setup.profile.inject_wallet(), - &setup.profile.inject_indy_ledger_write(), + setup.profile.wallet(), + setup.profile.ledger_write(), &setup.institution_did, None, ) .await .unwrap(); - rotate_verkey( - &setup.profile.inject_wallet(), - &setup.profile.inject_indy_ledger_write(), - &did, - ) - .await - .unwrap(); + rotate_verkey(setup.profile.wallet(), setup.profile.ledger_write(), &did) + .await + .unwrap(); tokio::time::sleep(Duration::from_millis(1000)).await; let local_verkey = setup .profile - .inject_wallet() + .wallet() .key_for_local_did(&did) .await .unwrap(); - let ledger_verkey = get_verkey_from_ledger(&setup.profile.inject_indy_ledger_read(), &did) + let ledger_verkey = get_verkey_from_ledger(setup.profile.ledger_read(), &did) .await .unwrap(); assert_ne!(verkey, ledger_verkey); @@ -151,25 +148,21 @@ async fn test_pool_rotate_verkey() { #[tokio::test] #[ignore] async fn test_pool_add_get_service() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let did = setup.institution_did.clone(); let expect_service = AriesService::default(); - write_endpoint_legacy( - &setup.profile.inject_indy_ledger_write(), - &did, - &expect_service, - ) - .await - .unwrap(); + write_endpoint_legacy(setup.profile.ledger_write(), &did, &expect_service) + .await + .unwrap(); thread::sleep(Duration::from_millis(50)); - let service = get_service(&setup.profile.inject_indy_ledger_read(), &did) + let service = get_service(setup.profile.ledger_read(), &did) .await .unwrap(); assert_eq!(expect_service, service); // clean up written legacy service clear_attr( - &setup.profile.inject_indy_ledger_write(), + setup.profile.ledger_write(), &setup.institution_did, "service", ) @@ -186,7 +179,7 @@ async fn test_pool_write_new_endorser_did() { let faber = create_test_agent_trustee(setup.genesis_file_path.clone()).await; let acme = 
create_test_agent(setup.genesis_file_path.clone()).await; let acme_vk = get_verkey_from_wallet( - acme.profile.inject_wallet().get_wallet_handle(), + acme.profile.wallet().get_wallet_handle(), &acme.institution_did, ) .await @@ -194,14 +187,14 @@ async fn test_pool_write_new_endorser_did() { let attrib_json = json!({ "attrib_name": "foo"}).to_string(); assert!(add_attr( - &acme.profile.inject_indy_ledger_write(), + acme.profile.ledger_write(), &acme.institution_did, &attrib_json ) .await .is_err()); write_endorser_did( - &faber.profile.inject_indy_ledger_write(), + faber.profile.ledger_write(), &faber.institution_did, &acme.institution_did, &acme_vk, @@ -211,7 +204,7 @@ async fn test_pool_write_new_endorser_did() { .unwrap(); thread::sleep(Duration::from_millis(50)); add_attr( - &acme.profile.inject_indy_ledger_write(), + acme.profile.ledger_write(), &acme.institution_did, &attrib_json, ) @@ -224,28 +217,22 @@ async fn test_pool_write_new_endorser_did() { #[tokio::test] #[ignore] async fn test_pool_add_get_service_public() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let did = setup.institution_did.clone(); let create_service = EndpointDidSov::create() .set_service_endpoint("https://example.org".parse().unwrap()) .set_routing_keys(Some(vec!["did:sov:456".into()])); - write_endpoint( - &setup.profile.inject_indy_ledger_write(), - &did, - &create_service, - ) - .await - .unwrap(); + write_endpoint(setup.profile.ledger_write(), &did, &create_service) + .await + .unwrap(); thread::sleep(Duration::from_millis(50)); - let service = get_service(&setup.profile.inject_indy_ledger_read(), &did) + let service = get_service(setup.profile.ledger_read(), &did) .await .unwrap(); - let expect_recipient_key = get_verkey_from_ledger( - &setup.profile.inject_indy_ledger_read(), - &setup.institution_did, - ) - .await - .unwrap(); + let expect_recipient_key = + get_verkey_from_ledger(setup.profile.ledger_read(), &setup.institution_did) + .await + .unwrap(); let expect_service = AriesService::default() .set_service_endpoint("https://example.org".parse().unwrap()) .set_recipient_keys(vec![expect_recipient_key]) @@ -254,7 +241,7 @@ async fn test_pool_add_get_service_public() { // clean up written endpoint clear_attr( - &setup.profile.inject_indy_ledger_write(), + setup.profile.ledger_write(), &setup.institution_did, "endpoint", ) @@ -267,28 +254,22 @@ async fn test_pool_add_get_service_public() { #[tokio::test] #[ignore] async fn test_pool_add_get_service_public_none_routing_keys() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let did = setup.institution_did.clone(); let create_service = EndpointDidSov::create() .set_service_endpoint("https://example.org".parse().unwrap()) .set_routing_keys(None); - write_endpoint( - &setup.profile.inject_indy_ledger_write(), - &did, - &create_service, - ) - .await - .unwrap(); + write_endpoint(setup.profile.ledger_write(), &did, &create_service) + .await + .unwrap(); thread::sleep(Duration::from_millis(50)); - let service = get_service(&setup.profile.inject_indy_ledger_read(), &did) + let service = get_service(setup.profile.ledger_read(), &did) .await .unwrap(); - let expect_recipient_key = get_verkey_from_ledger( - &setup.profile.inject_indy_ledger_read(), - &setup.institution_did, - ) - .await - .unwrap(); + let expect_recipient_key = + get_verkey_from_ledger(setup.profile.ledger_read(), &setup.institution_did) + .await + .unwrap(); let expect_service = AriesService::default() 
.set_service_endpoint("https://example.org".parse().unwrap()) .set_recipient_keys(vec![expect_recipient_key]) @@ -297,7 +278,7 @@ async fn test_pool_add_get_service_public_none_routing_keys() { // clean up written endpoint clear_attr( - &setup.profile.inject_indy_ledger_write(), + setup.profile.ledger_write(), &setup.institution_did, "endpoint", ) @@ -310,7 +291,7 @@ async fn test_pool_add_get_service_public_none_routing_keys() { #[tokio::test] #[ignore] async fn test_pool_multiple_service_formats() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let did = setup.institution_did.clone(); // Write legacy service format @@ -318,12 +299,12 @@ async fn test_pool_multiple_service_formats() { .set_service_endpoint("https://example1.org".parse().unwrap()) .set_recipient_keys(vec!["did:sov:123".into()]) .set_routing_keys(vec!["did:sov:456".into()]); - write_endpoint_legacy(&setup.profile.inject_indy_ledger_write(), &did, &service_1) + write_endpoint_legacy(setup.profile.ledger_write(), &did, &service_1) .await .unwrap(); // Get service and verify it is in the old format - let service = get_service(&setup.profile.inject_indy_ledger_read(), &did) + let service = get_service(setup.profile.ledger_read(), &did) .await .unwrap(); assert_eq!(service_1, service); @@ -334,22 +315,20 @@ async fn test_pool_multiple_service_formats() { let service_2 = EndpointDidSov::create() .set_service_endpoint(endpoint_url_2.parse().unwrap()) .set_routing_keys(Some(routing_keys_2.clone())); - write_endpoint(&setup.profile.inject_indy_ledger_write(), &did, &service_2) + write_endpoint(setup.profile.ledger_write(), &did, &service_2) .await .unwrap(); thread::sleep(Duration::from_millis(50)); // Get service and verify it is in the new format - let service = get_service(&setup.profile.inject_indy_ledger_read(), &did) + let service = get_service(setup.profile.ledger_read(), &did) .await .unwrap(); - let expect_recipient_key = get_verkey_from_ledger( - &setup.profile.inject_indy_ledger_read(), - &setup.institution_did, - ) - .await - .unwrap(); + let expect_recipient_key = + get_verkey_from_ledger(setup.profile.ledger_read(), &setup.institution_did) + .await + .unwrap(); let expect_service = AriesService::default() .set_service_endpoint(endpoint_url_2.parse().unwrap()) .set_recipient_keys(vec![expect_recipient_key]) @@ -358,7 +337,7 @@ async fn test_pool_multiple_service_formats() { // Clear up written endpoint clear_attr( - &setup.profile.inject_indy_ledger_write(), + setup.profile.ledger_write(), &setup.institution_did, "endpoint", ) @@ -368,7 +347,7 @@ async fn test_pool_multiple_service_formats() { thread::sleep(Duration::from_millis(50)); // Get service and verify it is in the old format - let service = get_service(&setup.profile.inject_indy_ledger_read(), &did) + let service = get_service(setup.profile.ledger_read(), &did) .await .unwrap(); assert_eq!(service_1, service); @@ -379,7 +358,7 @@ async fn test_pool_multiple_service_formats() { #[tokio::test] #[ignore] async fn test_pool_add_get_attr() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let did = setup.institution_did.clone(); let attr_json = json!({ "attr_json": { @@ -387,35 +366,27 @@ async fn test_pool_add_get_attr() { "attr_key_2": "attr_value_2", } }); - add_attr( - &setup.profile.inject_indy_ledger_write(), - &did, - &attr_json.to_string(), - ) - .await - .unwrap(); + add_attr(setup.profile.ledger_write(), &did, &attr_json.to_string()) + .await + .unwrap(); thread::sleep(Duration::from_millis(50)); - 
let attr = get_attr(&setup.profile.inject_indy_ledger_read(), &did, "attr_json") + let attr = get_attr(setup.profile.ledger_read(), &did, "attr_json") .await .unwrap(); assert_eq!(attr, attr_json["attr_json"].to_string()); - clear_attr(&setup.profile.inject_indy_ledger_write(), &did, "attr_json") + clear_attr(setup.profile.ledger_write(), &did, "attr_json") .await .unwrap(); thread::sleep(Duration::from_millis(50)); - let attr = get_attr(&setup.profile.inject_indy_ledger_read(), &did, "attr_json") + let attr = get_attr(setup.profile.ledger_read(), &did, "attr_json") .await .unwrap(); assert_eq!(attr, ""); - let attr = get_attr( - &setup.profile.inject_indy_ledger_read(), - &did, - "nonexistent", - ) - .await - .unwrap(); + let attr = get_attr(setup.profile.ledger_read(), &did, "nonexistent") + .await + .unwrap(); assert_eq!(attr, ""); }) .await; @@ -424,20 +395,17 @@ async fn test_pool_add_get_attr() { #[tokio::test] #[ignore] async fn test_agency_pool_get_credential_def() { - SetupProfile::run(|mut setup| async move { + run_setup!(|setup| async move { let (_, _, cred_def_id, cred_def_json, _) = create_and_store_nonrevocable_credential_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, DEFAULT_SCHEMA_ATTRS, ) .await; - #[cfg(feature = "migration")] - setup.migrate().await; - - let ledger = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); + let ledger = setup.profile.ledger_read(); let r_cred_def_json = ledger.get_cred_def(&cred_def_id, None).await.unwrap(); let def1: serde_json::Value = serde_json::from_str(&cred_def_json).unwrap(); @@ -450,20 +418,20 @@ async fn test_agency_pool_get_credential_def() { #[tokio::test] #[ignore] async fn test_pool_rev_reg_def_fails_for_cred_def_created_without_revocation() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { // Cred def is created with support_revocation=false, // revoc_reg_def will fail in libindy because cred_Def doesn't have revocation keys let (_, _, cred_def_id, _, _) = create_and_store_nonrevocable_credential_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, DEFAULT_SCHEMA_ATTRS, ) .await; let rc = generate_rev_reg( - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &cred_def_id, get_temp_file_path("path.txt").to_str().unwrap(), @@ -472,9 +440,9 @@ async fn test_pool_rev_reg_def_fails_for_cred_def_created_without_revocation() { ) .await; - #[cfg(feature = "modular_libs")] + #[cfg(feature = "credx")] assert_eq!(rc.unwrap_err().kind(), AriesVcxErrorKind::InvalidState); - #[cfg(not(feature = "modular_libs"))] + #[cfg(not(feature = "credx"))] assert_eq!(rc.unwrap_err().kind(), AriesVcxErrorKind::InvalidInput); }) .await; @@ -483,18 +451,18 @@ async fn test_pool_rev_reg_def_fails_for_cred_def_created_without_revocation() { #[tokio::test] #[ignore] async fn test_pool_get_rev_reg_def_json() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let attrs = format!("{:?}", attr_names_address_list()); let (_, _, rev_reg) = create_and_store_revocable_credential_def( - &setup.profile.inject_anoncreds(), - 
&setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &attrs, ) .await; - let ledger = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); + let ledger = setup.profile.ledger_read(); let _json = ledger .get_rev_reg_def_json(&rev_reg.rev_reg_id) .await @@ -506,18 +474,18 @@ async fn test_pool_get_rev_reg_def_json() { #[tokio::test] #[ignore] async fn test_pool_get_rev_reg_delta_json() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let attrs = format!("{:?}", attr_names_address_list()); let (_, _, rev_reg) = create_and_store_revocable_credential_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &attrs, ) .await; - let ledger = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); + let ledger = setup.profile.ledger_read(); let (id, _delta, _timestamp) = ledger .get_rev_reg_delta_json(&rev_reg.rev_reg_id, None, None) .await @@ -531,18 +499,22 @@ async fn test_pool_get_rev_reg_delta_json() { #[tokio::test] #[ignore] async fn test_pool_get_rev_reg() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let attrs = format!("{:?}", attr_names_address_list()); let (_, _, rev_reg) = create_and_store_revocable_credential_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &attrs, ) .await; + assert_eq!( + TEST_TAILS_URL, + rev_reg.get_rev_reg_def().value.tails_location + ); - let ledger = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); + let ledger = setup.profile.ledger_read(); let (id, _rev_reg, _timestamp) = ledger .get_rev_reg( &rev_reg.rev_reg_id, @@ -559,16 +531,16 @@ async fn test_pool_get_rev_reg() { #[tokio::test] #[ignore] async fn test_pool_create_and_get_schema() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let schema = create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, DEFAULT_SCHEMA_ATTRS, ) .await; - let ledger = Arc::clone(&setup.profile).inject_anoncreds_ledger_read(); + let ledger = setup.profile.ledger_read(); let rc = ledger.get_schema(&schema.schema_id, None).await; let retrieved_schema = rc.unwrap(); @@ -580,12 +552,12 @@ async fn test_pool_create_and_get_schema() { #[tokio::test] #[ignore] async fn test_pool_create_rev_reg_delta_from_ledger() { - SetupProfile::run(|setup| async move { + run_setup!(|setup| async move { let attrs = format!("{:?}", attr_names_address_list()); let (_, _, rev_reg) = create_and_store_revocable_credential_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &attrs, ) @@ -593,7 +565,7 @@ async fn test_pool_create_rev_reg_delta_from_ledger() { let (_, rev_reg_delta_json, _) = setup .profile - .inject_anoncreds_ledger_read() + 
.ledger_read() .get_rev_reg_delta_json(&rev_reg.rev_reg_id, None, None) .await .unwrap(); diff --git a/aries_vcx/tests/test_proof_presentation.rs b/aries_vcx/tests/test_proof_presentation.rs index aaaa03ae55..4ba070df07 100644 --- a/aries_vcx/tests/test_proof_presentation.rs +++ b/aries_vcx/tests/test_proof_presentation.rs @@ -1,3 +1,5 @@ +#![allow(clippy::diverging_sub_expression)] + #[macro_use] extern crate log; #[macro_use] @@ -9,8 +11,8 @@ use aries_vcx::{ common::{ proofs::proof_request::PresentationRequestData, test_utils::{ - create_and_write_credential, create_and_write_test_cred_def, - create_and_write_test_rev_reg, create_and_write_test_schema, + create_and_publish_test_rev_reg, create_and_write_credential, + create_and_write_test_cred_def, create_and_write_test_schema, }, }, handlers::proof_presentation::{prover::Prover, verifier::Verifier}, @@ -20,12 +22,11 @@ use aries_vcx::{ state_machine::VerifierState, verification_status::PresentationVerificationStatus, }, }, - utils::devsetup::{SetupProfile, *}, + run_setup, + utils::devsetup::*, }; use messages::{msg_fields::protocols::present_proof::PresentProof, AriesMessage}; -#[cfg(feature = "migration")] -use crate::utils::migration::Migratable; use crate::utils::{ scenarios::{ accept_proof_proposal, create_address_schema_creddef_revreg, create_proof_proposal, @@ -38,33 +39,33 @@ use crate::utils::{ #[tokio::test] #[ignore] async fn test_agency_pool_generate_proof_with_predicates() { - SetupProfile::run(|mut setup| async move { + run_setup!(|setup| async move { let schema = create_and_write_test_schema( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, aries_vcx::utils::constants::DEFAULT_SCHEMA_ATTRS, ) .await; let cred_def = create_and_write_test_cred_def( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds_ledger_write(), + setup.profile.anoncreds(), + setup.profile.ledger_read(), + setup.profile.ledger_write(), &setup.institution_did, &schema.schema_id, true, ) .await; - let rev_reg = create_and_write_test_rev_reg( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds_ledger_write(), + let rev_reg = create_and_publish_test_rev_reg( + setup.profile.anoncreds(), + setup.profile.ledger_write(), &setup.institution_did, &cred_def.get_cred_def_id(), ) .await; let _cred_id = create_and_write_credential( - &setup.profile.inject_anoncreds(), - &setup.profile.inject_anoncreds(), + setup.profile.anoncreds(), + setup.profile.anoncreds(), &setup.institution_did, &cred_def, Some(&rev_reg), @@ -108,11 +109,8 @@ async fn test_agency_pool_generate_proof_with_predicates() { let mut proof: Prover = Prover::create_from_request("1", proof_req).unwrap(); - #[cfg(feature = "migration")] - setup.migrate().await; - let all_creds = proof - .retrieve_credentials(&setup.profile.inject_anoncreds()) + .retrieve_credentials(setup.profile.anoncreds()) .await .unwrap(); let selected_credentials: serde_json::Value = json!({ @@ -136,8 +134,8 @@ async fn test_agency_pool_generate_proof_with_predicates() { }); proof .generate_presentation( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), + setup.profile.ledger_read(), + setup.profile.anoncreds(), serde_json::from_value(selected_credentials).unwrap(), serde_json::from_value(self_attested).unwrap(), ) @@ -147,8 +145,8 @@ async fn 
test_agency_pool_generate_proof_with_predicates() { let final_message = verifier .verify_presentation( - &setup.profile.inject_anoncreds_ledger_read(), - &setup.profile.inject_anoncreds(), + setup.profile.ledger_read(), + setup.profile.anoncreds(), proof.get_presentation_msg().unwrap(), ) .await @@ -169,6 +167,7 @@ async fn test_agency_pool_generate_proof_with_predicates() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_presentation_via_proposal() { SetupPoolDirectory::run(|setup| async move { let mut institution = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -181,9 +180,6 @@ async fn test_agency_pool_presentation_via_proposal() { .await; let tails_dir = rev_reg.get_tails_dir(); - #[cfg(feature = "migration")] - institution.migrate().await; - exchange_credential_with_proposal( &mut consumer, &mut institution, @@ -201,9 +197,6 @@ async fn test_agency_pool_presentation_via_proposal() { let presentation_request = accept_proof_proposal(&mut institution, &mut verifier, presentation_proposal).await; - #[cfg(feature = "migration")] - consumer.migrate().await; - let selected_credentials = prover_select_credentials(&mut prover, &mut consumer, presentation_request, None).await; let presentation = @@ -217,6 +210,7 @@ async fn test_agency_pool_presentation_via_proposal() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_presentation_via_proposal_with_rejection() { SetupPoolDirectory::run(|setup| async move { let mut institution = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -229,9 +223,6 @@ async fn test_agency_pool_presentation_via_proposal_with_rejection() { .await; let tails_dir = rev_reg.get_tails_dir(); - #[cfg(feature = "migration")] - institution.migrate().await; - exchange_credential_with_proposal( &mut consumer, &mut institution, @@ -253,6 +244,7 @@ async fn test_agency_pool_presentation_via_proposal_with_rejection() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_presentation_via_proposal_with_negotiation() { SetupPoolDirectory::run(|setup| async move { let mut institution = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -265,9 +257,6 @@ async fn test_agency_pool_presentation_via_proposal_with_negotiation() { .await; let tails_dir = rev_reg.get_tails_dir(); - #[cfg(feature = "migration")] - institution.migrate().await; - exchange_credential_with_proposal( &mut consumer, &mut institution, @@ -281,9 +270,6 @@ async fn test_agency_pool_presentation_via_proposal_with_negotiation() { let mut prover = Prover::create("1").unwrap(); let mut verifier = Verifier::create("1").unwrap(); - #[cfg(feature = "migration")] - consumer.migrate().await; - let presentation_proposal = create_proof_proposal(&mut prover, &cred_def.get_cred_def_id()).await; let presentation_request = diff --git a/aries_vcx/tests/test_revocations.rs b/aries_vcx/tests/test_revocations.rs index e6d9ceb750..8fe1aa29d0 100644 --- a/aries_vcx/tests/test_revocations.rs +++ b/aries_vcx/tests/test_revocations.rs @@ -8,14 +8,13 @@ pub mod utils; use std::{thread, time::Duration}; use aries_vcx::{ + core::profile::Profile, protocols::proof_presentation::verifier::{ state_machine::VerifierState, verification_status::PresentationVerificationStatus, }, utils::devsetup::*, }; -#[cfg(feature = "migration")] -use crate::utils::migration::Migratable; use crate::utils::{ scenarios::{ create_address_schema_creddef_revreg, create_proof_request_data, @@ -38,25 +37,19 @@ async fn 
test_agency_pool_basic_revocation() { let (schema, cred_def, rev_reg, issuer) = issue_address_credential(&mut consumer, &mut institution).await; - #[cfg(feature = "migration")] - institution.migrate().await; - assert!(!issuer - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); let time_before_revocation = time::OffsetDateTime::now_utc().unix_timestamp() as u64; revoke_credential_and_publish_accumulator(&mut institution, &issuer, &rev_reg).await; - #[cfg(feature = "migration")] - consumer.migrate().await; - tokio::time::sleep(Duration::from_millis(1000)).await; let time_after_revocation = time::OffsetDateTime::now_utc().unix_timestamp() as u64; assert!(issuer - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); @@ -87,8 +80,8 @@ async fn test_agency_pool_basic_revocation() { verifier .verify_presentation( - &institution.profile.inject_anoncreds_ledger_read(), - &institution.profile.inject_anoncreds(), + institution.profile.ledger_read(), + institution.profile.anoncreds(), presentation, ) .await @@ -113,13 +106,10 @@ async fn test_agency_pool_revoked_credential_might_still_work() { issue_address_credential(&mut consumer, &mut institution).await; assert!(!issuer - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); - #[cfg(feature = "migration")] - institution.migrate().await; - tokio::time::sleep(Duration::from_millis(1000)).await; let time_before_revocation = time::OffsetDateTime::now_utc().unix_timestamp() as u64; tokio::time::sleep(Duration::from_millis(1000)).await; @@ -127,9 +117,6 @@ async fn test_agency_pool_revoked_credential_might_still_work() { revoke_credential_and_publish_accumulator(&mut institution, &issuer, &rev_reg).await; tokio::time::sleep(Duration::from_millis(1000)).await; - #[cfg(feature = "migration")] - consumer.migrate().await; - let from = time_before_revocation - 100; let to = time_before_revocation; let requested_attrs = requested_attrs_address( @@ -159,8 +146,8 @@ async fn test_agency_pool_revoked_credential_might_still_work() { verifier .verify_presentation( - &institution.profile.inject_anoncreds_ledger_read(), - &institution.profile.inject_anoncreds(), + institution.profile.ledger_read(), + institution.profile.anoncreds(), presentation, ) .await @@ -184,12 +171,9 @@ async fn test_agency_pool_local_revocation() { let (schema, cred_def, rev_reg, issuer) = issue_address_credential(&mut consumer, &mut institution).await; - #[cfg(feature = "migration")] - institution.migrate().await; - revoke_credential_local(&mut institution, &issuer, &rev_reg.rev_reg_id).await; assert!(!issuer - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); @@ -207,7 +191,7 @@ async fn test_agency_pool_local_revocation() { ); assert!(!issuer - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); @@ -227,7 +211,7 @@ async fn test_agency_pool_local_revocation() { ); assert!(issuer - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); }) @@ -260,9 +244,6 @@ async fn test_agency_batch_revocation() { ) .await; - #[cfg(feature = "migration")] - institution.migrate().await; - let issuer_credential2 = 
exchange_credential( &mut consumer2, &mut institution, @@ -273,9 +254,6 @@ async fn test_agency_batch_revocation() { ) .await; - #[cfg(feature = "migration")] - consumer1.migrate().await; - let issuer_credential3 = exchange_credential( &mut consumer3, &mut institution, @@ -289,24 +267,18 @@ async fn test_agency_batch_revocation() { revoke_credential_local(&mut institution, &issuer_credential1, &rev_reg.rev_reg_id).await; revoke_credential_local(&mut institution, &issuer_credential2, &rev_reg.rev_reg_id).await; assert!(!issuer_credential1 - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential3 - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); - #[cfg(feature = "migration")] - consumer2.migrate().await; - - #[cfg(feature = "migration")] - consumer3.migrate().await; - // Revoke two locally and verify their are all still valid let verifier_handler = exchange_proof( &mut institution, @@ -350,15 +322,15 @@ async fn test_agency_batch_revocation() { tokio::time::sleep(Duration::from_millis(1000)).await; assert!(issuer_credential1 - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); assert!(issuer_credential2 - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential3 - .is_revoked(&institution.profile.inject_anoncreds_ledger_read()) + .is_revoked(institution.profile.ledger_read()) .await .unwrap()); @@ -404,6 +376,7 @@ async fn test_agency_batch_revocation() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_two_creds_one_rev_reg_revoke_first() { SetupPoolDirectory::run(|setup| async move { let mut issuer = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -423,9 +396,6 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_first() { ) .await; - #[cfg(feature = "migration")] - issuer.migrate().await; - let credential_data2 = credential_data_address_2().to_string(); let issuer_credential2 = exchange_credential( &mut consumer, @@ -438,17 +408,14 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_first() { .await; assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); - #[cfg(feature = "migration")] - verifier.migrate().await; - revoke_credential_and_publish_accumulator(&mut issuer, &issuer_credential1, &rev_reg).await; let mut proof_verifier = verifier_create_proof_and_send_request( @@ -467,8 +434,8 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_first() { .await; proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -494,13 +461,10 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_first() { ) .await; - #[cfg(feature = "migration")] - consumer.migrate().await; - proof_verifier 
.verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -511,11 +475,11 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_first() { ); assert!(issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); }) @@ -524,6 +488,7 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_first() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_two_creds_one_rev_reg_revoke_second() { SetupPoolDirectory::run(|setup| async move { let mut issuer = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -543,9 +508,6 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_second() { ) .await; - #[cfg(feature = "migration")] - issuer.migrate().await; - let credential_data2 = credential_data_address_2().to_string(); let issuer_credential2 = exchange_credential( &mut consumer, @@ -558,17 +520,14 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_second() { .await; assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); - #[cfg(feature = "migration")] - verifier.migrate().await; - revoke_credential_and_publish_accumulator(&mut issuer, &issuer_credential2, &rev_reg).await; let mut proof_verifier = verifier_create_proof_and_send_request( @@ -586,8 +545,8 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_second() { .await; proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -612,13 +571,10 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_second() { ) .await; - #[cfg(feature = "migration")] - consumer.migrate().await; - proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -629,11 +585,11 @@ async fn test_agency_pool_two_creds_one_rev_reg_revoke_second() { ); assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); }) @@ -661,9 +617,6 @@ async fn test_agency_pool_two_creds_two_rev_reg_id() { ) .await; - #[cfg(feature = "migration")] - issuer.migrate().await; - let rev_reg_2 = rotate_rev_reg(&mut issuer, &cred_def, &rev_reg).await; let credential_data2 = credential_data_address_2().to_string(); let issuer_credential2 = exchange_credential( @@ -691,8 +644,8 @@ async fn test_agency_pool_two_creds_two_rev_reg_id() { .await; proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ 
-703,9 +656,6 @@ async fn test_agency_pool_two_creds_two_rev_reg_id() { PresentationVerificationStatus::Valid ); - #[cfg(feature = "migration")] - verifier.migrate().await; - let mut proof_verifier = verifier_create_proof_and_send_request( &mut verifier, &schema.schema_id, @@ -714,9 +664,6 @@ async fn test_agency_pool_two_creds_two_rev_reg_id() { ) .await; - #[cfg(feature = "migration")] - consumer.migrate().await; - let presentation = prover_select_credentials_and_send_proof( &mut consumer, proof_verifier.get_presentation_request_msg().unwrap(), @@ -725,8 +672,8 @@ async fn test_agency_pool_two_creds_two_rev_reg_id() { .await; proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -738,11 +685,11 @@ async fn test_agency_pool_two_creds_two_rev_reg_id() { ); assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); }) @@ -751,6 +698,7 @@ async fn test_agency_pool_two_creds_two_rev_reg_id() { #[tokio::test] #[ignore] +#[allow(unused_mut)] async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_first() { SetupPoolDirectory::run(|setup| async move { let mut issuer = create_test_agent_trustee(setup.genesis_file_path.clone()).await; @@ -770,9 +718,6 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_first() { ) .await; - #[cfg(feature = "migration")] - issuer.migrate().await; - let rev_reg_2 = rotate_rev_reg(&mut issuer, &cred_def, &rev_reg).await; let credential_data2 = credential_data_address_2().to_string(); let issuer_credential2 = exchange_credential( @@ -786,19 +731,16 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_first() { .await; assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); revoke_credential_and_publish_accumulator(&mut issuer, &issuer_credential1, &rev_reg).await; - #[cfg(feature = "migration")] - verifier.migrate().await; - let mut proof_verifier = verifier_create_proof_and_send_request( &mut verifier, &schema.schema_id, @@ -814,8 +756,8 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_first() { .await; proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -840,13 +782,10 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_first() { ) .await; - #[cfg(feature = "migration")] - consumer.migrate().await; - proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -857,11 +796,11 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_first() { ); assert!(issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - 
.is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); }) @@ -889,9 +828,6 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_second() { ) .await; - #[cfg(feature = "migration")] - issuer.migrate().await; - let rev_reg_2 = rotate_rev_reg(&mut issuer, &cred_def, &rev_reg).await; let credential_data2 = credential_data_address_2().to_string(); let issuer_credential2 = exchange_credential( @@ -905,11 +841,11 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_second() { .await; assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(!issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); @@ -930,13 +866,10 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_second() { ) .await; - #[cfg(feature = "migration")] - verifier.migrate().await; - proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -961,13 +894,10 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_second() { ) .await; - #[cfg(feature = "migration")] - consumer.migrate().await; - proof_verifier .verify_presentation( - &verifier.profile.inject_anoncreds_ledger_read(), - &verifier.profile.inject_anoncreds(), + verifier.profile.ledger_read(), + verifier.profile.anoncreds(), presentation, ) .await @@ -979,11 +909,11 @@ async fn test_agency_pool_two_creds_two_rev_reg_id_revoke_second() { ); assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); }) @@ -1011,7 +941,7 @@ async fn test_agency_pool_three_creds_one_rev_reg_revoke_all() { .await; assert!(!issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); revoke_credential_local(&mut issuer, &issuer_credential1, &rev_reg.rev_reg_id).await; @@ -1027,16 +957,10 @@ async fn test_agency_pool_three_creds_one_rev_reg_revoke_all() { .await; assert!(!issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); - #[cfg(feature = "migration")] - issuer.migrate().await; - - #[cfg(feature = "migration")] - consumer.migrate().await; - revoke_credential_local(&mut issuer, &issuer_credential2, &rev_reg.rev_reg_id).await; let issuer_credential3 = exchange_credential( @@ -1053,15 +977,15 @@ async fn test_agency_pool_three_creds_one_rev_reg_revoke_all() { thread::sleep(Duration::from_millis(100)); assert!(issuer_credential1 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(issuer_credential2 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); assert!(issuer_credential3 - .is_revoked(&issuer.profile.inject_anoncreds_ledger_read()) + .is_revoked(issuer.profile.ledger_read()) .await .unwrap()); }) diff --git a/aries_vcx/tests/utils/migration.rs b/aries_vcx/tests/utils/migration.rs deleted 
file mode 100644 index 96265ba007..0000000000 --- a/aries_vcx/tests/utils/migration.rs +++ /dev/null @@ -1,68 +0,0 @@ -use std::sync::Arc; - -use aries_vcx::{ - global::settings::WALLET_KDF_RAW, - utils::devsetup::{dev_build_profile_modular, SetupProfile}, -}; -use aries_vcx_core::{ - wallet::indy::{wallet::create_and_open_wallet, IndySdkWallet, WalletConfig}, - WalletHandle, -}; -use async_trait::async_trait; -use uuid::Uuid; - -use crate::utils::test_agent::TestAgent; - -#[async_trait] -pub trait Migratable { - async fn migrate(&mut self); -} - -#[async_trait] -impl Migratable for SetupProfile { - async fn migrate(&mut self) { - info!("SetupProfile::migrate >>>"); - let old_wh = self.profile.wallet_handle().unwrap(); - let new_wh = migrate_to_new_wallet(old_wh).await; - let wallet = Arc::new(IndySdkWallet::new(new_wh)); - self.profile = dev_build_profile_modular(self.genesis_file_path.clone(), wallet); - } -} - -#[async_trait] -impl Migratable for TestAgent { - async fn migrate(&mut self) { - info!("Faber::migrate >>>"); - let old_wh = self.profile.wallet_handle().unwrap(); - let new_wh = migrate_to_new_wallet(old_wh).await; - let wallet = Arc::new(IndySdkWallet::new(new_wh)); - self.profile = dev_build_profile_modular(self.genesis_file_path.clone(), wallet.clone()); - } -} - -async fn migrate_to_new_wallet(src_wallet_handle: WalletHandle) -> WalletHandle { - let wallet_config = make_wallet_config(); - let dest_wallet_handle = create_and_open_wallet(&wallet_config).await.unwrap(); - - wallet_migrator::migrate_wallet( - src_wallet_handle, - dest_wallet_handle, - wallet_migrator::vdrtools2credx::migrate_any_record, - ) - .await - .unwrap(); - - dest_wallet_handle -} - -fn make_wallet_config() -> WalletConfig { - let wallet_key = "8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY".to_owned(); - let wallet_name = format!("wallet_{}", Uuid::new_v4()); - - WalletConfig { - wallet_name, - wallet_key, - wallet_key_derivation: WALLET_KDF_RAW.to_string(), - ..Default::default() - } -} diff --git a/aries_vcx/tests/utils/mod.rs b/aries_vcx/tests/utils/mod.rs index c1c8ca93b5..eabb184af4 100644 --- a/aries_vcx/tests/utils/mod.rs +++ b/aries_vcx/tests/utils/mod.rs @@ -1,5 +1,3 @@ -#[cfg(feature = "migration")] -pub mod migration; pub mod scenarios; pub mod test_agent; pub mod test_macros; diff --git a/aries_vcx/tests/utils/scenarios/connection.rs b/aries_vcx/tests/utils/scenarios/connection.rs index f3f3c4ed7e..dfee4c1359 100644 --- a/aries_vcx/tests/utils/scenarios/connection.rs +++ b/aries_vcx/tests/utils/scenarios/connection.rs @@ -1,5 +1,6 @@ use aries_vcx::{ common::ledger::transactions::into_did_doc, + core::profile::Profile, errors::error::VcxResult, handlers::{out_of_band::sender::OutOfBandSender, util::AnyInvitation}, protocols::{ @@ -24,9 +25,9 @@ use uuid::Uuid; use crate::utils::test_agent::TestAgent; -async fn establish_connection_from_invite( - alice: &mut TestAgent, - faber: &mut TestAgent, +async fn establish_connection_from_invite( + alice: &mut TestAgent, + faber: &mut TestAgent, invitation: AnyInvitation, inviter_pairwise_info: PairwiseInfo, ) -> (GenericConnection, GenericConnection) { @@ -40,11 +41,9 @@ async fn establish_connection_from_invite( } } - let invitee_pairwise_info = PairwiseInfo::create(&alice.profile.inject_wallet()) - .await - .unwrap(); + let invitee_pairwise_info = PairwiseInfo::create(alice.profile.wallet()).await.unwrap(); let invitee = Connection::new_invitee("".to_owned(), invitee_pairwise_info) - .accept_invitation(&alice.profile.inject_indy_ledger_read(), 
invitation.clone()) + .accept_invitation(alice.profile.ledger_read(), invitation.clone()) .await .unwrap() .prepare_request("http://dummy.org".parse().unwrap(), vec![]) @@ -55,18 +54,17 @@ async fn establish_connection_from_invite( let inviter = Connection::new_inviter("".to_owned(), inviter_pairwise_info) .into_invited(invitation.id()) .handle_request( - &faber.profile.inject_wallet(), + faber.profile.wallet(), request, "http://dummy.org".parse().unwrap(), vec![], - &DummyHttpClient, ) .await .unwrap(); let response = inviter.get_connection_response_msg(); let invitee = invitee - .handle_response(&alice.profile.inject_wallet(), response, &DummyHttpClient) + .handle_response(alice.profile.wallet(), response) .await .unwrap(); let ack = invitee.get_ack(); @@ -76,9 +74,9 @@ async fn establish_connection_from_invite( (invitee.into(), inviter.into()) } -pub async fn create_connections_via_oob_invite( - alice: &mut TestAgent, - faber: &mut TestAgent, +pub async fn create_connections_via_oob_invite( + alice: &mut TestAgent, + faber: &mut TestAgent, ) -> (GenericConnection, GenericConnection) { let oob_sender = OutOfBandSender::create() .set_label("test-label") @@ -90,20 +88,20 @@ pub async fn create_connections_via_oob_invite( ))) .unwrap(); let invitation = AnyInvitation::Oob(oob_sender.oob.clone()); - let ddo = into_did_doc(&alice.profile.inject_indy_ledger_read(), &invitation) + let ddo = into_did_doc(alice.profile.ledger_read(), &invitation) .await .unwrap(); // TODO: Create a key and write on ledger instead let inviter_pairwise_info = PairwiseInfo { - pw_did: ddo.clone().id.clone(), + pw_did: ddo.clone().id, pw_vk: ddo.recipient_keys().unwrap().first().unwrap().to_string(), }; establish_connection_from_invite(alice, faber, invitation, inviter_pairwise_info).await } -pub async fn create_connections_via_public_invite( - alice: &mut TestAgent, - faber: &mut TestAgent, +pub async fn create_connections_via_public_invite( + alice: &mut TestAgent, + faber: &mut TestAgent, ) -> (GenericConnection, GenericConnection) { let content = InvitationContent::builder_public() .label("faber".to_owned()) @@ -116,25 +114,23 @@ pub async fn create_connections_via_public_invite( .content(content) .build(), ); - let ddo = into_did_doc(&alice.profile.inject_indy_ledger_read(), &public_invite) + let ddo = into_did_doc(alice.profile.ledger_read(), &public_invite) .await .unwrap(); // TODO: Create a key and write on ledger instead let inviter_pairwise_info = PairwiseInfo { - pw_did: ddo.clone().id.clone(), + pw_did: ddo.clone().id, pw_vk: ddo.recipient_keys().unwrap().first().unwrap().to_string(), }; establish_connection_from_invite(alice, faber, public_invite.clone(), inviter_pairwise_info) .await } -pub async fn create_connections_via_pairwise_invite( - alice: &mut TestAgent, - faber: &mut TestAgent, +pub async fn create_connections_via_pairwise_invite( + alice: &mut TestAgent, + faber: &mut TestAgent, ) -> (GenericConnection, GenericConnection) { - let inviter_pairwise_info = PairwiseInfo::create(&faber.profile.inject_wallet()) - .await - .unwrap(); + let inviter_pairwise_info = PairwiseInfo::create(faber.profile.wallet()).await.unwrap(); let invite = { let id = Uuid::new_v4().to_string(); let content = InvitationContent::builder_pairwise() diff --git a/aries_vcx/tests/utils/scenarios/credential_issuance.rs b/aries_vcx/tests/utils/scenarios/credential_issuance.rs index 403ea141a0..7763ce78e0 100644 --- a/aries_vcx/tests/utils/scenarios/credential_issuance.rs +++ 
b/aries_vcx/tests/utils/scenarios/credential_issuance.rs @@ -1,4 +1,4 @@ -use std::{sync::Arc, thread, time::Duration}; +use std::{thread, time::Duration}; use aries_vcx::{ common::{ @@ -7,11 +7,11 @@ use aries_vcx::{ revocation_registry::RevocationRegistry, }, test_utils::{ - create_and_write_test_cred_def, create_and_write_test_rev_reg, + create_and_publish_test_rev_reg, create_and_write_test_cred_def, create_and_write_test_schema, }, }, - core::profile::profile::Profile, + core::profile::Profile, handlers::{ issuance::{holder::Holder, issuer::Issuer}, util::OfferInfo, @@ -34,33 +34,33 @@ use serde_json::json; use super::{attr_names_address_list, create_credential_proposal, credential_data_address_1}; use crate::utils::test_agent::TestAgent; -pub async fn create_address_schema_creddef_revreg( - profile: &Arc, +pub async fn create_address_schema_creddef_revreg( + profile: &P, institution_did: &str, ) -> (Schema, CredentialDef, RevocationRegistry) { - let ledger_read = profile.inject_anoncreds_ledger_read(); - let ledger_write = profile.inject_anoncreds_ledger_write(); - let anoncreds = profile.inject_anoncreds(); + let ledger_read = profile.ledger_read(); + let ledger_write = profile.ledger_write(); + let anoncreds = profile.anoncreds(); let schema = create_and_write_test_schema( - &anoncreds, - &ledger_write, + anoncreds, + ledger_write, institution_did, &json!(attr_names_address_list()).to_string(), ) .await; let cred_def = create_and_write_test_cred_def( - &anoncreds, - &ledger_read, - &ledger_write, + anoncreds, + ledger_read, + ledger_write, institution_did, &schema.schema_id, true, ) .await; - let rev_reg = create_and_write_test_rev_reg( - &anoncreds, - &ledger_write, + let rev_reg = create_and_publish_test_rev_reg( + anoncreds, + ledger_write, institution_did, &cred_def.get_cred_def_id(), ) @@ -83,8 +83,8 @@ pub fn create_issuer_from_proposal(proposal: ProposeCredentialV1) -> Issuer { issuer } -pub async fn accept_credential_proposal( - faber: &mut TestAgent, +pub async fn accept_credential_proposal( + faber: &mut TestAgent
, issuer: &mut Issuer, cred_proposal: ProposeCredentialV1, rev_reg_id: Option, @@ -98,7 +98,7 @@ pub async fn accept_credential_proposal( }; issuer .build_credential_offer_msg( - &faber.profile.inject_anoncreds(), + faber.profile.anoncreds(), offer_info, Some("comment".into()), ) @@ -107,16 +107,16 @@ pub async fn accept_credential_proposal( issuer.get_credential_offer().unwrap() } -pub async fn accept_offer( - alice: &mut TestAgent, +pub async fn accept_offer( + alice: &mut TestAgent
, cred_offer: OfferCredentialV1, holder: &mut Holder, ) -> RequestCredentialV1 { // TODO: Replace with message-specific handler holder .process_aries_msg( - &alice.profile.inject_anoncreds_ledger_read(), - &alice.profile.inject_anoncreds(), + alice.profile.ledger_read(), + alice.profile.anoncreds(), cred_offer.into(), ) .await @@ -125,9 +125,9 @@ pub async fn accept_offer( assert!(holder.get_offer().is_ok()); holder .prepare_credential_request( - &alice.profile.inject_anoncreds_ledger_read(), - &alice.profile.inject_anoncreds(), - PairwiseInfo::create(&alice.profile.inject_wallet()) + alice.profile.ledger_read(), + alice.profile.anoncreds(), + PairwiseInfo::create(alice.profile.wallet()) .await .unwrap() .pw_did, @@ -138,16 +138,16 @@ pub async fn accept_offer( holder.get_msg_credential_request().unwrap() } -pub async fn decline_offer( - alice: &mut TestAgent, +pub async fn decline_offer( + alice: &mut TestAgent
, cred_offer: OfferCredentialV1, holder: &mut Holder, ) -> ProblemReport { // TODO: Replace with message-specific handler holder .process_aries_msg( - &alice.profile.inject_anoncreds_ledger_read(), - &alice.profile.inject_anoncreds(), + alice.profile.ledger_read(), + alice.profile.anoncreds(), cred_offer.into(), ) .await @@ -158,9 +158,9 @@ pub async fn decline_offer( problem_report } -pub async fn send_credential( - alice: &mut TestAgent, - faber: &mut TestAgent, +pub async fn send_credential( + alice: &mut TestAgent, + faber: &mut TestAgent, issuer_credential: &mut Issuer, holder_credential: &mut Holder, cred_request: RequestCredentialV1, @@ -179,7 +179,7 @@ pub async fn send_credential( assert_eq!(thread_id, issuer_credential.get_thread_id().unwrap()); issuer_credential - .build_credential(&faber.profile.inject_anoncreds()) + .build_credential(faber.profile.anoncreds()) .await .unwrap(); let credential = issuer_credential.get_msg_issue_credential().unwrap(); @@ -188,15 +188,15 @@ pub async fn send_credential( assert_eq!(thread_id, holder_credential.get_thread_id().unwrap()); assert_eq!( holder_credential - .is_revokable(&alice.profile.inject_anoncreds_ledger_read()) + .is_revokable(alice.profile.ledger_read()) .await .unwrap(), revokable ); holder_credential .process_credential( - &alice.profile.inject_anoncreds_ledger_read(), - &alice.profile.inject_anoncreds(), + alice.profile.ledger_read(), + alice.profile.anoncreds(), credential, ) .await @@ -204,7 +204,7 @@ pub async fn send_credential( assert_eq!(HolderState::Finished, holder_credential.get_state()); assert_eq!( holder_credential - .is_revokable(&alice.profile.inject_anoncreds_ledger_read()) + .is_revokable(alice.profile.ledger_read()) .await .unwrap(), revokable @@ -220,9 +220,9 @@ pub async fn send_credential( } } -pub async fn issue_address_credential( - consumer: &mut TestAgent, - institution: &mut TestAgent, +pub async fn issue_address_credential( + consumer: &mut TestAgent, + institution: &mut TestAgent, ) -> (Schema, CredentialDef, RevocationRegistry, Issuer) { let (schema, cred_def, rev_reg) = create_address_schema_creddef_revreg(&institution.profile, &institution.institution_did) @@ -239,9 +239,9 @@ pub async fn issue_address_credential( (schema, cred_def, rev_reg, issuer) } -pub async fn exchange_credential( - consumer: &mut TestAgent, - institution: &mut TestAgent, +pub async fn exchange_credential( + consumer: &mut TestAgent, + institution: &mut TestAgent, credential_data: String, cred_def: &CredentialDef, rev_reg: &RevocationRegistry, @@ -262,18 +262,15 @@ pub async fn exchange_credential( ) .await; assert!(!holder_credential - .is_revoked( - &consumer.profile.inject_anoncreds_ledger_read(), - &consumer.profile.inject_anoncreds(), - ) + .is_revoked(consumer.profile.ledger_read(), consumer.profile.anoncreds(),) .await .unwrap()); issuer } -pub async fn exchange_credential_with_proposal( - consumer: &mut TestAgent, - institution: &mut TestAgent, +pub async fn exchange_credential_with_proposal( + consumer: &mut TestAgent, + institution: &mut TestAgent, schema_id: &str, cred_def_id: &str, rev_reg_id: Option, @@ -304,8 +301,8 @@ pub async fn exchange_credential_with_proposal( (holder, issuer) } -async fn create_credential_offer( - faber: &mut TestAgent, +async fn create_credential_offer( + faber: &mut TestAgent
, cred_def: &CredentialDef, rev_reg: &RevocationRegistry, credential_json: &str, @@ -320,7 +317,7 @@ async fn create_credential_offer( let mut issuer = Issuer::create("1").unwrap(); issuer .build_credential_offer_msg( - &faber.profile.inject_anoncreds(), + faber.profile.anoncreds(), offer_info, comment.map(String::from), ) @@ -329,14 +326,17 @@ async fn create_credential_offer( issuer } -async fn create_credential_request(alice: &mut TestAgent, cred_offer: OfferCredentialV1) -> Holder { +async fn create_credential_request( + alice: &mut TestAgent
, + cred_offer: OfferCredentialV1, +) -> Holder { let mut holder = Holder::create_from_offer("TEST_CREDENTIAL", cred_offer).unwrap(); assert_eq!(HolderState::OfferReceived, holder.get_state()); holder .prepare_credential_request( - &alice.profile.inject_anoncreds_ledger_read(), - &alice.profile.inject_anoncreds(), - PairwiseInfo::create(&alice.profile.inject_wallet()) + alice.profile.ledger_read(), + alice.profile.anoncreds(), + PairwiseInfo::create(alice.profile.wallet()) .await .unwrap() .pw_did, diff --git a/aries_vcx/tests/utils/scenarios/proof_presentation.rs b/aries_vcx/tests/utils/scenarios/proof_presentation.rs index 0bfb10511b..1b6d327d0b 100644 --- a/aries_vcx/tests/utils/scenarios/proof_presentation.rs +++ b/aries_vcx/tests/utils/scenarios/proof_presentation.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, sync::Arc}; +use std::collections::HashMap; use aries_vcx::{ common::{ @@ -7,6 +7,7 @@ use aries_vcx::{ }, proofs::{proof_request::PresentationRequestData, proof_request_internal::AttrInfo}, }, + core::profile::Profile, handlers::{ issuance::issuer::Issuer, proof_presentation::{ @@ -24,7 +25,9 @@ use aries_vcx::{ }, utils::constants::{DEFAULT_PROOF_NAME, TEST_TAILS_URL}, }; -use aries_vcx_core::ledger::indy::pool::test_utils::get_temp_dir_path; +use aries_vcx_core::ledger::{ + base_ledger::AnoncredsLedgerRead, indy::pool::test_utils::get_temp_dir_path, +}; use messages::{ msg_fields::protocols::{ present_proof::{ @@ -54,15 +57,15 @@ pub async fn create_proof_proposal(prover: &mut Prover, cred_def_id: &str) -> Pr proposal } -pub async fn accept_proof_proposal( - faber: &mut TestAgent, +pub async fn accept_proof_proposal( + faber: &mut TestAgent
, verifier: &mut Verifier, presentation_proposal: ProposePresentation, ) -> RequestPresentation { verifier .process_aries_msg( - &faber.profile.inject_anoncreds_ledger_read(), - &faber.profile.inject_anoncreds(), + faber.profile.ledger_read(), + faber.profile.anoncreds(), presentation_proposal.clone().into(), ) .await @@ -82,7 +85,7 @@ pub async fn accept_proof_proposal( }) .collect(); let presentation_request_data = - PresentationRequestData::create(&faber.profile.inject_anoncreds(), "request-1") + PresentationRequestData::create(faber.profile.anoncreds(), "request-1") .await .unwrap() .set_requested_attributes_as_vec(attrs) @@ -113,25 +116,22 @@ pub async fn receive_proof_proposal_rejection(prover: &mut Prover, rejection: Pr assert_eq!(prover.get_state(), ProverState::Failed); } -pub async fn create_proof_request_data( - faber: &mut TestAgent, +pub async fn create_proof_request_data( + faber: &mut TestAgent
, requested_attrs: &str, requested_preds: &str, revocation_interval: &str, request_name: Option<&str>, ) -> PresentationRequestData { - PresentationRequestData::create( - &faber.profile.inject_anoncreds(), - request_name.unwrap_or("name"), - ) - .await - .unwrap() - .set_requested_attributes_as_string(requested_attrs.to_string()) - .unwrap() - .set_requested_predicates_as_string(requested_preds.to_string()) - .unwrap() - .set_not_revoked_interval(revocation_interval.to_string()) - .unwrap() + PresentationRequestData::create(faber.profile.anoncreds(), request_name.unwrap_or("name")) + .await + .unwrap() + .set_requested_attributes_as_string(requested_attrs.to_string()) + .unwrap() + .set_requested_predicates_as_string(requested_preds.to_string()) + .unwrap() + .set_not_revoked_interval(revocation_interval.to_string()) + .unwrap() } pub async fn create_prover_from_request(presentation_request: RequestPresentation) -> Prover { @@ -147,8 +147,8 @@ pub async fn create_verifier_from_request_data( verifier } -pub async fn generate_and_send_proof( - alice: &mut TestAgent, +pub async fn generate_and_send_proof( + alice: &mut TestAgent
, prover: &mut Prover, selected_credentials: SelectedCredentials, ) -> Option { @@ -159,8 +159,8 @@ pub async fn generate_and_send_proof( ); prover .generate_presentation( - &alice.profile.inject_anoncreds_ledger_read(), - &alice.profile.inject_anoncreds(), + alice.profile.ledger_read(), + alice.profile.anoncreds(), selected_credentials, HashMap::new(), ) @@ -182,15 +182,15 @@ pub async fn generate_and_send_proof( } } -pub async fn verify_proof( - faber: &mut TestAgent, +pub async fn verify_proof( + faber: &mut TestAgent
, verifier: &mut Verifier, presentation: Presentation, ) -> AckPresentation { let msg = verifier .verify_presentation( - &faber.profile.inject_anoncreds_ledger_read(), - &faber.profile.inject_anoncreds(), + faber.profile.ledger_read(), + faber.profile.anoncreds(), presentation, ) .await @@ -208,8 +208,8 @@ pub async fn verify_proof( msg } -pub async fn revoke_credential_and_publish_accumulator( - faber: &mut TestAgent, +pub async fn revoke_credential_and_publish_accumulator( + faber: &mut TestAgent
, issuer_credential: &Issuer, rev_reg: &RevocationRegistry, ) { @@ -217,27 +217,27 @@ pub async fn revoke_credential_and_publish_accumulator( rev_reg .publish_local_revocations( - &faber.profile.inject_anoncreds(), - &faber.profile.inject_anoncreds_ledger_write(), + faber.profile.anoncreds(), + faber.profile.ledger_write(), &faber.institution_did, ) .await .unwrap(); } -pub async fn revoke_credential_local( - faber: &mut TestAgent, +pub async fn revoke_credential_local( + faber: &mut TestAgent
, issuer_credential: &Issuer, rev_reg_id: &str, ) { - let ledger = Arc::clone(&faber.profile).inject_anoncreds_ledger_read(); + let ledger = faber.profile.ledger_read(); let (_, delta, timestamp) = ledger .get_rev_reg_delta_json(rev_reg_id, None, None) .await .unwrap(); issuer_credential - .revoke_credential_local(&faber.profile.inject_anoncreds()) + .revoke_credential_local(faber.profile.anoncreds()) .await .unwrap(); @@ -250,13 +250,13 @@ pub async fn revoke_credential_local( // cache } -pub async fn rotate_rev_reg( - faber: &mut TestAgent, +pub async fn rotate_rev_reg( + faber: &mut TestAgent
, credential_def: &CredentialDef, rev_reg: &RevocationRegistry, ) -> RevocationRegistry { let mut rev_reg = RevocationRegistry::create( - &faber.profile.inject_anoncreds(), + faber.profile.anoncreds(), &faber.institution_did, &credential_def.get_cred_def_id(), &rev_reg.get_tails_dir(), @@ -266,28 +266,28 @@ pub async fn rotate_rev_reg( .await .unwrap(); rev_reg - .publish_revocation_primitives( - &faber.profile.inject_anoncreds_ledger_write(), - TEST_TAILS_URL, - ) + .publish_revocation_primitives(faber.profile.ledger_write(), TEST_TAILS_URL) .await .unwrap(); rev_reg } -pub async fn publish_revocation(institution: &mut TestAgent, rev_reg: &RevocationRegistry) { +pub async fn publish_revocation( + institution: &mut TestAgent
, + rev_reg: &RevocationRegistry, +) { rev_reg .publish_local_revocations( - &institution.profile.inject_anoncreds(), - &institution.profile.inject_anoncreds_ledger_write(), + institution.profile.anoncreds(), + institution.profile.ledger_write(), &institution.institution_did, ) .await .unwrap(); } -pub async fn verifier_create_proof_and_send_request( - institution: &mut TestAgent, +pub async fn verifier_create_proof_and_send_request( + institution: &mut TestAgent
, schema_id: &str, cred_def_id: &str, request_name: Option<&str>, @@ -310,9 +310,9 @@ pub async fn verifier_create_proof_and_send_request( create_verifier_from_request_data(presentation_request_data).await } -pub async fn prover_select_credentials( +pub async fn prover_select_credentials( prover: &mut Prover, - alice: &mut TestAgent, + alice: &mut TestAgent
, presentation_request: RequestPresentation, preselected_credentials: Option<&str>, ) -> SelectedCredentials { @@ -322,7 +322,7 @@ pub async fn prover_select_credentials( .unwrap(); assert_eq!(prover.get_state(), ProverState::PresentationRequestReceived); let retrieved_credentials = prover - .retrieve_credentials(&alice.profile.inject_anoncreds()) + .retrieve_credentials(alice.profile.anoncreds()) .await .unwrap(); info!("prover_select_credentials >> retrieved_credentials: {retrieved_credentials:?}"); @@ -341,8 +341,8 @@ pub async fn prover_select_credentials( } } -pub async fn prover_select_credentials_and_send_proof( - alice: &mut TestAgent, +pub async fn prover_select_credentials_and_send_proof( + alice: &mut TestAgent
, presentation_request: RequestPresentation, preselected_credentials: Option<&str>, ) -> Presentation { @@ -432,9 +432,9 @@ pub fn match_preselected_credentials( selected_credentials } -pub async fn exchange_proof( - institution: &mut TestAgent, - consumer: &mut TestAgent, +pub async fn exchange_proof( + institution: &mut TestAgent, + consumer: &mut TestAgent, schema_id: &str, cred_def_id: &str, request_name: Option<&str>, @@ -451,8 +451,8 @@ pub async fn exchange_proof( verifier .verify_presentation( - &institution.profile.inject_anoncreds_ledger_read(), - &institution.profile.inject_anoncreds(), + institution.profile.ledger_read(), + institution.profile.anoncreds(), presentation, ) .await diff --git a/aries_vcx/tests/utils/test_agent.rs b/aries_vcx/tests/utils/test_agent.rs index 6c5820f1e4..38b2ba77d8 100644 --- a/aries_vcx/tests/utils/test_agent.rs +++ b/aries_vcx/tests/utils/test_agent.rs @@ -1,7 +1,9 @@ +#![allow(clippy::diverging_sub_expression)] + use std::sync::Arc; use aries_vcx::{ - core::profile::profile::Profile, + core::profile::Profile, global::settings::DEFAULT_LINK_SECRET_ALIAS, utils::{ constants::TRUSTEE_SEED, @@ -9,20 +11,23 @@ use aries_vcx::{ random::generate_random_seed, }, }; -use aries_vcx_core::wallet::indy::IndySdkWallet; +use aries_vcx_core::{anoncreds::base_anoncreds::BaseAnonCreds, wallet::indy::IndySdkWallet}; -pub struct TestAgent { - pub profile: Arc, +pub struct TestAgent { + pub profile: P, pub institution_did: String, pub genesis_file_path: String, } -async fn create_test_agent_from_seed(seed: &str, genesis_file_path: String) -> TestAgent { +async fn create_test_agent_from_seed( + seed: &str, + genesis_file_path: String, +) -> TestAgent { let (institution_did, wallet_handle) = dev_setup_wallet_indy(seed).await; let wallet = Arc::new(IndySdkWallet::new(wallet_handle)); let profile = dev_build_featured_profile(genesis_file_path.clone(), wallet).await; profile - .inject_anoncreds() + .anoncreds() .prover_create_link_secret(DEFAULT_LINK_SECRET_ALIAS) .await .unwrap(); @@ -33,10 +38,10 @@ async fn create_test_agent_from_seed(seed: &str, genesis_file_path: String) -> T } } -pub async fn create_test_agent_trustee(genesis_file_path: String) -> TestAgent { +pub async fn create_test_agent_trustee(genesis_file_path: String) -> TestAgent { create_test_agent_from_seed(TRUSTEE_SEED, genesis_file_path).await } -pub async fn create_test_agent(genesis_file_path: String) -> TestAgent { +pub async fn create_test_agent(genesis_file_path: String) -> TestAgent { create_test_agent_from_seed(&generate_random_seed(), genesis_file_path).await } diff --git a/aries_vcx_core/Cargo.toml b/aries_vcx_core/Cargo.toml index 5ddbe5fbea..b121fef3a6 100644 --- a/aries_vcx_core/Cargo.toml +++ b/aries_vcx_core/Cargo.toml @@ -5,36 +5,34 @@ edition = "2021" [features] ########################## DEP FLAGS ################################ -# Feature flag to include the libvdrtools dependency -vdrtools_anoncreds = ["dep:libvdrtools", "dep:indy-api-types"] vdrtools_wallet = ["dep:libvdrtools", "dep:indy-api-types"] # Feature flag to include the 'modular library' dependencies (vdrtools alternatives; indy-vdr, indy-credx) -modular_libs = ["dep:indy-credx"] -vdr_proxy_ledger = ["modular_libs", "dep:indy-vdr-proxy-client"] +credx = ["dep:indy-credx"] +vdr_proxy_ledger = ["credx", "dep:indy-vdr-proxy-client"] # Feature flag to allow legacy proof verification legacy_proof = [] [dependencies] agency_client = { path = "../agency_client" } -indy-vdr = { git = "https://github.com/hyperledger/indy-vdr.git", 
rev = "879e29e", default-features = false, features = ["log"] } -indy-credx = { git = "https://github.com/hyperledger/indy-shared-rs", tag = "v1.0.1", optional = true } +indy-vdr = { git = "https://github.com/hyperledger/indy-vdr.git", rev = "c143268", default-features = false, features = ["log"] } +indy-credx = { git = "https://github.com/hyperledger/indy-shared-rs", tag = "v1.1.0", optional = true } libvdrtools = { path = "../libvdrtools", optional = true } indy-api-types = { path = "../libvdrtools/indy-api-types", optional = true } async-trait = "0.1.68" futures = { version = "0.3", default-features = false } serde_json = "1.0.95" -time = "=0.3.20" +time = "0.3.20" serde = { version = "1.0.159", features = ["derive"] } -rand = "0.7.3" +rand = "0.8.5" log = "0.4.17" thiserror = "1.0.40" lazy_static = "1.4.0" derive_builder = "0.12.0" uuid = { version = "1.3.0", default-features = false, features = ["v4"] } tokio = { version = "1.20" } -indy-vdr-proxy-client = { git = "https://github.com/hyperledger/indy-vdr.git", rev = "879e29e", optional = true } +indy-vdr-proxy-client = { git = "https://github.com/hyperledger/indy-vdr.git", rev = "c143268", optional = true } indy-ledger-response-parser = { path = "../indy_ledger_response_parser" } -lru = { version = "0.10.0" } +lru = { version = "0.12.0" } [dev-dependencies] tokio = { version = "1.20", features = ["rt", "macros", "rt-multi-thread"] } diff --git a/aries_vcx_core/src/anoncreds/credx_anoncreds.rs b/aries_vcx_core/src/anoncreds/credx_anoncreds.rs index e4c6a3a180..6b2d2f3988 100644 --- a/aries_vcx_core/src/anoncreds/credx_anoncreds.rs +++ b/aries_vcx_core/src/anoncreds/credx_anoncreds.rs @@ -19,17 +19,21 @@ use credx::{ }; use indy_credx as credx; use serde::{de::DeserializeOwned, Deserialize, Serialize}; -use serde_json::Value; +use serde_json::{json, Value}; use uuid::Uuid; use super::base_anoncreds::BaseAnonCreds; use crate::{ errors::error::{AriesVcxCoreError, AriesVcxCoreErrorKind, VcxCoreResult}, utils::{ + async_fn_iterator::AsyncFnIterator, constants::ATTRS, json::{AsTypeOrDeserializationError, TryGetIndex}, }, - wallet::base_wallet::{AsyncFnIteratorCollect, BaseWallet}, + wallet::{ + base_wallet::{AsyncFnIteratorCollect, BaseWallet}, + structs_io::UnpackMessageOutput, + }, }; pub const CATEGORY_LINK_SECRET: &str = "VCX_LINK_SECRET"; @@ -56,14 +60,144 @@ pub struct RevocationRegistryInfo { pub used_ids: HashSet, } +/// Adapter used so that credx does not depend strictly on the vdrtools-wallet +/// Will get removed when the wallet and anoncreds interfaces are de-coupled. 
+#[derive(Debug)] +struct WalletAdapter(Arc); + +#[async_trait] +impl BaseWallet for WalletAdapter { + #[cfg(feature = "vdrtools_wallet")] + fn get_wallet_handle(&self) -> indy_api_types::WalletHandle { + self.0.get_wallet_handle() + } + + async fn create_and_store_my_did( + &self, + seed: Option<&str>, + kdf_method_name: Option<&str>, + ) -> VcxCoreResult<(String, String)> { + self.0.create_and_store_my_did(seed, kdf_method_name).await + } + + async fn key_for_local_did(&self, did: &str) -> VcxCoreResult { + self.0.key_for_local_did(did).await + } + + async fn replace_did_keys_start(&self, target_did: &str) -> VcxCoreResult { + self.0.replace_did_keys_start(target_did).await + } + + async fn replace_did_keys_apply(&self, target_did: &str) -> VcxCoreResult<()> { + self.0.replace_did_keys_apply(target_did).await + } + + async fn add_wallet_record( + &self, + xtype: &str, + id: &str, + value: &str, + tags: Option>, + ) -> VcxCoreResult<()> { + self.0.add_wallet_record(xtype, id, value, tags).await + } + + async fn get_wallet_record( + &self, + xtype: &str, + id: &str, + options: &str, + ) -> VcxCoreResult { + self.0.get_wallet_record(xtype, id, options).await + } + + async fn get_wallet_record_value(&self, xtype: &str, id: &str) -> VcxCoreResult { + self.0.get_wallet_record_value(xtype, id).await + } + + async fn delete_wallet_record(&self, xtype: &str, id: &str) -> VcxCoreResult<()> { + self.0.delete_wallet_record(xtype, id).await + } + + async fn update_wallet_record_value( + &self, + xtype: &str, + id: &str, + value: &str, + ) -> VcxCoreResult<()> { + self.0.update_wallet_record_value(xtype, id, value).await + } + + async fn add_wallet_record_tags( + &self, + xtype: &str, + id: &str, + tags: HashMap, + ) -> VcxCoreResult<()> { + self.0.add_wallet_record_tags(xtype, id, tags).await + } + + async fn update_wallet_record_tags( + &self, + xtype: &str, + id: &str, + tags: HashMap, + ) -> VcxCoreResult<()> { + self.0.update_wallet_record_tags(xtype, id, tags).await + } + + async fn delete_wallet_record_tags( + &self, + xtype: &str, + id: &str, + tag_names: &str, + ) -> VcxCoreResult<()> { + self.0.delete_wallet_record_tags(xtype, id, tag_names).await + } + + async fn iterate_wallet_records( + &self, + xtype: &str, + query: &str, + options: &str, + ) -> VcxCoreResult>>> { + self.0.iterate_wallet_records(xtype, query, options).await + } + + // ---- crypto + + async fn sign(&self, my_vk: &str, msg: &[u8]) -> VcxCoreResult> { + self.0.sign(my_vk, msg).await + } + + async fn verify(&self, vk: &str, msg: &[u8], signature: &[u8]) -> VcxCoreResult { + self.0.verify(vk, msg, signature).await + } + + async fn pack_message( + &self, + sender_vk: Option<&str>, + receiver_keys: &str, + msg: &[u8], + ) -> VcxCoreResult> { + self.0.pack_message(sender_vk, receiver_keys, msg).await + } + + async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult { + self.0.unpack_message(msg).await + } +} + #[derive(Debug)] pub struct IndyCredxAnonCreds { - wallet: Arc, + wallet: WalletAdapter, } impl IndyCredxAnonCreds { pub fn new(wallet: Arc) -> Self { - IndyCredxAnonCreds { wallet } + IndyCredxAnonCreds { + wallet: WalletAdapter(wallet), + } } async fn get_wallet_record_value(&self, category: &str, id: &str) -> VcxCoreResult diff --git a/aries_vcx_core/src/anoncreds/indy/credentials/holder/mod.rs b/aries_vcx_core/src/anoncreds/indy/credentials/holder/mod.rs deleted file mode 100644 index 5e328d3f54..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/credentials/holder/mod.rs +++ /dev/null @@ -1,128 +0,0 @@ -use 
vdrtools::{ - Credential, CredentialDefinition, CredentialOffer, CredentialRequestMetadata, DidValue, - Locator, RevocationRegistryDefinition, -}; - -use crate::{errors::error::VcxCoreResult, global::settings, utils, WalletHandle}; - -pub async fn libindy_prover_store_credential( - wallet_handle: WalletHandle, - cred_id: Option<&str>, - cred_req_meta: &str, - cred_json: &str, - cred_def_json: &str, - rev_reg_def_json: Option<&str>, -) -> VcxCoreResult { - trace!( - "libindy_prover_store_credential >>> cred_id: {:?}, cred_req_meta: {}, cred_json: {}, \ - cred_def_json: {}, rev_reg_def_json: {:?}", - cred_id, - cred_req_meta, - cred_json, - cred_def_json, - rev_reg_def_json, - ); - - if settings::indy_mocks_enabled() { - return Ok("cred_id".to_string()); - } - - let cred_req_meta = serde_json::from_str::(cred_req_meta)?; - - let cred_json = serde_json::from_str::(cred_json)?; - - let cred_def_json = serde_json::from_str::(cred_def_json)?; - - let rev_reg_def_json = match rev_reg_def_json { - None => None, - Some(s) => Some(serde_json::from_str::(s)?), - }; - - let res = Locator::instance() - .prover_controller - .store_credential( - wallet_handle, - cred_id.map(ToOwned::to_owned), - cred_req_meta, - cred_json, - cred_def_json, - rev_reg_def_json, - ) - .await?; - - Ok(res) -} - -pub async fn libindy_prover_get_credential( - wallet_handle: WalletHandle, - cred_id: &str, -) -> VcxCoreResult { - trace!("libindy_prover_get_credential >>> cred_id: {:?}", cred_id,); - - let res = Locator::instance() - .prover_controller - .get_credential(wallet_handle, cred_id.into()) - .await?; - - Ok(res) -} - -pub async fn libindy_prover_delete_credential( - wallet_handle: WalletHandle, - cred_id: &str, -) -> VcxCoreResult<()> { - Locator::instance() - .prover_controller - .delete_credential(wallet_handle, cred_id.into()) - .await?; - - Ok(()) -} - -pub async fn libindy_prover_create_master_secret( - wallet_handle: WalletHandle, - master_secret_id: &str, -) -> VcxCoreResult { - if settings::indy_mocks_enabled() { - return Ok(settings::DEFAULT_LINK_SECRET_ALIAS.to_string()); - } - - let res = Locator::instance() - .prover_controller - .create_master_secret(wallet_handle, Some(master_secret_id.into())) - .await?; - - Ok(res) -} - -pub async fn libindy_prover_create_credential_req( - wallet_handle: WalletHandle, - prover_did: &str, - credential_offer_json: &str, - credential_def_json: &str, - master_secret_name: &str, -) -> VcxCoreResult<(String, String)> { - if settings::indy_mocks_enabled() { - return Ok(( - utils::constants::CREDENTIAL_REQ_STRING.to_owned(), - String::new(), - )); - } - - let cred_offer = serde_json::from_str::(credential_offer_json)?; - - let cred_def = serde_json::from_str::(credential_def_json)?; - - let res = Locator::instance() - .prover_controller - .create_credential_request( - wallet_handle, - DidValue(prover_did.into()), - cred_offer, - cred_def, - master_secret_name.into(), - ) - .await?; - - Ok(res) -} diff --git a/aries_vcx_core/src/anoncreds/indy/credentials/issuer/mod.rs b/aries_vcx_core/src/anoncreds/indy/credentials/issuer/mod.rs deleted file mode 100644 index efd9e4046b..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/credentials/issuer/mod.rs +++ /dev/null @@ -1,181 +0,0 @@ -use vdrtools::{ - CredentialOffer, CredentialRequest, CredentialValues, DidValue, Locator, RevocationRegistryId, -}; - -use crate::{ - anoncreds::indy::{general, general::blob_storage_open_reader}, - errors::error::VcxCoreResult, - global::settings, - indy::utils::parse_and_validate, - utils, - 
utils::constants::LIBINDY_CRED_OFFER, - wallet::indy::wallet_non_secrets::{get_rev_reg_delta, set_rev_reg_delta}, - WalletHandle, -}; - -pub async fn libindy_issuer_create_credential_offer( - wallet_handle: WalletHandle, - cred_def_id: &str, -) -> VcxCoreResult { - if settings::indy_mocks_enabled() { - return Ok(LIBINDY_CRED_OFFER.to_string()); - } - - let res = Locator::instance() - .issuer_controller - .create_credential_offer( - wallet_handle, - vdrtools::CredentialDefinitionId(cred_def_id.into()), - ) - .await?; - - Ok(res) -} - -pub async fn libindy_issuer_create_credential( - wallet_handle: WalletHandle, - cred_offer_json: &str, - cred_req_json: &str, - cred_values_json: &str, - rev_reg_id: Option, - tails_file: Option, -) -> VcxCoreResult<(String, Option, Option)> { - if settings::indy_mocks_enabled() { - return Ok((utils::constants::CREDENTIAL_JSON.to_owned(), None, None)); - } - - let blob_handle = match tails_file { - Some(x) => Some(blob_storage_open_reader(&x).await?), - None => None, - }; - - let res = Locator::instance() - .issuer_controller - .new_credential( - wallet_handle, - parse_and_validate::(cred_offer_json)?, - parse_and_validate::(cred_req_json)?, - parse_and_validate::(cred_values_json)?, - rev_reg_id.map(RevocationRegistryId), - blob_handle, - ) - .await?; - - Ok(res) -} - -pub const BLOB_STORAGE_TYPE: &str = "default"; - -pub async fn libindy_create_and_store_revoc_reg( - wallet_handle: WalletHandle, - issuer_did: &str, - cred_def_id: &str, - tails_dir: &str, - max_creds: u32, - tag: &str, -) -> VcxCoreResult<(String, String, String)> { - trace!( - "creating revocation: {}, {}, {}", - cred_def_id, - tails_dir, - max_creds - ); - - let tails_config = json!({"base_dir": tails_dir,"uri_pattern": ""}).to_string(); - - let writer = Locator::instance() - .blob_storage_controller - .open_writer(BLOB_STORAGE_TYPE.into(), tails_config) - .await?; - - let res = Locator::instance() - .issuer_controller - .create_and_store_revocation_registry( - wallet_handle, - DidValue(issuer_did.into()), - None, - tag.into(), - vdrtools::CredentialDefinitionId(cred_def_id.into()), - vdrtools::RevocationRegistryConfig { - issuance_type: Some(vdrtools::IssuanceType::ISSUANCE_BY_DEFAULT), - max_cred_num: Some(max_creds), - }, - writer, - ) - .await?; - - Ok(res) -} - -pub async fn libindy_issuer_revoke_credential( - wallet_handle: WalletHandle, - tails_file: &str, - rev_reg_id: &str, - cred_rev_id: &str, -) -> VcxCoreResult { - let blob_handle = general::blob_storage_open_reader(tails_file).await?; - - let res = Locator::instance() - .issuer_controller - .revoke_credential( - wallet_handle, - blob_handle, - vdrtools::RevocationRegistryId(rev_reg_id.into()), - cred_rev_id.into(), - ) - .await?; - - Ok(res) -} - -pub async fn libindy_issuer_merge_revocation_registry_deltas( - old_delta: &str, - new_delta: &str, -) -> VcxCoreResult { - let res = Locator::instance() - .issuer_controller - .merge_revocation_registry_deltas( - parse_and_validate(old_delta)?, - parse_and_validate(new_delta)?, - )?; - - Ok(res) -} - -pub async fn revoke_credential_local( - wallet_handle: WalletHandle, - tails_file: &str, - rev_reg_id: &str, - cred_rev_id: &str, -) -> VcxCoreResult<()> { - if settings::indy_mocks_enabled() { - return Ok(()); - } - - let mut new_delta_json = - libindy_issuer_revoke_credential(wallet_handle, tails_file, rev_reg_id, cred_rev_id) - .await?; - - debug!( - "revoke_credential_local >>> new_delta_json: {}", - new_delta_json - ); - - if let Some(old_delta_json) = 
get_rev_reg_delta(wallet_handle, rev_reg_id).await { - debug!( - "revoke_credential_local >>> old_delta_json: {}", - old_delta_json - ); - new_delta_json = libindy_issuer_merge_revocation_registry_deltas( - old_delta_json.as_str(), - new_delta_json.as_str(), - ) - .await?; - debug!( - "revoke_credential_local >>> merged_delta_json: {}", - new_delta_json - ); - } - - set_rev_reg_delta(wallet_handle, rev_reg_id, &new_delta_json).await -} diff --git a/aries_vcx_core/src/anoncreds/indy/general.rs b/aries_vcx_core/src/anoncreds/indy/general.rs deleted file mode 100644 index fdd3edaeb3..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/general.rs +++ /dev/null @@ -1,34 +0,0 @@ -use vdrtools::Locator; - -use crate::{errors::error::VcxCoreResult, SearchHandle}; - -pub(crate) async fn blob_storage_open_reader(base_dir: &str) -> VcxCoreResult { - let tails_config = json!( - { - "base_dir": base_dir, - "uri_pattern": "" // TODO remove, unused - } - ) - .to_string(); - - let res = Locator::instance() - .blob_storage_controller - .open_reader("default".into(), tails_config) - .await?; - - Ok(res) -} - -pub(crate) async fn close_search_handle(search_handle: SearchHandle) -> VcxCoreResult<()> { - Locator::instance() - .prover_controller - .close_credentials_search_for_proof_req(search_handle) - .await?; - - Ok(()) -} - -pub async fn generate_nonce() -> VcxCoreResult { - let res = Locator::instance().verifier_controller.generate_nonce()?; - Ok(res) -} diff --git a/aries_vcx_core/src/anoncreds/indy/mod.rs b/aries_vcx_core/src/anoncreds/indy/mod.rs deleted file mode 100644 index 699364c196..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub(crate) mod credentials; -pub(crate) mod general; -pub mod primitives; -pub(crate) mod proofs; diff --git a/aries_vcx_core/src/anoncreds/indy/primitives/credential_schema.rs b/aries_vcx_core/src/anoncreds/indy/primitives/credential_schema.rs deleted file mode 100644 index 635d85ec89..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/primitives/credential_schema.rs +++ /dev/null @@ -1,34 +0,0 @@ -use log::trace; -use vdrtools::{ - domain::{anoncreds::schema::AttributeNames, crypto::did::DidValue}, - Locator, -}; - -use crate::errors::error::VcxCoreResult; - -// consider relocating out of primitive -pub async fn libindy_issuer_create_schema( - issuer_did: &str, - name: &str, - version: &str, - attrs: &str, -) -> VcxCoreResult<(String, String)> { - trace!( - "libindy_issuer_create_schema >>> issuer_did: {}, name: {}, version: {}, attrs: {}", - issuer_did, - name, - version, - attrs - ); - - let attrs = serde_json::from_str::(attrs)?; - - let res = Locator::instance().issuer_controller.create_schema( - DidValue(issuer_did.into()), - name.into(), - version.into(), - attrs, - )?; - - Ok(res) -} diff --git a/aries_vcx_core/src/anoncreds/indy/primitives/mod.rs b/aries_vcx_core/src/anoncreds/indy/primitives/mod.rs deleted file mode 100644 index 910adda11f..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/primitives/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod credential_schema; diff --git a/aries_vcx_core/src/anoncreds/indy/proofs/mod.rs b/aries_vcx_core/src/anoncreds/indy/proofs/mod.rs deleted file mode 100644 index 96e9e18468..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/proofs/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod prover; -pub mod verifier; diff --git a/aries_vcx_core/src/anoncreds/indy/proofs/prover.rs b/aries_vcx_core/src/anoncreds/indy/proofs/prover.rs deleted file mode 100644 index 2ff5e63c99..0000000000 --- 
a/aries_vcx_core/src/anoncreds/indy/proofs/prover.rs +++ /dev/null @@ -1,203 +0,0 @@ -use serde_json::{Map, Value}; -use vdrtools::{Locator, SearchHandle}; - -use crate::{ - anoncreds::indy::general::{blob_storage_open_reader, close_search_handle}, - errors::error::{prelude::*, VcxCoreResult}, - global::{mockdata::mock_settings::get_mock_creds_retrieved_for_proof_request, settings}, - indy::utils::parse_and_validate, - utils, - utils::constants::{ATTRS, PROOF_REQUESTED_PREDICATES, REQUESTED_ATTRIBUTES, REV_STATE_JSON}, - WalletHandle, -}; - -pub async fn libindy_prover_create_revocation_state( - tails_file_path: &str, - rev_reg_def_json: &str, - rev_reg_delta_json: &str, - timestamp: u64, - cred_rev_id: &str, -) -> VcxCoreResult { - if settings::indy_mocks_enabled() { - return Ok(REV_STATE_JSON.to_string()); - } - - let blob_handle = blob_storage_open_reader(tails_file_path).await?; - - let res = Locator::instance() - .prover_controller - .create_revocation_state( - blob_handle, - parse_and_validate(rev_reg_def_json)?, - parse_and_validate(rev_reg_delta_json)?, - timestamp, - cred_rev_id.into(), - ) - .await?; - - Ok(res) -} - -pub async fn libindy_prover_create_proof( - wallet_handle: WalletHandle, - proof_req_json: &str, - requested_credentials_json: &str, - master_secret_id: &str, - schemas_json: &str, - credential_defs_json: &str, - revoc_states_json: Option<&str>, -) -> VcxCoreResult { - if settings::indy_mocks_enabled() { - return Ok(utils::constants::PROOF_JSON.to_owned()); - } - - let revoc_states_json = revoc_states_json.unwrap_or("{}"); - - let res = Locator::instance() - .prover_controller - .create_proof( - wallet_handle, - parse_and_validate(proof_req_json)?, - parse_and_validate(requested_credentials_json)?, - master_secret_id.into(), - serde_json::from_str(schemas_json)?, - serde_json::from_str(credential_defs_json)?, - serde_json::from_str(revoc_states_json)?, - ) - .await?; - - Ok(res) -} - -async fn fetch_credentials( - search_handle: SearchHandle, - requested_attributes: Map, -) -> VcxCoreResult { - let mut v: Value = json!({}); - - for item_referent in requested_attributes.keys() { - v[ATTRS][item_referent] = serde_json::from_str( - &Locator::instance() - .prover_controller - .fetch_credential_for_proof_request(search_handle, item_referent.clone(), 100) - .await - .map_err(|_| { - error!( - "Invalid Json Parsing of Object Returned from Libindy. Did Libindy change \ - its structure?" - ); - AriesVcxCoreError::from_msg( - AriesVcxCoreErrorKind::InvalidConfiguration, - "Invalid Json Parsing of Object Returned from Libindy. Did Libindy change \ - its structure?", - ) - })?, - )? 
- } - - Ok(v.to_string()) -} - -pub async fn libindy_prover_get_credentials( - wallet_handle: WalletHandle, - filter_json: Option<&str>, -) -> VcxCoreResult { - let res = Locator::instance() - .prover_controller - .get_credentials(wallet_handle, filter_json.map(String::from)) - .await - .map_err(|ec| { - error!("Getting prover credentials failed."); - ec - })?; - Ok(res) -} - -pub async fn libindy_prover_get_credentials_for_proof_req( - wallet_handle: WalletHandle, - proof_req: &str, -) -> VcxCoreResult { - trace!( - "libindy_prover_get_credentials_for_proof_req >>> proof_req: {}", - proof_req - ); - - { - match get_mock_creds_retrieved_for_proof_request() { - None => {} - Some(mocked_creds) => { - warn!("get_mock_creds_retrieved_for_proof_request returning mocked response"); - return Ok(mocked_creds); - } - } - } - - // this may be too redundant since Prover::search_credentials will validate the proof reqeuest - // already. - let proof_request_json: Map = - serde_json::from_str(proof_req).map_err(|err| { - AriesVcxCoreError::from_msg( - AriesVcxCoreErrorKind::InvalidProofRequest, - format!("Cannot deserialize ProofRequest: {err:?}"), - ) - })?; - - // since the search_credentials_for_proof request validates that the proof_req is properly - // structured, this get() fn should never fail, unless libindy changes their formats. - let requested_attributes: Option> = - proof_request_json.get(REQUESTED_ATTRIBUTES).and_then(|v| { - serde_json::from_value(v.clone()) - .map_err(|_| { - error!( - "Invalid Json Parsing of Requested Attributes Retrieved From Libindy. Did \ - Libindy change its structure?" - ); - }) - .ok() - }); - - let requested_predicates: Option> = proof_request_json - .get(PROOF_REQUESTED_PREDICATES) - .and_then(|v| { - serde_json::from_value(v.clone()) - .map_err(|_| { - error!( - "Invalid Json Parsing of Requested Predicates Retrieved From Libindy. Did \ - Libindy change its structure?" - ); - }) - .ok() - }); - - // handle special case of "empty because json is bad" vs "empty because no attributes sepected" - if requested_attributes.is_none() && requested_predicates.is_none() { - return Err(AriesVcxCoreError::from_msg( - AriesVcxCoreErrorKind::InvalidAttributesStructure, - "Invalid Json Parsing of Requested Attributes Retrieved From Libindy", - )); - } - - let mut fetch_attrs: Map = match requested_attributes { - Some(attrs) => attrs.clone(), - None => Map::new(), - }; - if let Some(attrs) = requested_predicates { - fetch_attrs.extend(attrs) - } - if !fetch_attrs.is_empty() { - let search_handle = Locator::instance() - .prover_controller - .search_credentials_for_proof_req(wallet_handle, serde_json::from_str(proof_req)?, None) - .await?; - - let creds: String = fetch_credentials(search_handle, fetch_attrs).await?; - - // should an error on closing a search handle throw an error, or just a warning? - // for now we're are just outputting to the user that there is an issue, and continuing on. 
- let _ = close_search_handle(search_handle).await; - - Ok(creds) - } else { - Ok("{}".to_string()) - } -} diff --git a/aries_vcx_core/src/anoncreds/indy/proofs/verifier.rs b/aries_vcx_core/src/anoncreds/indy/proofs/verifier.rs deleted file mode 100644 index 5c700e7b82..0000000000 --- a/aries_vcx_core/src/anoncreds/indy/proofs/verifier.rs +++ /dev/null @@ -1,23 +0,0 @@ -use vdrtools::Locator; - -use crate::{errors::error::VcxCoreResult, indy::utils::parse_and_validate}; - -pub async fn libindy_verifier_verify_proof( - proof_req_json: &str, - proof_json: &str, - schemas_json: &str, - credential_defs_json: &str, - rev_reg_defs_json: &str, - rev_regs_json: &str, -) -> VcxCoreResult { - let res = Locator::instance().verifier_controller.verify_proof( - parse_and_validate(proof_req_json)?, - parse_and_validate(proof_json)?, - serde_json::from_str(schemas_json)?, - serde_json::from_str(credential_defs_json)?, - serde_json::from_str(rev_reg_defs_json)?, - serde_json::from_str(rev_regs_json)?, - )?; - - Ok(res) -} diff --git a/aries_vcx_core/src/anoncreds/indy_anoncreds.rs b/aries_vcx_core/src/anoncreds/indy_anoncreds.rs deleted file mode 100644 index 2aebefb665..0000000000 --- a/aries_vcx_core/src/anoncreds/indy_anoncreds.rs +++ /dev/null @@ -1,268 +0,0 @@ -use async_trait::async_trait; -use vdrtools::{DidValue, Locator}; - -use super::base_anoncreds::BaseAnonCreds; -use crate::{ - anoncreds, - anoncreds::indy::primitives::credential_schema::libindy_issuer_create_schema, - errors::error::VcxCoreResult, - indy::utils::parse_and_validate, - wallet::indy::wallet_non_secrets::{clear_rev_reg_delta, get_rev_reg_delta}, - WalletHandle, -}; - -#[derive(Debug)] -pub struct IndySdkAnonCreds { - indy_wallet_handle: WalletHandle, -} - -impl IndySdkAnonCreds { - pub fn new(indy_wallet_handle: WalletHandle) -> Self { - IndySdkAnonCreds { indy_wallet_handle } - } -} - -#[async_trait] -impl BaseAnonCreds for IndySdkAnonCreds { - async fn verifier_verify_proof( - &self, - proof_req_json: &str, - proof_json: &str, - schemas_json: &str, - credential_defs_json: &str, - rev_reg_defs_json: &str, - rev_regs_json: &str, - ) -> VcxCoreResult { - anoncreds::indy::proofs::verifier::libindy_verifier_verify_proof( - proof_req_json, - proof_json, - schemas_json, - credential_defs_json, - rev_reg_defs_json, - rev_regs_json, - ) - .await - } - - async fn issuer_create_and_store_revoc_reg( - &self, - issuer_did: &str, - cred_def_id: &str, - tails_dir: &str, - max_creds: u32, - tag: &str, - ) -> VcxCoreResult<(String, String, String)> { - anoncreds::indy::credentials::issuer::libindy_create_and_store_revoc_reg( - self.indy_wallet_handle, - issuer_did, - cred_def_id, - tails_dir, - max_creds, - tag, - ) - .await - } - - async fn issuer_create_and_store_credential_def( - &self, - issuer_did: &str, - schema_json: &str, - tag: &str, - sig_type: Option<&str>, - config_json: &str, - ) -> VcxCoreResult<(String, String)> { - let res = Locator::instance() - .issuer_controller - .create_and_store_credential_definition( - self.indy_wallet_handle, - DidValue(issuer_did.into()), - parse_and_validate(schema_json)?, - tag.into(), - sig_type.map(|s| s.into()), - Some(serde_json::from_str(config_json)?), - ) - .await?; - - Ok(res) - } - - async fn issuer_create_credential_offer(&self, cred_def_id: &str) -> VcxCoreResult { - anoncreds::indy::credentials::issuer::libindy_issuer_create_credential_offer( - self.indy_wallet_handle, - cred_def_id, - ) - .await - } - - async fn issuer_create_credential( - &self, - cred_offer_json: &str, - cred_req_json: 
&str, - cred_values_json: &str, - rev_reg_id: Option, - tails_dir: Option, - ) -> VcxCoreResult<(String, Option, Option)> { - anoncreds::indy::credentials::issuer::libindy_issuer_create_credential( - self.indy_wallet_handle, - cred_offer_json, - cred_req_json, - cred_values_json, - rev_reg_id, - tails_dir, - ) - .await - } - - async fn prover_create_proof( - &self, - proof_req_json: &str, - requested_credentials_json: &str, - master_secret_id: &str, - schemas_json: &str, - credential_defs_json: &str, - revoc_states_json: Option<&str>, - ) -> VcxCoreResult { - anoncreds::indy::proofs::prover::libindy_prover_create_proof( - self.indy_wallet_handle, - proof_req_json, - requested_credentials_json, - master_secret_id, - schemas_json, - credential_defs_json, - revoc_states_json, - ) - .await - } - - async fn prover_get_credential(&self, cred_id: &str) -> VcxCoreResult { - anoncreds::indy::credentials::holder::libindy_prover_get_credential( - self.indy_wallet_handle, - cred_id, - ) - .await - } - - async fn prover_get_credentials(&self, filter_json: Option<&str>) -> VcxCoreResult { - anoncreds::indy::proofs::prover::libindy_prover_get_credentials( - self.indy_wallet_handle, - filter_json, - ) - .await - } - - async fn prover_get_credentials_for_proof_req(&self, proof_req: &str) -> VcxCoreResult { - anoncreds::indy::proofs::prover::libindy_prover_get_credentials_for_proof_req( - self.indy_wallet_handle, - proof_req, - ) - .await - } - - async fn prover_create_credential_req( - &self, - prover_did: &str, - credential_offer_json: &str, - credential_def_json: &str, - master_secret_id: &str, - ) -> VcxCoreResult<(String, String)> { - anoncreds::indy::credentials::holder::libindy_prover_create_credential_req( - self.indy_wallet_handle, - prover_did, - credential_offer_json, - credential_def_json, - master_secret_id, - ) - .await - } - - async fn create_revocation_state( - &self, - tails_dir: &str, - rev_reg_def_json: &str, - rev_reg_delta_json: &str, - timestamp: u64, - cred_rev_id: &str, - ) -> VcxCoreResult { - anoncreds::indy::proofs::prover::libindy_prover_create_revocation_state( - tails_dir, - rev_reg_def_json, - rev_reg_delta_json, - timestamp, - cred_rev_id, - ) - .await - } - - async fn prover_store_credential( - &self, - cred_id: Option<&str>, - cred_req_meta: &str, - cred_json: &str, - cred_def_json: &str, - rev_reg_def_json: Option<&str>, - ) -> VcxCoreResult { - anoncreds::indy::credentials::holder::libindy_prover_store_credential( - self.indy_wallet_handle, - cred_id, - cred_req_meta, - cred_json, - cred_def_json, - rev_reg_def_json, - ) - .await - } - - async fn prover_delete_credential(&self, cred_id: &str) -> VcxCoreResult<()> { - anoncreds::indy::credentials::holder::libindy_prover_delete_credential( - self.indy_wallet_handle, - cred_id, - ) - .await - } - - async fn prover_create_link_secret(&self, master_secret_id: &str) -> VcxCoreResult { - anoncreds::indy::credentials::holder::libindy_prover_create_master_secret( - self.indy_wallet_handle, - master_secret_id, - ) - .await - } - - async fn issuer_create_schema( - &self, - issuer_did: &str, - name: &str, - version: &str, - attrs: &str, - ) -> VcxCoreResult<(String, String)> { - libindy_issuer_create_schema(issuer_did, name, version, attrs).await - } - - async fn revoke_credential_local( - &self, - tails_dir: &str, - rev_reg_id: &str, - cred_rev_id: &str, - ) -> VcxCoreResult<()> { - anoncreds::indy::credentials::issuer::revoke_credential_local( - self.indy_wallet_handle, - tails_dir, - rev_reg_id, - cred_rev_id, - ) - .await - } - - 
async fn get_rev_reg_delta(&self, rev_reg_id: &str) -> VcxCoreResult> { - Ok(get_rev_reg_delta(self.indy_wallet_handle, rev_reg_id).await) - } - - async fn clear_rev_reg_delta(&self, rev_reg_id: &str) -> VcxCoreResult<()> { - clear_rev_reg_delta(self.indy_wallet_handle, rev_reg_id).await?; - Ok(()) - } - - async fn generate_nonce(&self) -> VcxCoreResult { - anoncreds::indy::general::generate_nonce().await - } -} diff --git a/aries_vcx_core/src/anoncreds/mod.rs b/aries_vcx_core/src/anoncreds/mod.rs index 744331e109..754b6642d8 100644 --- a/aries_vcx_core/src/anoncreds/mod.rs +++ b/aries_vcx_core/src/anoncreds/mod.rs @@ -1,9 +1,3 @@ pub mod base_anoncreds; - -#[cfg(any(feature = "modular_libs", feature = "vdr_proxy_ledger"))] +#[cfg(feature = "credx")] pub mod credx_anoncreds; - -#[cfg(feature = "vdrtools_anoncreds")] -pub mod indy; -#[cfg(feature = "vdrtools_anoncreds")] -pub mod indy_anoncreds; diff --git a/aries_vcx_core/src/errors/mod.rs b/aries_vcx_core/src/errors/mod.rs index 9ef99a5d30..841ab45fe2 100644 --- a/aries_vcx_core/src/errors/mod.rs +++ b/aries_vcx_core/src/errors/mod.rs @@ -1,8 +1,8 @@ pub mod error; mod mapping_agency_client; -#[cfg(feature = "modular_libs")] +#[cfg(feature = "credx")] mod mapping_credx; -#[cfg(any(feature = "vdrtools_anoncreds", feature = "vdrtools_wallet"))] +#[cfg(feature = "vdrtools_wallet")] mod mapping_indy_api_types; mod mapping_indyvdr; #[cfg(feature = "vdr_proxy_ledger")] diff --git a/aries_vcx_core/src/global/settings.rs b/aries_vcx_core/src/global/settings.rs index a43cad9c47..2da0d5556d 100644 --- a/aries_vcx_core/src/global/settings.rs +++ b/aries_vcx_core/src/global/settings.rs @@ -1,110 +1,2 @@ -use std::{collections::HashMap, sync::RwLock}; - -use crate::errors::error::prelude::*; - -pub static CONFIG_INSTITUTION_DID: &str = "institution_did"; - -// functionally not used -pub static CONFIG_ENABLE_TEST_MODE: &str = "enable_test_mode"; -pub static CONFIG_WALLET_BACKUP_KEY: &str = "backup_key"; -pub static CONFIG_WALLET_KEY: &str = "wallet_key"; -pub static CONFIG_PROTOCOL_VERSION: &str = "protocol_version"; -pub static CONFIG_TXN_AUTHOR_AGREEMENT: &str = "author_agreement"; -pub static DEFAULT_PROTOCOL_VERSION: usize = 2; -pub static MAX_SUPPORTED_PROTOCOL_VERSION: usize = 2; pub static DEFAULT_LINK_SECRET_ALIAS: &str = "main"; pub static DEFAULT_DID: &str = "2hoqvcwupRTUNkXn6ArYzs"; - -lazy_static! { - static ref SETTINGS: RwLock> = RwLock::new(HashMap::new()); -} - -pub fn enable_indy_mocks() -> VcxCoreResult<()> { - debug!("enable_indy_mocks >>>"); - set_config_value(CONFIG_ENABLE_TEST_MODE, "true") -} - -pub fn disable_indy_mocks() -> VcxCoreResult<()> { - debug!("disable_indy_mocks >>>"); - set_config_value(CONFIG_ENABLE_TEST_MODE, "false") -} - -pub fn indy_mocks_enabled() -> bool { - let config = SETTINGS.read().expect("Unable to access SETTINGS"); - - match config.get(CONFIG_ENABLE_TEST_MODE) { - None => false, - Some(value) => { - debug!("indy_mocks_enabled >>> {}", value); - value == "true" || value == "indy" - } - } -} - -pub fn get_config_value(key: &str) -> VcxCoreResult { - trace!("get_config_value >>> key: {}", key); - - SETTINGS - .read() - .or(Err(AriesVcxCoreError::from_msg( - AriesVcxCoreErrorKind::InvalidConfiguration, - "Cannot read settings", - )))? 
- .get(key) - .map(|v| v.to_string()) - .ok_or(AriesVcxCoreError::from_msg( - AriesVcxCoreErrorKind::InvalidConfiguration, - format!("Cannot read \"{key}\" from settings"), - )) -} - -pub fn set_config_value(key: &str, value: &str) -> VcxCoreResult<()> { - trace!("set_config_value >>> key: {}, value: {}", key, value); - SETTINGS - .write() - .or(Err(AriesVcxCoreError::from_msg( - AriesVcxCoreErrorKind::UnknownError, - "Cannot write settings", - )))? - .insert(key.to_string(), value.to_string()); - Ok(()) -} - -pub fn reset_config_values_ariesvcxcore() -> VcxCoreResult<()> { - trace!("reset_config_values >>>"); - let mut config = SETTINGS.write()?; - config.clear(); - Ok(()) -} - -pub fn get_protocol_version() -> usize { - let protocol_version = match get_config_value(CONFIG_PROTOCOL_VERSION) { - Ok(ver) => ver.parse::().unwrap_or_else(|err| { - warn!( - "Can't parse value of protocol version from config ({}), use default one ({})", - err, DEFAULT_PROTOCOL_VERSION - ); - DEFAULT_PROTOCOL_VERSION - }), - Err(err) => { - info!( - "Can't fetch protocol version from config ({}), use default one ({})", - err, DEFAULT_PROTOCOL_VERSION - ); - DEFAULT_PROTOCOL_VERSION - } - }; - if protocol_version > MAX_SUPPORTED_PROTOCOL_VERSION { - error!( - "Protocol version from config {}, greater then maximal supported {}, use maximum one", - protocol_version, MAX_SUPPORTED_PROTOCOL_VERSION - ); - MAX_SUPPORTED_PROTOCOL_VERSION - } else { - protocol_version - } -} - -pub fn get_sample_did() -> String { - DEFAULT_DID.to_string() -} diff --git a/aries_vcx_core/src/indy/mod.rs b/aries_vcx_core/src/indy/mod.rs deleted file mode 100644 index b5614dd823..0000000000 --- a/aries_vcx_core/src/indy/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod utils; diff --git a/aries_vcx_core/src/indy/utils/mod.rs b/aries_vcx_core/src/indy/utils/mod.rs deleted file mode 100644 index d36378faec..0000000000 --- a/aries_vcx_core/src/indy/utils/mod.rs +++ /dev/null @@ -1,27 +0,0 @@ -use std::sync::atomic::{AtomicUsize, Ordering}; - -use vdrtools::{types::validation::Validatable, CommandHandle}; - -use crate::errors::error::{AriesVcxCoreError, AriesVcxCoreErrorKind, VcxCoreResult}; - -static COMMAND_HANDLE_COUNTER: AtomicUsize = AtomicUsize::new(1); - -pub fn next_command_handle() -> CommandHandle { - (COMMAND_HANDLE_COUNTER.fetch_add(1, Ordering::SeqCst) + 1) as CommandHandle -} - -pub fn parse_and_validate<'a, T>(s: &'a str) -> VcxCoreResult -where - T: Validatable, - T: serde::Deserialize<'a>, -{ - let data = serde_json::from_str::(s)?; - - match data.validate() { - Ok(_) => Ok(data), - Err(s) => Err(AriesVcxCoreError::from_msg( - AriesVcxCoreErrorKind::LibindyInvalidStructure, - s, - )), - } -} diff --git a/aries_vcx_core/src/ledger/request_submitter/vdr_ledger.rs b/aries_vcx_core/src/ledger/request_submitter/vdr_ledger.rs index db1c518b21..b7efe67eca 100644 --- a/aries_vcx_core/src/ledger/request_submitter/vdr_ledger.rs +++ b/aries_vcx_core/src/ledger/request_submitter/vdr_ledger.rs @@ -1,7 +1,6 @@ use std::{ collections::{hash_map::RandomState, HashMap}, fmt::{Debug, Formatter}, - sync::Arc, }; use async_trait::async_trait; @@ -66,11 +65,11 @@ impl Debug for IndyVdrLedgerPool { } pub struct IndyVdrSubmitter { - pool: Arc, + pool: IndyVdrLedgerPool, } impl IndyVdrSubmitter { - pub fn new(pool: Arc) -> Self { + pub fn new(pool: IndyVdrLedgerPool) -> Self { Self { pool } } } diff --git a/aries_vcx_core/src/lib.rs b/aries_vcx_core/src/lib.rs index c53e13c92a..adbbf43082 100644 --- a/aries_vcx_core/src/lib.rs +++ 
b/aries_vcx_core/src/lib.rs @@ -17,20 +17,16 @@ macro_rules! secret { #[macro_use] extern crate lazy_static; -#[macro_use] extern crate serde_json; #[macro_use] extern crate log; -#[macro_use] extern crate derive_builder; pub mod anoncreds; pub mod errors; pub mod global; -#[cfg(feature = "vdrtools_anoncreds")] -pub mod indy; pub mod ledger; pub mod utils; pub mod wallet; diff --git a/aries_vcx_core/src/wallet/agency_client_wallet.rs b/aries_vcx_core/src/wallet/agency_client_wallet.rs index 7430d22e5e..ed2c2798f3 100644 --- a/aries_vcx_core/src/wallet/agency_client_wallet.rs +++ b/aries_vcx_core/src/wallet/agency_client_wallet.rs @@ -8,6 +8,7 @@ use async_trait::async_trait; #[cfg(feature = "vdrtools_wallet")] use vdrtools::WalletHandle; +use super::structs_io::UnpackMessageOutput; use crate::{ errors::error::{AriesVcxCoreError, AriesVcxCoreErrorKind, VcxCoreResult}, utils::async_fn_iterator::AsyncFnIterator, @@ -15,7 +16,7 @@ use crate::{ }; #[derive(Debug)] -pub(crate) struct AgencyClientWallet { +pub struct AgencyClientWallet { inner: Arc, } @@ -162,8 +163,11 @@ impl BaseWallet for AgencyClientWallet { .await?) } - async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult> { - Ok(self.inner.unpack_message(msg).await?) + async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult { + let unpack_json_bytes = self.inner.unpack_message(msg).await?; + serde_json::from_slice(&unpack_json_bytes[..]).map_err(|err| { + AriesVcxCoreError::from_msg(AriesVcxCoreErrorKind::ParsingError, err.to_string()) + }) } #[cfg(feature = "vdrtools_wallet")] @@ -173,14 +177,14 @@ impl BaseWallet for AgencyClientWallet { } pub trait ToBaseWallet { - fn to_base_wallet(&self) -> Arc; + fn to_base_wallet(&self) -> AgencyClientWallet; } impl ToBaseWallet for Arc { - fn to_base_wallet(&self) -> Arc { - Arc::new(AgencyClientWallet { + fn to_base_wallet(&self) -> AgencyClientWallet { + AgencyClientWallet { inner: Arc::clone(self), - }) + } } } @@ -219,11 +223,17 @@ impl BaseAgencyClientWallet for BaseWalletAgencyClientWallet { } async fn unpack_message(&self, msg: &[u8]) -> AgencyClientResult> { - self.inner.unpack_message(msg).await.map_err(|e| { + let unpack = self.inner.unpack_message(msg).await.map_err(|e| { AgencyClientError::from_msg( AgencyClientErrorKind::UnknownError, format!("A VCXError occured while calling unpack_message: {e:?}"), ) + })?; + serde_json::to_vec(&unpack).map_err(|err| { + AgencyClientError::from_msg( + AgencyClientErrorKind::UnknownError, + format!("A VCXError occured while calling unpack_message: {err:?}"), + ) }) } } @@ -232,10 +242,12 @@ pub trait ToBaseAgencyClientWallet { fn to_base_agency_client_wallet(&self) -> Arc; } -impl ToBaseAgencyClientWallet for Arc { +impl ToBaseAgencyClientWallet for Arc +where + T: BaseWallet + 'static, +{ fn to_base_agency_client_wallet(&self) -> Arc { - Arc::new(BaseWalletAgencyClientWallet { - inner: Arc::clone(self), - }) + let x = self.clone(); + Arc::new(BaseWalletAgencyClientWallet { inner: x }) } } diff --git a/aries_vcx_core/src/wallet/base_wallet.rs b/aries_vcx_core/src/wallet/base_wallet.rs index 34339ef5a2..2ee756b77b 100644 --- a/aries_vcx_core/src/wallet/base_wallet.rs +++ b/aries_vcx_core/src/wallet/base_wallet.rs @@ -2,6 +2,7 @@ use std::collections::HashMap; use async_trait::async_trait; +use super::structs_io::UnpackMessageOutput; #[cfg(feature = "vdrtools_wallet")] use crate::WalletHandle; use crate::{errors::error::VcxCoreResult, utils::async_fn_iterator::AsyncFnIterator}; @@ -99,7 +100,7 @@ pub trait BaseWallet: std::fmt::Debug + Send + 
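// Editorial sketch (not part of the diff): the blanket-impl pattern that the
// ToBaseAgencyClientWallet change above adopts -- implementing the adapter for any
// Arc<T> where T: BaseWallet + 'static, instead of only for Arc<dyn BaseWallet>.
// The trait names and bodies below are simplified stand-ins, not the aries-vcx APIs.
use std::sync::Arc;

trait BaseWallet {
    fn name(&self) -> String;
}

trait AgencyWallet {
    fn agency_name(&self) -> String;
}

// Adapter owning a shared handle to any concrete wallet type.
struct WalletAdapter<T: BaseWallet> {
    inner: Arc<T>,
}

impl<T: BaseWallet> AgencyWallet for WalletAdapter<T> {
    fn agency_name(&self) -> String {
        self.inner.name()
    }
}

trait ToAgencyWallet {
    fn to_agency_wallet(&self) -> Arc<dyn AgencyWallet>;
}

// Blanket impl: callers holding Arc<ConcreteWallet> no longer have to erase the type
// to Arc<dyn BaseWallet> before converting.
impl<T> ToAgencyWallet for Arc<T>
where
    T: BaseWallet + 'static,
{
    fn to_agency_wallet(&self) -> Arc<dyn AgencyWallet> {
        Arc::new(WalletAdapter { inner: self.clone() })
    }
}

struct DummyWallet;

impl BaseWallet for DummyWallet {
    fn name(&self) -> String {
        "dummy".to_string()
    }
}

fn main() {
    let wallet = Arc::new(DummyWallet);
    let agency: Arc<dyn AgencyWallet> = wallet.to_agency_wallet();
    assert_eq!(agency.agency_name(), "dummy");
}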
Sync { msg: &[u8], ) -> VcxCoreResult>; - async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult>; + async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult; } #[async_trait] diff --git a/aries_vcx_core/src/wallet/indy/indy_wallet.rs b/aries_vcx_core/src/wallet/indy/indy_wallet.rs index ce0ce178bd..fb57aee12c 100644 --- a/aries_vcx_core/src/wallet/indy/indy_wallet.rs +++ b/aries_vcx_core/src/wallet/indy/indy_wallet.rs @@ -9,6 +9,7 @@ use crate::{ wallet::{ base_wallet::BaseWallet, indy::{internal, IndySdkWallet, IndyWalletRecordIterator, WalletRecord}, + structs_io::UnpackMessageOutput, }, WalletHandle, }; @@ -145,8 +146,12 @@ impl BaseWallet for IndySdkWallet { wallet::indy::signing::pack_message(self.wallet_handle, sender_vk, receiver_keys, msg).await } - async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult> { - wallet::indy::signing::unpack_message(self.wallet_handle, msg).await + async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult { + let unpack_json_bytes = + wallet::indy::signing::unpack_message(self.wallet_handle, msg).await?; + serde_json::from_slice(&unpack_json_bytes[..]).map_err(|err| { + AriesVcxCoreError::from_msg(AriesVcxCoreErrorKind::ParsingError, err.to_string()) + }) } #[cfg(feature = "vdrtools_wallet")] diff --git a/aries_vcx_core/src/wallet/indy/internal/mod.rs b/aries_vcx_core/src/wallet/indy/internal/mod.rs index 288cce97c1..9736937c29 100644 --- a/aries_vcx_core/src/wallet/indy/internal/mod.rs +++ b/aries_vcx_core/src/wallet/indy/internal/mod.rs @@ -1,6 +1,6 @@ use vdrtools::{Locator, SearchHandle, WalletHandle}; -use crate::{errors::error::VcxCoreResult, global::settings}; +use crate::errors::error::VcxCoreResult; pub(crate) async fn add_wallet_record( wallet_handle: WalletHandle, @@ -17,10 +17,6 @@ pub(crate) async fn add_wallet_record( secret!(&tags) ); - if settings::indy_mocks_enabled() { - return Ok(()); - } - Locator::instance() .non_secret_controller .add_record( @@ -48,12 +44,6 @@ pub(crate) async fn get_wallet_record( options ); - if settings::indy_mocks_enabled() { - return Ok( - r#"{"id":"123","type":"record type","value":"record value","tags":null}"#.to_string(), - ); - } - let res = Locator::instance() .non_secret_controller .get_record(wallet_handle, xtype.into(), id.into(), options.into()) @@ -73,10 +63,6 @@ pub async fn delete_wallet_record( secret!(&id) ); - if settings::indy_mocks_enabled() { - return Ok(()); - } - Locator::instance() .non_secret_controller .delete_record(wallet_handle, xtype.into(), id.into()) @@ -98,10 +84,6 @@ pub(crate) async fn update_wallet_record_value( secret!(&value) ); - if settings::indy_mocks_enabled() { - return Ok(()); - } - Locator::instance() .non_secret_controller .update_record_value(wallet_handle, xtype.into(), id.into(), value.into()) @@ -123,10 +105,6 @@ pub(crate) async fn add_wallet_record_tags( secret!(&tags) ); - if settings::indy_mocks_enabled() { - return Ok(()); - } - Locator::instance() .non_secret_controller .add_record_tags( @@ -153,10 +131,6 @@ pub(crate) async fn update_wallet_record_tags( secret!(&tags) ); - if settings::indy_mocks_enabled() { - return Ok(()); - } - Locator::instance() .non_secret_controller .update_record_tags( @@ -183,10 +157,6 @@ pub(crate) async fn delete_wallet_record_tags( secret!(&tag_names) ); - if settings::indy_mocks_enabled() { - return Ok(()); - } - Locator::instance() .non_secret_controller .delete_record_tags(wallet_handle, xtype.into(), id.into(), tag_names.into()) @@ -209,10 +179,6 @@ pub async fn open_search_wallet( options ); - if 
settings::indy_mocks_enabled() { - return Ok(SearchHandle(1)); - } - let res = Locator::instance() .non_secret_controller .open_search(wallet_handle, xtype.into(), query.into(), options.into()) @@ -233,10 +199,6 @@ pub async fn fetch_next_records_wallet( count ); - if settings::indy_mocks_enabled() { - return Ok(String::from("{}")); - } - let res = Locator::instance() .non_secret_controller .fetch_search_next_records(wallet_handle, search_handle, count) @@ -249,10 +211,6 @@ pub async fn fetch_next_records_wallet( pub async fn close_search_wallet(search_handle: SearchHandle) -> VcxCoreResult<()> { trace!("close_search >>> search_handle: {:?}", search_handle); - if settings::indy_mocks_enabled() { - return Ok(()); - } - Locator::instance() .non_secret_controller .close_search(search_handle) diff --git a/aries_vcx_core/src/wallet/indy/mod.rs b/aries_vcx_core/src/wallet/indy/mod.rs index 4d46b2cb9d..86938afaf1 100644 --- a/aries_vcx_core/src/wallet/indy/mod.rs +++ b/aries_vcx_core/src/wallet/indy/mod.rs @@ -1,6 +1,7 @@ use std::thread; use async_trait::async_trait; +use derive_builder::Builder; use futures::executor::block_on; use serde::{Deserialize, Serialize}; use serde_json::Value; diff --git a/aries_vcx_core/src/wallet/indy/signing.rs b/aries_vcx_core/src/wallet/indy/signing.rs index d19f1d07e6..0ae9910754 100644 --- a/aries_vcx_core/src/wallet/indy/signing.rs +++ b/aries_vcx_core/src/wallet/indy/signing.rs @@ -2,15 +2,10 @@ use vdrtools::Locator; use crate::{ errors::error::{AriesVcxCoreError, AriesVcxCoreErrorKind, VcxCoreResult}, - global::settings, WalletHandle, }; pub async fn sign(wallet_handle: WalletHandle, my_vk: &str, msg: &[u8]) -> VcxCoreResult> { - if settings::indy_mocks_enabled() { - return Ok(Vec::from(msg)); - } - let res = Locator::instance() .crypto_controller .crypto_sign(wallet_handle, my_vk, msg) @@ -20,10 +15,6 @@ pub async fn sign(wallet_handle: WalletHandle, my_vk: &str, msg: &[u8]) -> VcxCo } pub async fn verify(vk: &str, msg: &[u8], signature: &[u8]) -> VcxCoreResult { - if settings::indy_mocks_enabled() { - return Ok(true); - } - let res = Locator::instance() .crypto_controller .crypto_verify(vk, msg, signature) @@ -38,10 +29,6 @@ pub async fn pack_message( receiver_keys: &str, msg: &[u8], ) -> VcxCoreResult> { - if settings::indy_mocks_enabled() { - return Ok(msg.to_vec()); - } - // parse json array of keys let receiver_list = serde_json::from_str::>(receiver_keys) .map_err(|_| { @@ -76,10 +63,6 @@ pub async fn pack_message( } pub async fn unpack_message(wallet_handle: WalletHandle, msg: &[u8]) -> VcxCoreResult> { - if settings::indy_mocks_enabled() { - return Ok(Vec::from(msg)); - } - let res = Locator::instance() .crypto_controller .unpack_msg(serde_json::from_slice(msg)?, wallet_handle) diff --git a/aries_vcx_core/src/wallet/indy/wallet.rs b/aries_vcx_core/src/wallet/indy/wallet.rs index c5e0b61941..4bf556c602 100644 --- a/aries_vcx_core/src/wallet/indy/wallet.rs +++ b/aries_vcx_core/src/wallet/indy/wallet.rs @@ -8,8 +8,6 @@ use vdrtools::{ use crate::{ errors::error::{AriesVcxCoreError, AriesVcxCoreErrorKind, VcxCoreResult}, - global::settings, - utils, wallet::indy::{ did_mocks::{did_mocks_enabled, DidMocks}, IssuerConfig, RestoreWalletConfigs, WalletConfig, @@ -271,11 +269,6 @@ pub async fn export_wallet( } pub async fn create_and_open_wallet(wallet_config: &WalletConfig) -> VcxCoreResult { - if settings::indy_mocks_enabled() { - warn!("create_and_open_wallet ::: Indy mocks enabled, skipping opening main wallet."); - return Ok(WalletHandle(0)); - } - 
create_indy_wallet(wallet_config).await?; let handle = open_wallet(wallet_config).await?; @@ -286,11 +279,6 @@ pub async fn create_and_open_wallet(wallet_config: &WalletConfig) -> VcxCoreResu pub async fn close_wallet(wallet_handle: WalletHandle) -> VcxCoreResult<()> { trace!("close_wallet >>>"); - if settings::indy_mocks_enabled() { - warn!("close_wallet >>> Indy mocks enabled, skipping closing wallet"); - return Ok(()); - } - Locator::instance() .wallet_controller .close(wallet_handle) @@ -310,13 +298,6 @@ pub async fn create_and_store_my_did( method_name ); - if settings::indy_mocks_enabled() { - return Ok(( - utils::constants::DID.to_string(), - utils::constants::VERKEY.to_string(), - )); - } - let res = Locator::instance() .did_controller .create_and_store_my_did( diff --git a/aries_vcx_core/src/wallet/indy/wallet_non_secrets.rs b/aries_vcx_core/src/wallet/indy/wallet_non_secrets.rs index b09a18b410..0d55adeaa5 100644 --- a/aries_vcx_core/src/wallet/indy/wallet_non_secrets.rs +++ b/aries_vcx_core/src/wallet/indy/wallet_non_secrets.rs @@ -1,4 +1,4 @@ -use serde_json; +use serde_json::{self, json}; use crate::{ errors::error::{AriesVcxCoreError, AriesVcxCoreErrorKind, VcxCoreResult}, diff --git a/aries_vcx_core/src/wallet/mock_wallet.rs b/aries_vcx_core/src/wallet/mock_wallet.rs index 2cae11ecdc..e368d9deac 100644 --- a/aries_vcx_core/src/wallet/mock_wallet.rs +++ b/aries_vcx_core/src/wallet/mock_wallet.rs @@ -2,6 +2,7 @@ use std::collections::HashMap; use async_trait::async_trait; +use super::structs_io::UnpackMessageOutput; #[cfg(feature = "vdrtools_wallet")] use crate::WalletHandle; use crate::{ @@ -142,7 +143,11 @@ impl BaseWallet for MockWallet { Ok(msg.to_vec()) } - async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult> { - Ok(msg.to_vec()) + async fn unpack_message(&self, msg: &[u8]) -> VcxCoreResult { + Ok(UnpackMessageOutput { + message: format!("{:?}", msg), + recipient_verkey: "".to_owned(), + sender_verkey: None, + }) } } diff --git a/aries_vcx_core/src/wallet/mod.rs b/aries_vcx_core/src/wallet/mod.rs index 048e8bff4a..32ede501ec 100644 --- a/aries_vcx_core/src/wallet/mod.rs +++ b/aries_vcx_core/src/wallet/mod.rs @@ -3,3 +3,4 @@ pub mod base_wallet; #[cfg(feature = "vdrtools_wallet")] pub mod indy; pub mod mock_wallet; +pub mod structs_io; diff --git a/aries_vcx_core/src/wallet/structs_io.rs b/aries_vcx_core/src/wallet/structs_io.rs new file mode 100644 index 0000000000..148bce9db2 --- /dev/null +++ b/aries_vcx_core/src/wallet/structs_io.rs @@ -0,0 +1,9 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] +pub struct UnpackMessageOutput { + pub message: String, + pub recipient_verkey: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub sender_verkey: Option, +} diff --git a/did_doc/rustfmt.toml b/did_doc/rustfmt.toml deleted file mode 100644 index b3a03b77b3..0000000000 --- a/did_doc/rustfmt.toml +++ /dev/null @@ -1,2 +0,0 @@ -edition = "2021" -max_width=100 diff --git a/did_doc/src/schema/did_doc.rs b/did_doc/src/schema/did_doc.rs index 7601ed6dcb..72dd642a09 100644 --- a/did_doc/src/schema/did_doc.rs +++ b/did_doc/src/schema/did_doc.rs @@ -4,14 +4,13 @@ use did_parser::{Did, DidUrl}; use serde::{Deserialize, Serialize}; use serde_json::Value; -use crate::error::DidDocumentBuilderError; - use super::{ service::Service, types::uri::Uri, utils::OneOrList, verification_method::{VerificationMethod, VerificationMethodKind}, }; +use crate::error::DidDocumentBuilderError; pub type ControllerAlias = 
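// Editorial sketch (not part of the diff): one way a caller might consume the typed
// UnpackMessageOutput introduced in wallet/structs_io.rs above, instead of hand-parsing
// the raw JSON bytes that unpack_message used to return. The struct mirrors the one in
// the diff; the JSON value below is a made-up example.
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub struct UnpackMessageOutput {
    pub message: String,
    pub recipient_verkey: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sender_verkey: Option<String>,
}

fn main() -> Result<(), serde_json::Error> {
    // Shape mirrors an indy-style unpack result; sender_verkey is absent for anon-crypted messages.
    let raw = br#"{"message":"{\"@type\":\"https://didcomm.org/trust_ping/1.0/ping\"}","recipient_verkey":"8HH5gYEeNc3z7PYXmd54d4"}"#;
    let unpacked: UnpackMessageOutput = serde_json::from_slice(raw)?;
    assert!(unpacked.sender_verkey.is_none());
    println!("message for {}: {}", unpacked.recipient_verkey, unpacked.message);
    Ok(())
}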
OneOrList; diff --git a/did_doc/src/schema/service.rs b/did_doc/src/schema/service.rs index d067882c08..5b0db23b7f 100644 --- a/did_doc/src/schema/service.rs +++ b/did_doc/src/schema/service.rs @@ -2,12 +2,11 @@ use std::collections::HashSet; use serde::{Deserialize, Serialize}; -use crate::error::DidDocumentBuilderError; - use super::{ types::{uri::Uri, url::Url}, utils::OneOrList, }; +use crate::error::DidDocumentBuilderError; pub type ServiceTypeAlias = OneOrList; diff --git a/did_doc/src/schema/verification_method/mod.rs b/did_doc/src/schema/verification_method/mod.rs index 3f2a00eda7..df5ab7c02f 100644 --- a/did_doc/src/schema/verification_method/mod.rs +++ b/did_doc/src/schema/verification_method/mod.rs @@ -2,15 +2,13 @@ mod public_key; mod verification_method_kind; mod verification_method_type; -pub use self::public_key::PublicKeyField; - use ::public_key::Key; -pub use verification_method_kind::VerificationMethodKind; -pub use verification_method_type::VerificationMethodType; - use did_parser::{Did, DidUrl}; use serde::{Deserialize, Serialize}; +pub use verification_method_kind::VerificationMethodKind; +pub use verification_method_type::VerificationMethodType; +pub use self::public_key::PublicKeyField; use crate::{error::DidDocumentBuilderError, schema::types::jsonwebkey::JsonWebKey}; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] @@ -160,16 +158,18 @@ impl CompleteVerificationMethodBuilder { id: self.id, controller: self.controller, verification_method_type: self.verification_method_type, - public_key: self.public_key.unwrap(), // SAFETY: The builder will always set the public key + public_key: self.public_key.unwrap(), /* SAFETY: The builder will always set the + * public key */ } } } #[cfg(test)] mod tests { - use super::*; use serde_json::Value; + use super::*; + fn create_valid_did() -> Did { Did::parse("did:example:123456789abcdefghi".to_string()).unwrap() } @@ -230,7 +230,7 @@ mod tests { let vm = VerificationMethod::builder(id.clone(), controller.clone(), verification_method_type) - .add_public_key_multibase(public_key_multibase.clone()) + .add_public_key_multibase(public_key_multibase) .build(); assert_eq!(vm.id(), &id); @@ -255,7 +255,7 @@ mod tests { let vm = VerificationMethod::builder(id.clone(), controller.clone(), verification_method_type) - .add_public_key_multibase(public_key_multibase.clone()) + .add_public_key_multibase(public_key_multibase) .build(); assert_eq!(vm.id(), &id); @@ -298,10 +298,9 @@ mod tests { let verification_method_type = create_valid_verification_key_type(); let public_key_multibase_expected = create_valid_multibase(); - let vm = - VerificationMethod::builder(id.clone(), controller.clone(), verification_method_type) - .add_public_key_multibase(public_key_multibase_expected.clone()) - .build(); + let vm = VerificationMethod::builder(id, controller, verification_method_type) + .add_public_key_multibase(public_key_multibase_expected.clone()) + .build(); match vm.public_key_field() { PublicKeyField::Multibase { diff --git a/did_doc_sov/src/lib.rs b/did_doc_sov/src/lib.rs index f8f4364202..8d4077bb6f 100644 --- a/did_doc_sov/src/lib.rs +++ b/did_doc_sov/src/lib.rs @@ -114,7 +114,7 @@ impl DidDocumentSovBuilder { } pub fn add_service(mut self, service: ServiceSov) -> Self { - self.services.push(service.clone()); + self.services.push(service); self } diff --git a/did_parser/rustfmt.toml b/did_parser/rustfmt.toml deleted file mode 100644 index b3a03b77b3..0000000000 --- a/did_parser/rustfmt.toml +++ /dev/null @@ -1,2 +0,0 @@ -edition = 
"2021" -max_width=100 diff --git a/did_parser/src/did.rs b/did_parser/src/did.rs index ac478e232b..5884bed718 100644 --- a/did_parser/src/did.rs +++ b/did_parser/src/did.rs @@ -1,11 +1,12 @@ -use std::convert::TryFrom; -use std::fmt::{Display, Formatter}; -use std::str::FromStr; +use std::{ + convert::TryFrom, + fmt::{Display, Formatter}, + str::FromStr, +}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use crate::DidUrl; -use crate::{error::ParseError, utils::parse::parse_did_method_id, DidRange}; +use crate::{error::ParseError, utils::parse::parse_did_method_id, DidRange, DidUrl}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Did { diff --git a/did_parser/src/utils/parse.rs b/did_parser/src/utils/parse.rs index 2a21565ad1..00a45cbb26 100644 --- a/did_parser/src/utils/parse.rs +++ b/did_parser/src/utils/parse.rs @@ -1,5 +1,4 @@ -use crate::error::ParseError; -use crate::DidRange; +use crate::{error::ParseError, DidRange}; pub(crate) fn parse_key_value( did_url: &str, diff --git a/did_parser/tests/did_url/positive.rs b/did_parser/tests/did_url/positive.rs index 3802a06272..5fde90ec2a 100644 --- a/did_parser/tests/did_url/positive.rs +++ b/did_parser/tests/did_url/positive.rs @@ -1,6 +1,7 @@ -use did_parser::DidUrl; use std::collections::HashMap; +use did_parser::DidUrl; + macro_rules! test_cases_positive { ($($name:ident: $input:expr, $expected_did:expr, $expected_method:expr, $expected_id:expr, $expected_path:expr, $expected_fragment:expr, $expected_queries:expr, $expected_params:expr)*) => { $( diff --git a/did_peer/src/numalgos/numalgo2/resolve/helpers.rs b/did_peer/src/numalgos/numalgo2/resolve/helpers.rs index fee97a642c..1ea38dc516 100644 --- a/did_peer/src/numalgos/numalgo2/resolve/helpers.rs +++ b/did_peer/src/numalgos/numalgo2/resolve/helpers.rs @@ -159,7 +159,7 @@ fn build_service_aip1( service.service_endpoint().parse()?, ExtraFieldsSov::AIP1(ExtraFieldsAIP1::default()), ) - .add_service_type(service_type.to_string())? + .add_service_type(service_type)? .build()) } @@ -174,7 +174,7 @@ fn build_service_didcommv2( let extra = ExtraFieldsSov::DIDCommV2(extra_builder.build()); Ok( Service::::builder(id, service.service_endpoint().parse()?, extra) - .add_service_type(service_type.to_string())? + .add_service_type(service_type)? 
.build(), ) } @@ -247,7 +247,7 @@ mod tests { .parse() .unwrap(); let mut index = 0; - let ddo_builder = DidDocumentBuilder::::new(did.clone()); + let ddo_builder = DidDocumentBuilder::::new(did); let built_ddo = process_service_element(purposeless_service_element, ddo_builder, &mut index) .unwrap() @@ -272,7 +272,7 @@ mod tests { .parse() .unwrap(); let mut index = 0; - let ddo_builder = DidDocumentBuilder::::new(did.clone()); + let ddo_builder = DidDocumentBuilder::::new(did); let built_ddo = process_service_element(purposeless_service_element, ddo_builder, &mut index) .unwrap() diff --git a/did_peer/src/numalgos/numalgo2/verification_method.rs b/did_peer/src/numalgos/numalgo2/verification_method.rs index 807aa5d5e0..4e98a74014 100644 --- a/did_peer/src/numalgos/numalgo2/verification_method.rs +++ b/did_peer/src/numalgos/numalgo2/verification_method.rs @@ -80,7 +80,7 @@ fn build_verification_methods_from_bls_multikey( let vm1 = add_public_key_to_builder( VerificationMethod::builder( - id1.to_owned(), + id1, did.to_owned(), VerificationMethodType::Bls12381G1Key2020, ), diff --git a/did_peer/src/peer_did/numalgos/traits.rs b/did_peer/src/peer_did/numalgos/traits.rs index af785f396a..c7283422a5 100644 --- a/did_peer/src/peer_did/numalgos/traits.rs +++ b/did_peer/src/peer_did/numalgos/traits.rs @@ -19,7 +19,7 @@ pub trait Numalgo: Sized + Default { { let did: Did = did.try_into().map_err(Into::into)?; - let numalgo_char = did.id().chars().nth(0).ok_or_else(|| { + let numalgo_char = did.id().chars().next().ok_or_else(|| { DidPeerError::DidValidationError(format!( "Invalid did: unable to read numalgo character in did {}", did.did() diff --git a/did_resolver/rustfmt.toml b/did_resolver/rustfmt.toml deleted file mode 100644 index b3a03b77b3..0000000000 --- a/did_resolver/rustfmt.toml +++ /dev/null @@ -1,2 +0,0 @@ -edition = "2021" -max_width=100 diff --git a/did_resolver/src/traits/dereferenceable/dereferencing_output.rs b/did_resolver/src/traits/dereferenceable/dereferencing_output.rs index 1e75953c54..0be19e468d 100644 --- a/did_resolver/src/traits/dereferenceable/dereferencing_output.rs +++ b/did_resolver/src/traits/dereferenceable/dereferencing_output.rs @@ -1,7 +1,7 @@ -use crate::shared_types::did_document_metadata::DidDocumentMetadata; use std::io::Read; use super::dereferencing_metadata::DidDereferencingMetadata; +use crate::shared_types::did_document_metadata::DidDocumentMetadata; pub struct DidDereferencingOutput { dereferencing_metadata: DidDereferencingMetadata, diff --git a/did_resolver/src/traits/dereferenceable/mod.rs b/did_resolver/src/traits/dereferenceable/mod.rs index d9bb38f579..22b30580a0 100644 --- a/did_resolver/src/traits/dereferenceable/mod.rs +++ b/did_resolver/src/traits/dereferenceable/mod.rs @@ -5,13 +5,13 @@ pub mod dereferencing_output; use std::io::Read; -use crate::{error::GenericError, traits::resolvable::DidResolvable}; use async_trait::async_trait; use did_parser::DidUrl; use self::{ dereferencing_options::DidDereferencingOptions, dereferencing_output::DidDereferencingOutput, }; +use crate::{error::GenericError, traits::resolvable::DidResolvable}; #[async_trait] pub trait DidDereferenceable: DidResolvable { diff --git a/did_resolver/src/traits/resolvable/mod.rs b/did_resolver/src/traits/resolvable/mod.rs index 447c299c97..fce98a0676 100644 --- a/did_resolver/src/traits/resolvable/mod.rs +++ b/did_resolver/src/traits/resolvable/mod.rs @@ -3,11 +3,11 @@ pub mod resolution_metadata; pub mod resolution_options; pub mod resolution_output; -use 
crate::error::GenericError; use async_trait::async_trait; use did_parser::Did; use self::{resolution_options::DidResolutionOptions, resolution_output::DidResolutionOutput}; +use crate::error::GenericError; #[async_trait] pub trait DidResolvable { diff --git a/did_resolver_registry/rustfmt.toml b/did_resolver_registry/rustfmt.toml deleted file mode 100644 index b3a03b77b3..0000000000 --- a/did_resolver_registry/rustfmt.toml +++ /dev/null @@ -1,2 +0,0 @@ -edition = "2021" -max_width=100 diff --git a/did_resolver_registry/src/lib.rs b/did_resolver_registry/src/lib.rs index c88b04ca12..8de6818b72 100644 --- a/did_resolver_registry/src/lib.rs +++ b/did_resolver_registry/src/lib.rs @@ -115,11 +115,13 @@ impl ResolverRegistry { #[cfg(test)] mod tests { - use super::*; + use std::{error::Error, pin::Pin}; + use async_trait::async_trait; use did_resolver::did_doc::schema::did_doc::DidDocumentBuilder; use mockall::{automock, predicate::eq}; - use std::{error::Error, pin::Pin}; + + use super::*; struct DummyDidResolver; diff --git a/did_resolver_sov/Cargo.toml b/did_resolver_sov/Cargo.toml index 318790ddd2..c4f7021ed3 100644 --- a/did_resolver_sov/Cargo.toml +++ b/did_resolver_sov/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] did_resolver = { path = "../did_resolver" } -aries_vcx_core = { path = "../aries_vcx_core", features = ["modular_libs"] } +aries_vcx_core = { path = "../aries_vcx_core", default_features = false} did_doc_sov = { path = "../did_doc_sov" } async-trait = "0.1.68" mockall = "0.11.4" diff --git a/did_resolver_sov/rustfmt.toml b/did_resolver_sov/rustfmt.toml deleted file mode 100644 index b3a03b77b3..0000000000 --- a/did_resolver_sov/rustfmt.toml +++ /dev/null @@ -1,2 +0,0 @@ -edition = "2021" -max_width=100 diff --git a/did_resolver_sov/src/dereferencing/dereferencer.rs b/did_resolver_sov/src/dereferencing/dereferencer.rs index 284d240126..1494a39951 100644 --- a/did_resolver_sov/src/dereferencing/dereferencer.rs +++ b/did_resolver_sov/src/dereferencing/dereferencer.rs @@ -13,12 +13,11 @@ use did_resolver::{ }, }; -use crate::resolution::DidSovResolver; - use super::utils::dereference_did_document; +use crate::{reader::AttrReader, resolution::DidSovResolver}; #[async_trait] -impl DidDereferenceable for DidSovResolver { +impl<'a, T: AttrReader> DidDereferenceable for DidSovResolver<'a, T> { type Output = Cursor>; async fn dereference( diff --git a/did_resolver_sov/src/dereferencing/utils.rs b/did_resolver_sov/src/dereferencing/utils.rs index c41e8347e6..8a3194f1c4 100644 --- a/did_resolver_sov/src/dereferencing/utils.rs +++ b/did_resolver_sov/src/dereferencing/utils.rs @@ -91,14 +91,17 @@ pub(crate) fn dereference_did_document( #[cfg(test)] mod tests { - use super::*; - - use did_resolver::did_doc::schema::did_doc::DidDocumentBuilder; - use did_resolver::did_doc::schema::verification_method::VerificationMethodType; - use did_resolver::did_parser::DidUrl; - use did_resolver::traits::resolvable::resolution_output::DidResolutionOutput; + use did_resolver::{ + did_doc::schema::{ + did_doc::DidDocumentBuilder, verification_method::VerificationMethodType, + }, + did_parser::DidUrl, + traits::resolvable::resolution_output::DidResolutionOutput, + }; use serde_json::Value; + use super::*; + fn example_did_document_builder() -> DidDocumentBuilder<()> { let verification_method = VerificationMethod::builder( DidUrl::parse("did:example:123456789abcdefghi#keys-1".to_string()).unwrap(), diff --git a/did_resolver_sov/src/reader/mod.rs b/did_resolver_sov/src/reader/mod.rs index 
9a386e6b61..c050e6a06d 100644 --- a/did_resolver_sov/src/reader/mod.rs +++ b/did_resolver_sov/src/reader/mod.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use aries_vcx_core::ledger::base_ledger::IndyLedgerRead; use async_trait::async_trait; @@ -12,26 +10,20 @@ pub trait AttrReader: Send + Sync { async fn get_nym(&self, did: &str) -> Result; } -pub struct ConcreteAttrReader { - ledger: Arc, -} - #[async_trait] -impl AttrReader for ConcreteAttrReader { +impl AttrReader for S +where + S: IndyLedgerRead + ?Sized, +{ async fn get_attr(&self, target_did: &str, attr_name: &str) -> Result { - self.ledger - .get_attr(target_did, attr_name) + IndyLedgerRead::get_attr(self, target_did, attr_name) .await .map_err(|err| err.into()) } async fn get_nym(&self, did: &str) -> Result { - self.ledger.get_nym(did).await.map_err(|err| err.into()) - } -} - -impl From> for ConcreteAttrReader { - fn from(ledger: Arc) -> Self { - Self { ledger } + IndyLedgerRead::get_nym(self, did) + .await + .map_err(|err| err.into()) } } diff --git a/did_resolver_sov/src/resolution/resolver.rs b/did_resolver_sov/src/resolution/resolver.rs index b220012fc0..2dda4adc70 100644 --- a/did_resolver_sov/src/resolution/resolver.rs +++ b/did_resolver_sov/src/resolution/resolver.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use async_trait::async_trait; use did_doc_sov::extra_fields::ExtraFieldsSov; use did_resolver::{ @@ -13,25 +11,24 @@ use did_resolver::{ }; use serde_json::Value; +use super::utils::{is_valid_sovrin_did_id, ledger_response_to_ddo}; use crate::{ error::{parsing::ParsingErrorSource, DidSovError}, reader::AttrReader, }; -use super::utils::{is_valid_sovrin_did_id, ledger_response_to_ddo}; - -pub struct DidSovResolver { - ledger: Arc, +pub struct DidSovResolver<'a, T: AttrReader> { + ledger: &'a T, } -impl DidSovResolver { - pub fn new(ledger: Arc) -> Self { +impl<'a, T: AttrReader> DidSovResolver<'a, T> { + pub fn new(ledger: &'a T) -> Self { DidSovResolver { ledger } } } #[async_trait] -impl DidResolvable for DidSovResolver { +impl<'a, T: AttrReader> DidResolvable for DidSovResolver<'a, T> { type ExtraFieldsService = ExtraFieldsSov; type ExtraFieldsOptions = (); @@ -69,7 +66,7 @@ impl DidResolvable for DidSovResolver { } } -impl DidSovResolver { +impl<'a, T: AttrReader> DidSovResolver<'a, T> { async fn get_verkey(&self, did: &str) -> Result { let nym_response = self.ledger.get_nym(did).await?; let nym_json: Value = serde_json::from_str(&nym_response)?; diff --git a/did_resolver_sov/src/resolution/utils.rs b/did_resolver_sov/src/resolution/utils.rs index 95c49b513d..df3b797c7d 100644 --- a/did_resolver_sov/src/resolution/utils.rs +++ b/did_resolver_sov/src/resolution/utils.rs @@ -50,7 +50,7 @@ fn get_txn_time_from_response(resp: &str) -> Result { fn unix_to_datetime(posix_timestamp: i64) -> Option> { NaiveDateTime::from_timestamp_opt(posix_timestamp, 0) - .map(|date_time| DateTime::::from_utc(date_time, Utc)) + .map(|date_time| DateTime::::from_naive_utc_and_offset(date_time, Utc)) } pub(super) fn is_valid_sovrin_did_id(id: &str) -> bool { @@ -96,7 +96,7 @@ pub(super) async fn ledger_response_to_ddo( did.to_string().try_into()?, VerificationMethodType::Ed25519VerificationKey2018, ) - .add_public_key_base58(verkey.to_string()) + .add_public_key_base58(verkey) .build(); let ddo = DidDocument::builder(ddo_id) @@ -124,10 +124,11 @@ pub(super) async fn ledger_response_to_ddo( #[cfg(test)] mod tests { - use super::*; use chrono::TimeZone; use did_resolver::did_doc::schema::verification_method::PublicKeyField; + use super::*; + #[test] 
fn test_prepare_ids() { let did = "did:example:1234567890".to_string(); diff --git a/did_resolver_sov/src/service.rs b/did_resolver_sov/src/service.rs index 57a49abe1b..3242bd1c63 100644 --- a/did_resolver_sov/src/service.rs +++ b/did_resolver_sov/src/service.rs @@ -1,5 +1,4 @@ -use std::collections::HashSet; -use std::fmt::Display; +use std::{collections::HashSet, fmt::Display}; use serde::{Deserialize, Deserializer}; use url::Url; @@ -66,10 +65,12 @@ where #[cfg(test)] mod tests { - use super::*; - use serde_json::from_str; use std::iter::FromIterator; + use serde_json::from_str; + + use super::*; + #[test] fn test_deserialize_endpoint_did_sov() { let json = r#"{ diff --git a/did_resolver_sov/tests/resolution.rs b/did_resolver_sov/tests/resolution.rs index 76e3836869..38dc4b41b2 100644 --- a/did_resolver_sov/tests/resolution.rs +++ b/did_resolver_sov/tests/resolution.rs @@ -1,26 +1,25 @@ -use std::sync::Arc; -use std::thread; -use std::time::Duration; +use std::{thread, time::Duration}; -use aries_vcx::core::profile::profile::Profile; use aries_vcx::{ common::ledger::{ service_didsov::{DidSovServiceType, EndpointDidSov}, transactions::write_endpoint, }, - utils::devsetup::SetupProfile, + core::profile::Profile, + run_setup, +}; +use did_resolver::{ + did_parser::Did, + traits::resolvable::{resolution_options::DidResolutionOptions, DidResolvable}, }; -use did_resolver::did_parser::Did; -use did_resolver::traits::resolvable::{resolution_options::DidResolutionOptions, DidResolvable}; -use did_resolver_sov::reader::ConcreteAttrReader; use did_resolver_sov::resolution::DidSovResolver; -async fn write_test_endpoint(profile: &Arc, did: &str) { +async fn write_test_endpoint(profile: &impl Profile, did: &str) { let endpoint = EndpointDidSov::create() .set_service_endpoint("http://localhost:8080".parse().unwrap()) .set_routing_keys(Some(vec!["key1".to_string(), "key2".to_string()])) .set_types(Some(vec![DidSovServiceType::Endpoint])); - write_endpoint(&profile.inject_indy_ledger_write(), did, &endpoint) + write_endpoint(profile.ledger_write(), did, &endpoint) .await .unwrap(); thread::sleep(Duration::from_millis(50)); @@ -28,12 +27,10 @@ async fn write_test_endpoint(profile: &Arc, did: &str) { #[tokio::test] async fn write_service_on_ledger_and_resolve_did_doc() { - SetupProfile::run(|init| async move { + run_setup!(|init| async move { let did = format!("did:sov:{}", init.institution_did); write_test_endpoint(&init.profile, &init.institution_did).await; - let resolver = DidSovResolver::new(Arc::::new( - init.profile.inject_indy_ledger_read().into(), - )); + let resolver = DidSovResolver::new(init.profile.ledger_read()); let did_doc = resolver .resolve( &Did::parse(did.clone()).unwrap(), @@ -48,12 +45,10 @@ async fn write_service_on_ledger_and_resolve_did_doc() { #[tokio::test] async fn test_error_handling_during_resolution() { - SetupProfile::run(|init| async move { + run_setup!(|init| async move { let did = format!("did:unknownmethod:{}", init.institution_did); - let resolver = DidSovResolver::new(Arc::::new( - init.profile.inject_indy_ledger_read().into(), - )); + let resolver = DidSovResolver::new(init.profile.ledger_read()); let result = resolver .resolve( diff --git a/indy_ledger_response_parser/Cargo.toml b/indy_ledger_response_parser/Cargo.toml index ec8a66fc07..9946743374 100644 --- a/indy_ledger_response_parser/Cargo.toml +++ b/indy_ledger_response_parser/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" [dependencies] serde = { version = "1.0.163", features = ["derive"] } serde_json = "1.0.96" 
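// Editorial sketch (not part of the diff): the borrowed, generic resolver shape that the
// DidSovResolver changes above move to -- holding `&'a T` for some T implementing a reader
// trait, rather than owning an Arc<dyn IndyLedgerRead>. `AttrReader`, `InMemoryReader` and
// `Resolver` here are simplified stand-ins, not the real did_resolver_sov types.
trait AttrReader {
    fn get_attr(&self, target_did: &str, attr_name: &str) -> Option<String>;
}

struct InMemoryReader;

impl AttrReader for InMemoryReader {
    fn get_attr(&self, _target_did: &str, attr_name: &str) -> Option<String> {
        Some(format!("stub value for {attr_name}"))
    }
}

struct Resolver<'a, T: AttrReader> {
    ledger: &'a T,
}

impl<'a, T: AttrReader> Resolver<'a, T> {
    fn new(ledger: &'a T) -> Self {
        Resolver { ledger }
    }

    fn resolve_endpoint(&self, did: &str) -> Option<String> {
        // Borrowing keeps the resolver free of Arc plumbing; the caller decides how
        // (and whether) the reader is shared.
        self.ledger.get_attr(did, "endpoint")
    }
}

fn main() {
    let reader = InMemoryReader;
    let resolver = Resolver::new(&reader);
    assert!(resolver.resolve_endpoint("did:sov:V4SGRU86Z58d6TV7PBUe6f").is_some());
}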
-time = "=0.3.20" -indy-vdr = { git = "https://github.com/hyperledger/indy-vdr.git", rev = "879e29e", default-features = false, features = ["log"] } +time = "0.3.20" +indy-vdr = { git = "https://github.com/hyperledger/indy-vdr.git", rev = "c143268", default-features = false, features = ["log"] } thiserror = "1.0.44" anoncreds-clsignatures = "0.2.0" diff --git a/libvcx_core/Cargo.toml b/libvcx_core/Cargo.toml index 7c6f34227c..99e8473583 100644 --- a/libvcx_core/Cargo.toml +++ b/libvcx_core/Cargo.toml @@ -8,31 +8,30 @@ edition.workspace = true [features] fatal_warnings = [] -anoncreds_credx = ["aries-vcx/modular_libs"] -anoncreds_vdrtools = ["aries-vcx/vdrtools"] - [dependencies] num-traits = "0.2.0" once_cell = { version = "1.15" } cfg-if = { version = "1.0" } -env_logger = "0.9.0" +env_logger = "0.10.0" log = "0.4.16" chrono = "0.4.23" -time = "=0.3.20" +time = "0.3.20" lazy_static = "1.3" -libc = "=0.2.139" -rand = "0.7.3" +libc = "0.2.148" +rand = "0.8.5" serde = "1.0.97" serde_json = "1.0.40" serde_derive = "1.0.97" futures = { version = "0.3", default-features = false } aries-vcx = { path = "../aries_vcx" } +aries_vcx_core = { path = "../aries_vcx_core" } diddoc_legacy = { path = "../diddoc_legacy" } thiserror = "1.0.37" -uuid = { version = "0.7.4", default-features = false, features = ["v4"] } +uuid = { version = "1.4.1", default-features = false, features = ["v4"] } agency_client = { path = "../agency_client" } async-trait = "0.1.61" url = "2.3.1" +wallet_migrator = { path = "../wallet_migrator" } [dev-dependencies] tokio = { version = "1.20", features = [ "rt", "macros" ] } diff --git a/libvcx_core/src/api_vcx/api_global/agency_client.rs b/libvcx_core/src/api_vcx/api_global/agency_client.rs index 3730d40f9f..6b4d0a3650 100644 --- a/libvcx_core/src/api_vcx/api_global/agency_client.rs +++ b/libvcx_core/src/api_vcx/api_global/agency_client.rs @@ -3,15 +3,13 @@ use std::{ sync::{RwLock, RwLockWriteGuard}, }; -use aries_vcx::{ - agency_client::{ - agency_client::AgencyClient, - configuration::{AgencyClientConfig, AgentProvisionConfig}, - messages::update_message::UIDsByConn, - MessageStatusCode, - }, - aries_vcx_core::wallet::agency_client_wallet::ToBaseAgencyClientWallet, +use aries_vcx::agency_client::{ + agency_client::AgencyClient, + configuration::{AgencyClientConfig, AgentProvisionConfig}, + messages::update_message::UIDsByConn, + MessageStatusCode, }; +use aries_vcx_core::wallet::agency_client_wallet::ToBaseAgencyClientWallet; use super::profile::get_main_wallet; use crate::errors::{ @@ -93,15 +91,13 @@ pub async fn provision_cloud_agent( pub mod tests { use aries_vcx::{ agency_client::{ - configuration::AgentProvisionConfig, messages::update_message::UIDsByConn, - testing::mocking::AgencyMockDecrypted, MessageStatusCode, + messages::update_message::UIDsByConn, testing::mocking::AgencyMockDecrypted, + MessageStatusCode, }, utils::{constants, devsetup::SetupMocks}, }; - use crate::api_vcx::api_global::agency_client::{ - agency_update_messages, provision_cloud_agent, update_webhook_url, - }; + use crate::api_vcx::api_global::agency_client::{agency_update_messages, update_webhook_url}; #[tokio::test] async fn test_update_institution_webhook() { @@ -109,21 +105,6 @@ pub mod tests { update_webhook_url("https://example.com").await.unwrap(); } - #[tokio::test] - async fn test_provision_cloud_agent() { - let _setup = SetupMocks::init(); - - let config = AgentProvisionConfig { - agency_did: "Ab8TvZa3Q19VNkQVzAWVL7".into(), - agency_verkey: 
"5LXaR43B1aQyeh94VBP8LG1Sgvjk7aNfqiksBCSjwqbf".into(), - agency_endpoint: "https://enym-eagency.pdev.evernym.com" - .parse() - .expect("valid url"), - agent_seed: None, - }; - provision_cloud_agent(&config).await.unwrap(); - } - #[tokio::test] async fn test_messages_update_status() { let _setup = SetupMocks::init(); diff --git a/libvcx_core/src/api_vcx/api_global/ledger.rs b/libvcx_core/src/api_vcx/api_global/ledger.rs index dc288f73cb..b6ea9a17c8 100644 --- a/libvcx_core/src/api_vcx/api_global/ledger.rs +++ b/libvcx_core/src/api_vcx/api_global/ledger.rs @@ -9,19 +9,14 @@ use aries_vcx::{ write_endpoint_legacy, }, }, - global::settings::CONFIG_INSTITUTION_DID, }; +use aries_vcx_core::ledger::base_ledger::{IndyLedgerRead, IndyLedgerWrite}; use diddoc_legacy::aries::service::AriesService; use url::Url; +use super::profile::{get_main_ledger_read, get_main_ledger_write, update_taa_configuration}; use crate::{ - api_vcx::api_global::{ - profile::{ - get_main_indy_ledger_read, get_main_indy_ledger_write, get_main_profile, - get_main_wallet, - }, - settings::get_config_value, - }, + api_vcx::api_global::profile::get_main_wallet, errors::{ error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, mapping_from_ariesvcx::map_ariesvcx_result, @@ -29,15 +24,13 @@ use crate::{ }, }; -pub async fn endorse_transaction(transaction: &str) -> LibvcxResult<()> { - let endorser_did = get_config_value(CONFIG_INSTITUTION_DID)?; - - let ledger = get_main_indy_ledger_write()?; - map_ariesvcx_core_result(ledger.endorse_transaction(&endorser_did, transaction).await) +pub async fn endorse_transaction(transaction: &str, endorser_did: &str) -> LibvcxResult<()> { + let ledger = get_main_ledger_write()?; + map_ariesvcx_core_result(ledger.endorse_transaction(endorser_did, transaction).await) } pub async fn get_ledger_txn(seq_no: i32, submitter_did: Option) -> LibvcxResult { - let ledger = get_main_indy_ledger_read()?; + let ledger = get_main_ledger_read()?; map_ariesvcx_core_result( ledger .get_ledger_txn(seq_no, submitter_did.as_deref()) @@ -47,8 +40,8 @@ pub async fn get_ledger_txn(seq_no: i32, submitter_did: Option) -> Libvc pub async fn rotate_verkey(did: &str) -> LibvcxResult<()> { let result = aries_vcx::common::keys::rotate_verkey( - &get_main_wallet()?, - &get_main_indy_ledger_write()?, + get_main_wallet()?.as_ref(), + get_main_ledger_write()?.as_ref(), did, ) .await; @@ -56,8 +49,10 @@ pub async fn rotate_verkey(did: &str) -> LibvcxResult<()> { } pub async fn get_verkey_from_ledger(did: &str) -> LibvcxResult { - let indy_ledger = get_main_indy_ledger_read()?; - map_ariesvcx_result(aries_vcx::common::keys::get_verkey_from_ledger(&indy_ledger, did).await) + let indy_ledger = get_main_ledger_read()?; + map_ariesvcx_result( + aries_vcx::common::keys::get_verkey_from_ledger(indy_ledger.as_ref(), did).await, + ) } pub async fn ledger_write_endpoint_legacy( @@ -73,7 +68,7 @@ pub async fn ledger_write_endpoint_legacy( })?) 
.set_recipient_keys(recipient_keys) .set_routing_keys(routing_keys); - write_endpoint_legacy(&get_main_indy_ledger_write()?, target_did, &service).await?; + write_endpoint_legacy(get_main_ledger_write()?.as_ref(), target_did, &service).await?; Ok(service) } @@ -92,21 +87,21 @@ pub async fn ledger_write_endpoint( DidSovServiceType::DidCommunication, ])) .set_routing_keys(Some(routing_keys)); - write_endpoint(&get_main_indy_ledger_write()?, target_did, &service).await?; + write_endpoint(get_main_ledger_write()?.as_ref(), target_did, &service).await?; Ok(service) } pub async fn ledger_get_service(target_did: &str) -> LibvcxResult { let target_did = target_did.to_owned(); - map_ariesvcx_result(get_service(&get_main_indy_ledger_read()?, &target_did).await) + map_ariesvcx_result(get_service(get_main_ledger_read()?.as_ref(), &target_did).await) } pub async fn ledger_get_attr(target_did: &str, attr: &str) -> LibvcxResult { - map_ariesvcx_result(get_attr(&get_main_indy_ledger_read()?, target_did, attr).await) + map_ariesvcx_result(get_attr(get_main_ledger_read()?.as_ref(), target_did, attr).await) } pub async fn ledger_clear_attr(target_did: &str, attr: &str) -> LibvcxResult { - map_ariesvcx_result(clear_attr(&get_main_indy_ledger_write()?, target_did, attr).await) + map_ariesvcx_result(clear_attr(get_main_ledger_write()?.as_ref(), target_did, attr).await) } pub async fn ledger_write_endorser_did( @@ -117,7 +112,7 @@ pub async fn ledger_write_endorser_did( ) -> LibvcxResult { map_ariesvcx_result( write_endorser_did( - &get_main_indy_ledger_write()?, + get_main_ledger_write()?.as_ref(), submitter_did, target_did, target_vk, @@ -128,7 +123,8 @@ pub async fn ledger_write_endorser_did( } pub async fn ledger_get_txn_author_agreement() -> LibvcxResult { - get_main_indy_ledger_read()? + get_main_ledger_read()? + .as_ref() .get_txn_author_agreement() .await? 
.ok_or_else(|| { @@ -149,11 +145,11 @@ pub fn set_taa_configuration( version, mechanism: acceptance_mechanism, }; - get_main_profile().update_taa_configuration(taa_options) + update_taa_configuration(taa_options) } pub fn get_taa_configuration() -> LibvcxResult> { - get_main_profile().get_taa_configuration() + super::profile::get_taa_configuration() } #[cfg(test)] @@ -164,7 +160,7 @@ pub mod tests { get_txns_sovrin_testnet, }, global::settings::DEFAULT_GENESIS_PATH, - utils::devsetup::{SetupEmpty, SetupMocks}, + utils::devsetup::SetupEmpty, }; use crate::api_vcx::api_global::{ @@ -232,15 +228,4 @@ pub mod tests { let auth_agreement = serde_json::to_value(auth_agreement).unwrap(); assert_eq!(expected, auth_agreement); } - - #[tokio::test] - async fn test_vcx_get_ledger_author_agreement() { - let _setup = SetupMocks::init(); - - let agreement = ledger_get_txn_author_agreement().await.unwrap(); - assert_eq!( - aries_vcx::utils::constants::DEFAULT_AUTHOR_AGREEMENT, - agreement - ); - } } diff --git a/libvcx_core/src/api_vcx/api_global/mod.rs b/libvcx_core/src/api_vcx/api_global/mod.rs index e90da0656b..443fe33765 100644 --- a/libvcx_core/src/api_vcx/api_global/mod.rs +++ b/libvcx_core/src/api_vcx/api_global/mod.rs @@ -2,7 +2,6 @@ pub mod agency_client; pub mod ledger; pub mod pool; pub mod profile; -pub mod settings; pub mod state; pub mod wallet; diff --git a/libvcx_core/src/api_vcx/api_global/pool.rs b/libvcx_core/src/api_vcx/api_global/pool.rs index 6a8ef6f350..c155d62187 100644 --- a/libvcx_core/src/api_vcx/api_global/pool.rs +++ b/libvcx_core/src/api_vcx/api_global/pool.rs @@ -7,10 +7,6 @@ use std::{ use aries_vcx::{ aries_vcx_core::{ ledger::{ - base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - TaaConfigurator, - }, request_submitter::vdr_ledger::{IndyVdrLedgerPool, IndyVdrSubmitter}, response_cacher::in_memory::{ InMemoryResponseCacherConfig, InMemoryResponseCacherConfigBuilder, @@ -21,29 +17,29 @@ use aries_vcx::{ }, core::profile::ledger::{indyvdr_build_ledger_read, indyvdr_build_ledger_write}, }; +use aries_vcx_core::ledger::{ + indy_vdr_ledger::{IndyVdrLedgerRead, IndyVdrLedgerWrite}, + request_signer::base_wallet::BaseWalletRequestSigner, + response_cacher::in_memory::InMemoryResponseCacher, +}; use crate::{ api_vcx::api_global::profile::get_main_wallet, - errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, + errors::error::{LibvcxError, LibvcxResult}, }; -lazy_static! 
{ - pub static ref GLOBAL_LEDGER_ANONCREDS_READ: RwLock>> = - RwLock::new(None); - pub static ref GLOBAL_LEDGER_ANONCREDS_WRITE: RwLock>> = - RwLock::new(None); - pub static ref GLOBAL_LEDGER_INDY_READ: RwLock>> = - RwLock::new(None); - pub static ref GLOBAL_LEDGER_INDY_WRITE: RwLock>> = - RwLock::new(None); - pub static ref GLOBAL_TAA_CONFIGURATOR: RwLock>> = - RwLock::new(None); -} +pub static GLOBAL_LEDGER_INDY_READ: RwLock< + Option>>, +> = RwLock::new(None); +pub static GLOBAL_LEDGER_INDY_WRITE: RwLock< + Option>>, +> = RwLock::new(None); pub fn is_main_pool_open() -> bool { - false - // todo: implement this, based on whether ledger read is Some or None - // global_profile.inject_anoncreds_ledger_read() + GLOBAL_LEDGER_INDY_READ + .read() + .map(|v| v.is_some()) + .unwrap_or_default() } // todo : enable opting out of caching completely be specifying 0 capacity @@ -76,25 +72,22 @@ impl TryFrom for InMemoryResponseCacherConfi } } -async fn build_components_ledger( +fn build_components_ledger( base_wallet: Arc, libvcx_pool_config: &LibvcxLedgerConfig, ) -> LibvcxResult<( - Arc, - Arc, - Arc, - Arc, - Arc, + IndyVdrLedgerRead, + IndyVdrLedgerWrite, )> { let indy_vdr_config = match &libvcx_pool_config.pool_config { None => PoolConfig::default(), Some(cfg) => cfg.clone(), }; - let ledger_pool = Arc::new(IndyVdrLedgerPool::new( + let ledger_pool = IndyVdrLedgerPool::new( libvcx_pool_config.genesis_path.clone(), indy_vdr_config, libvcx_pool_config.exclude_nodes.clone().unwrap_or_default(), - )?); + )?; let request_submitter = Arc::new(IndyVdrSubmitter::new(ledger_pool)); let cache_config = match &libvcx_pool_config.cache_config { @@ -104,68 +97,34 @@ async fn build_components_ledger( .build(), Some(cfg) => cfg.clone().try_into()?, }; - let ledger_read = Arc::new(indyvdr_build_ledger_read( - request_submitter.clone(), - cache_config, - )?); - let ledger_write = Arc::new(indyvdr_build_ledger_write( - base_wallet, - request_submitter, - None, - )); - let taa_configurator: Arc = ledger_write.clone(); - let anoncreds_write: Arc = ledger_write.clone(); - let indy_write: Arc = ledger_write.clone(); - let anoncreds_read: Arc = ledger_read.clone(); - let indy_read: Arc = ledger_read.clone(); - Ok(( - anoncreds_read, - anoncreds_write, - indy_read, - indy_write, - taa_configurator, - )) + let ledger_read = indyvdr_build_ledger_read(request_submitter.clone(), cache_config)?; + let ledger_write = indyvdr_build_ledger_write(base_wallet, request_submitter, None); + + Ok((ledger_read, ledger_write)) } pub fn reset_ledger_components() -> LibvcxResult<()> { - let mut anoncreds_read = GLOBAL_LEDGER_ANONCREDS_READ.write()?; - *anoncreds_read = None; - let mut anoncreds_write = GLOBAL_LEDGER_ANONCREDS_WRITE.write()?; - *anoncreds_write = None; let mut indy_read = GLOBAL_LEDGER_INDY_READ.write()?; *indy_read = None; let mut indy_write = GLOBAL_LEDGER_INDY_WRITE.write()?; *indy_write = None; - let mut taa_configurator = GLOBAL_TAA_CONFIGURATOR.write()?; - *taa_configurator = None; Ok(()) } pub async fn setup_ledger_components(config: &LibvcxLedgerConfig) -> LibvcxResult<()> { let base_wallet = get_main_wallet()?; - let (anoncreds_read, anoncreds_write, indy_read, indy_write, taa_configurator) = - build_components_ledger(base_wallet, config).await?; - let mut anoncreds_read_guard = GLOBAL_LEDGER_ANONCREDS_READ.write()?; - *anoncreds_read_guard = Some(anoncreds_read.clone()); - let mut anoncreds_write_guard = GLOBAL_LEDGER_ANONCREDS_WRITE.write()?; - *anoncreds_write_guard = Some(anoncreds_write.clone()); + let 
(ledger_read, ledger_write) = build_components_ledger(base_wallet, config)?; let mut indy_read_guard = GLOBAL_LEDGER_INDY_READ.write()?; - *indy_read_guard = Some(indy_read.clone()); + *indy_read_guard = Some(Arc::new(ledger_read)); let mut indy_write_guard = GLOBAL_LEDGER_INDY_WRITE.write()?; - *indy_write_guard = Some(indy_write.clone()); - let mut indy_taa_configurator = GLOBAL_TAA_CONFIGURATOR.write()?; - *indy_taa_configurator = Some(taa_configurator.clone()); + *indy_write_guard = Some(Arc::new(ledger_write)); Ok(()) } pub async fn open_main_pool(config: &LibvcxLedgerConfig) -> LibvcxResult<()> { if is_main_pool_open() { - error!("open_main_pool >> Pool connection is already open."); - return Err(LibvcxError::from_msg( - LibvcxErrorKind::AlreadyInitialized, - "Pool connection is already open.", - )); + return Ok(()); } trace!( @@ -197,7 +156,7 @@ pub mod tests { aries_vcx_core::ledger::indy::pool::test_utils::{ create_testpool_genesis_txn_file, get_temp_file_path, }, - global::settings::{set_config_value, CONFIG_GENESIS_PATH, DEFAULT_GENESIS_PATH}, + global::settings::DEFAULT_GENESIS_PATH, utils::{ constants::POOL1_TXN, devsetup::{SetupDefaults, SetupEmpty, TempFile}, @@ -208,7 +167,7 @@ pub mod tests { use crate::{ api_vcx::api_global::{ pool::{close_main_pool, open_main_pool, reset_ledger_components, LibvcxLedgerConfig}, - profile::get_main_anoncreds_ledger_read, + profile::get_main_ledger_read, wallet::{close_main_wallet, test_utils::_create_and_open_wallet}, }, errors::error::LibvcxErrorKind, @@ -284,14 +243,13 @@ pub mod tests { let _setup = SetupEmpty::init(); _create_and_open_wallet().await.unwrap(); - let genesis_transactions = + let _genesis_transactions = TempFile::create_with_data(POOL1_TXN, "{ \"invalid\": \"genesis\" }"); - set_config_value(CONFIG_GENESIS_PATH, &genesis_transactions.path).unwrap(); // todo: indy-vdr panics if the file is invalid, see: // indy-vdr-0.3.4/src/pool/runner.rs:44:22 assert_eq!( - get_main_anoncreds_ledger_read().unwrap_err().kind(), + get_main_ledger_read().unwrap_err().kind(), LibvcxErrorKind::NotReady ); @@ -316,7 +274,7 @@ pub mod tests { LibvcxErrorKind::IOError ); assert_eq!( - get_main_anoncreds_ledger_read().unwrap_err().kind(), + get_main_ledger_read().unwrap_err().kind(), LibvcxErrorKind::NotReady ); close_main_wallet().await.unwrap(); diff --git a/libvcx_core/src/api_vcx/api_global/profile.rs b/libvcx_core/src/api_vcx/api_global/profile.rs index ea3a6f4f7a..428a3dd466 100644 --- a/libvcx_core/src/api_vcx/api_global/profile.rs +++ b/libvcx_core/src/api_vcx/api_global/profile.rs @@ -1,246 +1,87 @@ -use std::{ - fmt::{Debug, Formatter}, - sync::Arc, -}; +use std::sync::Arc; use aries_vcx::{ + self, aries_vcx_core::{ anoncreds::base_anoncreds::BaseAnonCreds, - ledger::base_ledger::{ - AnoncredsLedgerRead, AnoncredsLedgerWrite, IndyLedgerRead, IndyLedgerWrite, - TxnAuthrAgrmtOptions, - }, - wallet::{base_wallet::BaseWallet, mock_wallet::MockWallet}, - }, - global::settings::indy_mocks_enabled, - utils::mockdata::profile::{ - mock_anoncreds::MockAnoncreds, mock_ledger::MockLedger, mock_profile::MockProfile, + ledger::base_ledger::{IndyLedgerRead, IndyLedgerWrite}, + wallet::base_wallet::BaseWallet, }, }; +use aries_vcx_core::ledger::base_ledger::{ + AnoncredsLedgerRead, AnoncredsLedgerWrite, TaaConfigurator, TxnAuthrAgrmtOptions, +}; use crate::{ api_vcx::api_global::{ - pool::{ - GLOBAL_LEDGER_ANONCREDS_READ, GLOBAL_LEDGER_ANONCREDS_WRITE, GLOBAL_LEDGER_INDY_READ, - GLOBAL_LEDGER_INDY_WRITE, GLOBAL_TAA_CONFIGURATOR, - }, + 
pool::{GLOBAL_LEDGER_INDY_READ, GLOBAL_LEDGER_INDY_WRITE}, wallet::{GLOBAL_BASE_ANONCREDS, GLOBAL_BASE_WALLET}, }, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, }; -pub trait ProfileV2: Send + Sync { - fn inject_indy_ledger_read(&self) -> LibvcxResult>; - - fn inject_indy_ledger_write(&self) -> LibvcxResult>; - - fn inject_anoncreds(&self) -> LibvcxResult>; - - fn inject_anoncreds_ledger_read(&self) -> LibvcxResult>; - - fn inject_anoncreds_ledger_write(&self) -> LibvcxResult>; - - fn inject_wallet(&self) -> LibvcxResult>; - - fn try_inject_wallet(&self) -> LibvcxResult>>; - - fn update_taa_configuration(&self, taa_options: TxnAuthrAgrmtOptions) -> LibvcxResult<()>; - - fn get_taa_configuration(&self) -> LibvcxResult>; +pub fn try_get_main_wallet() -> LibvcxResult>> { + let base_wallet = GLOBAL_BASE_WALLET.read()?; + base_wallet.as_ref().cloned().map(Some).ok_or_else(|| { + LibvcxError::from_msg(LibvcxErrorKind::NotReady, "Wallet is not initialized") + }) } -#[derive(Clone)] -struct VcxGlobalsProfile {} - -impl Debug for VcxGlobalsProfile { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "VcxGlobalsProfile") - } +pub fn get_main_wallet() -> LibvcxResult> { + let base_wallet = GLOBAL_BASE_WALLET.read()?; + base_wallet.as_ref().cloned().ok_or_else(|| { + LibvcxError::from_msg(LibvcxErrorKind::NotReady, "Wallet is not initialized") + }) } -impl ProfileV2 for VcxGlobalsProfile { - fn inject_indy_ledger_read(&self) -> LibvcxResult> { - let ledger = GLOBAL_LEDGER_INDY_READ.read()?; - match ledger.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Indy ledger read is not initialized", - )), - Some(l) => Ok(Arc::clone(l)), - } - } - - fn inject_indy_ledger_write(&self) -> LibvcxResult> { - let ledger = GLOBAL_LEDGER_INDY_WRITE.read()?; - match ledger.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Indy ledger write is not initialized", - )), - Some(l) => Ok(Arc::clone(l)), - } - } - - fn inject_anoncreds(&self) -> LibvcxResult> { - let anoncreds = GLOBAL_BASE_ANONCREDS.read()?; - match anoncreds.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Anoncreds is not initialized", - )), - Some(a) => Ok(Arc::clone(a)), - } - } - - fn inject_anoncreds_ledger_read(&self) -> LibvcxResult> { - let ledger = GLOBAL_LEDGER_ANONCREDS_READ.read()?; - match ledger.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Anoncreds ledger read is not initialized", - )), - Some(l) => Ok(Arc::clone(l)), - } - } - - fn inject_anoncreds_ledger_write(&self) -> LibvcxResult> { - let ledger = GLOBAL_LEDGER_ANONCREDS_WRITE.read()?; - match ledger.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Anoncreds ledger write is not initialized", - )), - Some(l) => Ok(Arc::clone(l)), - } - } - - fn inject_wallet(&self) -> LibvcxResult> { - let base_wallet = GLOBAL_BASE_WALLET.read()?; - match base_wallet.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Wallet is not initialized", - )), - Some(w) => Ok(Arc::clone(w)), - } - } - - fn try_inject_wallet(&self) -> LibvcxResult>> { - let base_wallet = GLOBAL_BASE_WALLET.read()?; - base_wallet - .as_ref() - .map(|w| Some(Arc::clone(w))) - .ok_or_else(|| { - LibvcxError::from_msg(LibvcxErrorKind::NotReady, "Wallet is not initialized") - }) - } - - fn update_taa_configuration(&self, taa_options: TxnAuthrAgrmtOptions) -> LibvcxResult<()> { - let configurator = 
GLOBAL_TAA_CONFIGURATOR.read()?; - match configurator.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Ledger is not initialized", - ))?, - Some(configurator) => configurator - .as_ref() - .set_txn_author_agreement_options(taa_options)?, - }; - Ok(()) - } - - fn get_taa_configuration(&self) -> LibvcxResult> { - let configurator = GLOBAL_TAA_CONFIGURATOR.read()?; - match configurator.as_ref() { - None => Err(LibvcxError::from_msg( - LibvcxErrorKind::NotReady, - "Ledger is not initialized", - ))?, - Some(configurator) => configurator - .as_ref() - .get_txn_author_agreement_options() - .map_err(|err| err.into()), - } - } +pub fn get_main_anoncreds() -> LibvcxResult> { + let anoncreds = GLOBAL_BASE_ANONCREDS.read()?; + anoncreds.as_ref().cloned().ok_or_else(|| { + LibvcxError::from_msg(LibvcxErrorKind::NotReady, "Anoncreds is not initialized") + }) } -lazy_static! { - static ref GLOBAL_PROFILE: VcxGlobalsProfile = VcxGlobalsProfile {}; +pub fn get_main_ledger_read() -> LibvcxResult> { + let ledger = GLOBAL_LEDGER_INDY_READ.read()?; + ledger.as_ref().cloned().ok_or_else(|| { + LibvcxError::from_msg( + LibvcxErrorKind::NotReady, + "Anoncreds ledger read is not initialized", + ) + }) } -impl ProfileV2 for MockProfile { - fn inject_indy_ledger_read(&self) -> LibvcxResult> { - Ok(Arc::new(MockLedger {})) - } - - fn inject_indy_ledger_write(&self) -> LibvcxResult> { - Ok(Arc::new(MockLedger {})) - } - - fn inject_anoncreds(&self) -> LibvcxResult> { - Ok(Arc::new(MockAnoncreds {})) - } - - fn inject_anoncreds_ledger_read(&self) -> LibvcxResult> { - Ok(Arc::new(MockLedger {})) - } - - fn inject_anoncreds_ledger_write(&self) -> LibvcxResult> { - Ok(Arc::new(MockLedger {})) - } - - fn inject_wallet(&self) -> LibvcxResult> { - Ok(Arc::new(MockWallet {})) - } - - fn try_inject_wallet(&self) -> LibvcxResult>> { - Ok(Some(Arc::new(MockWallet {}))) - } - - fn update_taa_configuration(&self, _taa_options: TxnAuthrAgrmtOptions) -> LibvcxResult<()> { - Ok(()) - } - - fn get_taa_configuration(&self) -> LibvcxResult> { - Ok(Some(TxnAuthrAgrmtOptions { - text: "foo".to_string(), - version: "bar".to_string(), - mechanism: "baz".to_string(), - })) - } -} - -pub fn get_main_profile() -> Arc { - if indy_mocks_enabled() { - return Arc::new(MockProfile {}); - } - Arc::new(GLOBAL_PROFILE.clone()) -} - -pub fn try_get_main_wallet() -> LibvcxResult>> { - get_main_profile().try_inject_wallet() -} - -pub fn get_main_wallet() -> LibvcxResult> { - get_main_profile().inject_wallet() +pub fn get_main_ledger_write() -> LibvcxResult> { + let ledger = GLOBAL_LEDGER_INDY_WRITE.read()?; + ledger.as_ref().cloned().ok_or_else(|| { + LibvcxError::from_msg( + LibvcxErrorKind::NotReady, + "Indy ledger write is not initialized", + ) + }) } -pub fn get_main_anoncreds() -> LibvcxResult> { - get_main_profile().inject_anoncreds() +pub fn update_taa_configuration(taa_options: TxnAuthrAgrmtOptions) -> LibvcxResult<()> { + let configurator = GLOBAL_LEDGER_INDY_WRITE.read()?; + match configurator.as_ref() { + None => Err(LibvcxError::from_msg( + LibvcxErrorKind::NotReady, + "Ledger is not initialized", + ))?, + Some(configurator) => configurator.set_txn_author_agreement_options(taa_options)?, + }; + Ok(()) } -pub fn get_main_indy_ledger_read() -> LibvcxResult> { - get_main_profile().inject_indy_ledger_read() -} - -pub fn get_main_indy_ledger_write() -> LibvcxResult> { - get_main_profile().inject_indy_ledger_write() -} - -pub fn get_main_anoncreds_ledger_read() -> LibvcxResult> { - 
get_main_profile().inject_anoncreds_ledger_read() -} - -pub fn get_main_anoncreds_ledger_write() -> LibvcxResult> { - get_main_profile().inject_anoncreds_ledger_write() +pub fn get_taa_configuration() -> LibvcxResult> { + let configurator = GLOBAL_LEDGER_INDY_WRITE.read()?; + match configurator.as_ref() { + None => Err(LibvcxError::from_msg( + LibvcxErrorKind::NotReady, + "Ledger is not initialized", + ))?, + Some(configurator) => configurator + .get_txn_author_agreement_options() + .map_err(|err| err.into()), + } } diff --git a/libvcx_core/src/api_vcx/api_global/settings.rs b/libvcx_core/src/api_vcx/api_global/settings.rs deleted file mode 100644 index 61987ff73f..0000000000 --- a/libvcx_core/src/api_vcx/api_global/settings.rs +++ /dev/null @@ -1,23 +0,0 @@ -use aries_vcx::{ - agency_client::testing::mocking::enable_agency_mocks, - aries_vcx_core::wallet::indy::IssuerConfig, - global::{ - settings, - settings::{aries_vcx_enable_indy_mocks, init_issuer_config}, - }, -}; - -use crate::errors::{error::LibvcxResult, mapping_from_ariesvcx::map_ariesvcx_result}; - -pub fn enable_mocks() -> LibvcxResult<()> { - enable_agency_mocks(); - map_ariesvcx_result(aries_vcx_enable_indy_mocks()) -} - -pub fn get_config_value(key: &str) -> LibvcxResult { - map_ariesvcx_result(settings::get_config_value(key)) -} - -pub fn settings_init_issuer_config(issuer_config: &IssuerConfig) -> LibvcxResult<()> { - map_ariesvcx_result(init_issuer_config(&issuer_config.institution_did)) -} diff --git a/libvcx_core/src/api_vcx/api_global/state.rs b/libvcx_core/src/api_vcx/api_global/state.rs index 3c985b9cd8..075ecadfc9 100644 --- a/libvcx_core/src/api_vcx/api_global/state.rs +++ b/libvcx_core/src/api_vcx/api_global/state.rs @@ -1,5 +1,3 @@ -use aries_vcx::global::settings::reset_config_values_ariesvcx; - use crate::api_vcx::api_global::{ agency_client::reset_main_agency_client, pool::{close_main_pool, reset_ledger_components}, @@ -20,7 +18,6 @@ pub fn state_vcx_shutdown() { crate::api_vcx::api_handle::disclosed_proof::release_all(); crate::api_vcx::api_handle::credential::release_all(); - let _ = reset_config_values_ariesvcx(); reset_main_agency_client(); match reset_ledger_components() { Ok(_) => {} @@ -29,68 +26,3 @@ pub fn state_vcx_shutdown() { } } } - -#[cfg(test)] -pub mod tests { - use aries_vcx::utils::{ - devsetup::SetupMocks, - mockdata::{ - mockdata_credex::ARIES_CREDENTIAL_OFFER, - mockdata_proof::ARIES_PROOF_REQUEST_PRESENTATION, - }, - }; - - use crate::api_vcx::{ - api_global::{profile::get_main_wallet, state::state_vcx_shutdown}, - api_handle::{ - credential, credential::credential_create_with_offer, credential_def, disclosed_proof, - disclosed_proof::create_with_proof_request, issuer_credential, mediated_connection, - proof, schema, schema::create_and_publish_schema, - }, - }; - - #[tokio::test] - async fn test_shutdown() { - let _setup = SetupMocks::init(); - - let data = r#"["name","male"]"#; - let connection = - mediated_connection::test_utils::build_test_connection_inviter_invited().await; - let credential_def = credential_def::create( - "SID".to_string(), - "id".to_string(), - "tag".to_string(), - false, - ) - .await - .unwrap(); - let issuer_credential = - issuer_credential::issuer_credential_create("1".to_string()).unwrap(); - let proof = proof::create_proof( - "1".to_string(), - "[]".to_string(), - "[]".to_string(), - r#"{"support_revocation":false}"#.to_string(), - "Optional".to_owned(), - ) - .await - .unwrap(); - let schema = - create_and_publish_schema("5", "name".to_string(), 
"0.1".to_string(), data.to_string()) - .await - .unwrap(); - let disclosed_proof = - create_with_proof_request("id", ARIES_PROOF_REQUEST_PRESENTATION).unwrap(); - let credential = credential_create_with_offer("name", ARIES_CREDENTIAL_OFFER).unwrap(); - - state_vcx_shutdown(); - assert!(!mediated_connection::is_valid_handle(connection)); - assert!(!issuer_credential::is_valid_handle(issuer_credential)); - assert!(!schema::is_valid_handle(schema)); - assert!(!proof::is_valid_handle(proof)); - assert!(!credential_def::is_valid_handle(credential_def)); - assert!(!credential::is_valid_handle(credential)); - assert!(!disclosed_proof::is_valid_handle(disclosed_proof)); - assert!(get_main_wallet().is_err()); - } -} diff --git a/libvcx_core/src/api_vcx/api_global/wallet.rs b/libvcx_core/src/api_vcx/api_global/wallet.rs index 22a34b4b88..15cfda7116 100644 --- a/libvcx_core/src/api_vcx/api_global/wallet.rs +++ b/libvcx_core/src/api_vcx/api_global/wallet.rs @@ -3,44 +3,38 @@ use std::{ sync::{Arc, RwLock}, }; -#[cfg(feature = "anoncreds_credx")] -use aries_vcx::aries_vcx_core::anoncreds::credx_anoncreds::IndyCredxAnonCreds; -#[cfg(feature = "anoncreds_vdrtools")] -use aries_vcx::aries_vcx_core::anoncreds::indy_anoncreds::IndySdkAnonCreds; use aries_vcx::{ aries_vcx_core::{ - anoncreds::base_anoncreds::BaseAnonCreds, + anoncreds::{base_anoncreds::BaseAnonCreds, credx_anoncreds::IndyCredxAnonCreds}, wallet, wallet::{ base_wallet::BaseWallet, indy::{ internal::{close_search_wallet, fetch_next_records_wallet, open_search_wallet}, - wallet::import, + wallet::{close_wallet, create_indy_wallet, import, open_wallet}, IndySdkWallet, IssuerConfig, RestoreWalletConfigs, WalletConfig, }, + structs_io::UnpackMessageOutput, }, SearchHandle, WalletHandle, }, - common::signing::unpack_message_to_string, global::settings::DEFAULT_LINK_SECRET_ALIAS, protocols::mediated_connection::pairwise_info::PairwiseInfo, }; use crate::{ api_vcx::api_global::profile::{ - get_main_anoncreds, get_main_indy_ledger_write, get_main_wallet, try_get_main_wallet, + get_main_anoncreds, get_main_ledger_write, get_main_wallet, try_get_main_wallet, }, errors::{ - error::LibvcxResult, mapping_from_ariesvcx::map_ariesvcx_result, + error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, + mapping_from_ariesvcx::map_ariesvcx_result, mapping_from_ariesvcxcore::map_ariesvcx_core_result, }, }; -lazy_static! 
{ - pub static ref GLOBAL_BASE_WALLET: RwLock>> = RwLock::new(None); - pub static ref GLOBAL_BASE_ANONCREDS: RwLock>> = - RwLock::new(None); -} +pub static GLOBAL_BASE_WALLET: RwLock>> = RwLock::new(None); +pub static GLOBAL_BASE_ANONCREDS: RwLock>> = RwLock::new(None); pub fn get_main_wallet_handle() -> LibvcxResult { get_main_wallet().map(|wallet| wallet.get_wallet_handle()) @@ -53,26 +47,14 @@ pub async fn export_main_wallet(path: &str, backup_key: &str) -> LibvcxResult<() ) } -fn build_component_base_wallet(wallet_handle: WalletHandle) -> Arc { +fn build_component_base_wallet(wallet_handle: WalletHandle) -> Arc { Arc::new(IndySdkWallet::new(wallet_handle)) } #[allow(unreachable_code)] #[allow(clippy::needless_return)] -fn build_component_anoncreds(base_wallet: Arc) -> Arc { - #[cfg(feature = "anoncreds_vdrtools")] - { - let wallet_handle = base_wallet.get_wallet_handle(); - return Arc::new(IndySdkAnonCreds::new(wallet_handle)); - } - #[cfg(feature = "anoncreds_credx")] - { - return Arc::new(IndyCredxAnonCreds::new(Arc::clone(&base_wallet))); - } - #[cfg(not(any(feature = "anoncreds_vdrtools", feature = "anoncreds_credx")))] - { - compile_error!("No anoncreds implementation enabled by feature flag upon build"); - } +fn build_component_anoncreds(base_wallet: Arc) -> Arc { + Arc::new(IndyCredxAnonCreds::new(base_wallet.clone())) } fn setup_global_wallet(wallet_handle: WalletHandle) -> LibvcxResult<()> { @@ -81,7 +63,7 @@ fn setup_global_wallet(wallet_handle: WalletHandle) -> LibvcxResult<()> { let mut b_wallet = GLOBAL_BASE_WALLET.write()?; *b_wallet = Some(base_wallet_impl.clone()); // anoncreds - let base_anoncreds_impl: Arc = build_component_anoncreds(base_wallet_impl); + let base_anoncreds_impl = build_component_anoncreds(base_wallet_impl); let mut b_anoncreds = GLOBAL_BASE_ANONCREDS.write()?; *b_anoncreds = Some(base_anoncreds_impl); Ok(()) @@ -157,8 +139,8 @@ pub async fn replace_did_keys_start(did: &str) -> LibvcxResult { pub async fn rotate_verkey_apply(did: &str, temp_vk: &str) -> LibvcxResult<()> { map_ariesvcx_result( aries_vcx::common::keys::rotate_verkey_apply( - &get_main_wallet()?, - &get_main_indy_ledger_write()?, + get_main_wallet()?.as_ref(), + get_main_ledger_write()?.as_ref(), did, temp_vk, ) @@ -166,9 +148,9 @@ pub async fn rotate_verkey_apply(did: &str, temp_vk: &str) -> LibvcxResult<()> { ) } -pub async fn wallet_unpack_message_to_string(payload: &[u8]) -> LibvcxResult { +pub async fn wallet_unpack_message(payload: &[u8]) -> LibvcxResult { let wallet = get_main_wallet()?; - map_ariesvcx_result(unpack_message_to_string(&wallet, payload).await) + map_ariesvcx_core_result(wallet.unpack_message(payload).await) } pub async fn wallet_create_and_store_did(seed: Option<&str>) -> LibvcxResult { @@ -279,12 +261,36 @@ pub async fn wallet_import(config: &RestoreWalletConfigs) -> LibvcxResult<()> { map_ariesvcx_core_result(import(config).await) } +pub async fn wallet_migrate(wallet_config: &WalletConfig) -> LibvcxResult<()> { + let src_wallet_handle = get_main_wallet_handle()?; + info!("Assuring target wallet exists."); + create_indy_wallet(wallet_config).await?; + info!("Opening target wallet."); + let dest_wallet_handle = open_wallet(wallet_config).await?; + info!("Target wallet is ready."); + + let migration_res = wallet_migrator::migrate_wallet( + src_wallet_handle, + dest_wallet_handle, + wallet_migrator::vdrtools2credx::migrate_any_record, + ) + .await; + + info!("Closing source and target wallets"); + close_wallet(src_wallet_handle).await.ok(); + 
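+    // Both handles are closed with `.ok()` so a failure to close does not mask
+    // the migration outcome; `migration_res` below is what gets propagated.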
close_wallet(dest_wallet_handle).await.ok(); + + migration_res.map_err(|e| LibvcxError::from_msg(LibvcxErrorKind::WalletMigrationFailed, e)) +} + +#[allow(clippy::unwrap_used)] pub mod test_utils { use aries_vcx::{ aries_vcx_core::wallet::indy::WalletConfig, global::settings::{DEFAULT_WALLET_BACKUP_KEY, DEFAULT_WALLET_KEY, WALLET_KDF_RAW}, utils::devsetup::TempFile, }; + use aries_vcx_core::wallet::base_wallet::BaseWallet; use crate::{ api_vcx::api_global::{ @@ -375,6 +381,29 @@ pub mod tests { errors::error::{LibvcxErrorKind, LibvcxResult}, }; + #[tokio::test] + async fn test_wallet_migrate() { + let wallet_name = format!("test_create_wallet_{}", uuid::Uuid::new_v4()); + let config: WalletConfig = serde_json::from_value(json!({ + "wallet_name": wallet_name, + "wallet_key": DEFAULT_WALLET_KEY, + "wallet_key_derivation": WALLET_KDF_RAW + })) + .unwrap(); + + create_and_open_as_main_wallet(&config).await.unwrap(); + + let wallet_name = format!("test_migrate_wallet_{}", uuid::Uuid::new_v4()); + let new_config: WalletConfig = serde_json::from_value(json!({ + "wallet_name": wallet_name, + "wallet_key": DEFAULT_WALLET_KEY, + "wallet_key_derivation": WALLET_KDF_RAW + })) + .unwrap(); + + super::wallet_migrate(&new_config).await.unwrap(); + } + #[tokio::test] async fn test_wallet_create() { let _setup = SetupEmpty::init(); diff --git a/libvcx_core/src/api_vcx/api_handle/connection.rs b/libvcx_core/src/api_vcx/api_handle/connection.rs index 10acb3f92e..e7d0eac19a 100644 --- a/libvcx_core/src/api_vcx/api_handle/connection.rs +++ b/libvcx_core/src/api_vcx/api_handle/connection.rs @@ -15,7 +15,7 @@ use rand::Rng; use url::Url; use crate::{ - api_vcx::api_global::profile::{get_main_indy_ledger_read, get_main_wallet}, + api_vcx::api_global::profile::{get_main_ledger_read, get_main_wallet}, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, }; @@ -138,14 +138,14 @@ where // ----------------------------- CONSTRUCTORS ------------------------------------ pub async fn create_inviter(pw_info: Option) -> LibvcxResult { trace!("create_inviter >>>"); - let pw_info = pw_info.unwrap_or(PairwiseInfo::create(&get_main_wallet()?).await?); + let pw_info = pw_info.unwrap_or(PairwiseInfo::create(get_main_wallet()?.as_ref()).await?); let con = InviterConnection::new_inviter("".to_owned(), pw_info); add_connection(con) } pub async fn create_invitee(_invitation: &str) -> LibvcxResult { trace!("create_invitee >>>"); - let pairwise_info = PairwiseInfo::create(&get_main_wallet()?).await?; + let pairwise_info = PairwiseInfo::create(get_main_wallet()?.as_ref()).await?; let con = InviteeConnection::new_invitee("".to_owned(), pairwise_info); add_connection(con) } @@ -236,10 +236,10 @@ pub fn get_invitation(handle: u32) -> LibvcxResult { pub async fn process_invite(handle: u32, invitation: &str) -> LibvcxResult<()> { trace!("process_invite >>>"); - let ledger = get_main_indy_ledger_read()?; + let ledger = get_main_ledger_read()?; let invitation = deserialize(invitation)?; let con = get_cloned_connection(&handle)? 
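// wallet_migrate above follows an open -> migrate -> close-both shape: ensure the
// target wallet exists, open it, run the record migration, then close both handles
// and surface only the migration result. Self-contained sketch with stand-in types;
// migrate_records is hypothetical and stands in for wallet_migrator::migrate_wallet.

#[derive(Clone, Copy)]
struct WalletHandle(i32);

#[derive(Debug)]
struct MigrationError(String);

async fn open_target_wallet() -> Result<WalletHandle, MigrationError> {
    Ok(WalletHandle(2)) // real code creates the wallet if needed, then opens it
}

async fn migrate_records(_src: WalletHandle, _dest: WalletHandle) -> Result<(), MigrationError> {
    Ok(()) // real code converts every vdrtools record into its credx form
}

async fn close(_handle: WalletHandle) -> Result<(), MigrationError> {
    Ok(())
}

async fn migrate(src: WalletHandle) -> Result<(), MigrationError> {
    let dest = open_target_wallet().await?;
    let migration_res = migrate_records(src, dest).await;
    // Close both wallets regardless of the outcome; ignore close errors.
    close(src).await.ok();
    close(dest).await.ok();
    migration_res
}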
- .accept_invitation(&ledger, invitation) + .accept_invitation(ledger.as_ref(), invitation) .await?; insert_connection(handle, con) @@ -282,13 +282,12 @@ pub async fn process_request( let con = con .handle_request( - &wallet, + wallet.as_ref(), request, Url::from_str(&service_endpoint).map_err(|err| { LibvcxError::from_msg(LibvcxErrorKind::InvalidUrl, err.to_string()) })?, routing_keys, - &HttpClient, ) .await?; @@ -301,7 +300,7 @@ pub async fn process_response(handle: u32, response: &str) -> LibvcxResult<()> { let con = get_cloned_connection(&handle)?; let response = deserialize(response)?; let con = con - .handle_response(&get_main_wallet()?, response, &HttpClient) + .handle_response(get_main_wallet()?.as_ref(), response) .await?; insert_connection(handle, con) @@ -341,7 +340,7 @@ pub async fn send_response(handle: u32) -> LibvcxResult<()> { let con = get_cloned_connection(&handle)?; let response = con.get_connection_response_msg(); - con.send_message(&get_main_wallet()?, &response.into(), &HttpClient) + con.send_message(get_main_wallet()?.as_ref(), &response.into(), &HttpClient) .await?; insert_connection(handle, con) } @@ -358,7 +357,7 @@ pub async fn send_request( .map_err(|err| LibvcxError::from_msg(LibvcxErrorKind::InvalidUrl, err.to_string()))?; let con = con.prepare_request(url, routing_keys).await?; let request = con.get_request().clone(); - con.send_message(&get_main_wallet()?, &request.into(), &HttpClient) + con.send_message(get_main_wallet()?.as_ref(), &request.into(), &HttpClient) .await?; insert_connection(handle, con) @@ -368,8 +367,12 @@ pub async fn send_ack(handle: u32) -> LibvcxResult<()> { trace!("send_ack >>>"); let con = get_cloned_connection(&handle)?; - con.send_message(&get_main_wallet()?, &con.get_ack().into(), &HttpClient) - .await?; + con.send_message( + get_main_wallet()?.as_ref(), + &con.get_ack().into(), + &HttpClient, + ) + .await?; Ok(()) } @@ -378,7 +381,7 @@ pub async fn send_generic_message(handle: u32, content: String) -> LibvcxResult< let message = serde_json::from_str(&content)?; let con = get_cloned_generic_connection(&handle)?; - con.send_message(&get_main_wallet()?, &message, &HttpClient) + con.send_message(get_main_wallet()?.as_ref(), &message, &HttpClient) .await?; Ok(()) } diff --git a/libvcx_core/src/api_vcx/api_handle/credential.rs b/libvcx_core/src/api_vcx/api_handle/credential.rs index fca70c9cb8..b13052c91c 100644 --- a/libvcx_core/src/api_vcx/api_handle/credential.rs +++ b/libvcx_core/src/api_vcx/api_handle/credential.rs @@ -1,6 +1,5 @@ use aries_vcx::{ agency_client::testing::mocking::AgencyMockDecrypted, - global::settings::indy_mocks_enabled, handlers::issuance::{holder::Holder, mediated_holder::holder_find_message_to_handle}, messages::{ msg_fields::protocols::cred_issuance::{ @@ -18,8 +17,11 @@ use serde_json; use crate::{ api_vcx::{ - api_global::profile::{get_main_anoncreds, get_main_anoncreds_ledger_read}, - api_handle::{mediated_connection, object_cache::ObjectCache}, + api_global::profile::{get_main_anoncreds, get_main_ledger_read}, + api_handle::{ + mediated_connection::{self, send_message}, + object_cache::ObjectCache, + }, }, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, }; @@ -161,8 +163,8 @@ pub async fn update_state( Some(aries_msg) => { credential .process_aries_msg( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds()?, + get_main_ledger_read()?.as_ref(), + get_main_anoncreds()?.as_ref(), aries_msg.clone(), ) .await?; @@ -173,9 +175,7 @@ pub async fn update_state( match 
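// Call sites in this file switch from passing `&Arc<dyn Trait>` to passing the
// trait object itself: `get_main_wallet()?.as_ref()` / `ledger.as_ref()` borrow a
// `&dyn Trait` out of the Arc. Minimal illustration with a toy trait:

use std::sync::Arc;

trait Wallet {}
struct MyWallet;
impl Wallet for MyWallet {}

fn takes_dyn(_wallet: &dyn Wallet) {}

fn main() {
    let wallet: Arc<dyn Wallet> = Arc::new(MyWallet);
    takes_dyn(wallet.as_ref()); // &dyn Wallet borrowed from the Arc
    takes_dyn(&*wallet);        // equivalent explicit deref
}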
credential.get_final_message()? { None => {} Some(msg_response) => { - let send_message = - mediated_connection::send_message_closure(connection_handle).await?; - send_message(msg_response).await?; + send_message(connection_handle, msg_response).await?; } } } @@ -224,7 +224,7 @@ pub fn get_rev_reg_id(handle: u32) -> LibvcxResult { pub async fn is_revokable(handle: u32) -> LibvcxResult { let credential = HANDLE_MAP.get_cloned(handle)?; credential - .is_revokable(&get_main_anoncreds_ledger_read()?) + .is_revokable(get_main_ledger_read()?.as_ref()) .await .map_err(|err| err.into()) } @@ -235,7 +235,9 @@ pub async fn delete_credential(handle: u32) -> LibvcxResult<()> { handle ); let credential = HANDLE_MAP.get_cloned(handle)?; - credential.delete_credential(&get_main_anoncreds()?).await?; + credential + .delete_credential(get_main_anoncreds()?.as_ref()) + .await?; HANDLE_MAP.release(handle) } @@ -263,15 +265,14 @@ pub async fn send_credential_request(handle: u32, connection_handle: u32) -> Lib ); let mut credential = HANDLE_MAP.get_cloned(handle)?; let my_pw_did = mediated_connection::get_pw_did(connection_handle)?; - let send_message = mediated_connection::send_message_closure(connection_handle).await?; let msg_response = credential .prepare_credential_request( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds()?, + get_main_ledger_read()?.as_ref(), + get_main_anoncreds()?.as_ref(), my_pw_did, ) .await?; - send_message(msg_response).await?; + send_message(connection_handle, msg_response).await?; HANDLE_MAP.insert(handle, credential) } @@ -282,10 +283,6 @@ async fn get_credential_offer_msg(connection_handle: u32, msg_id: &str) -> Libvc msg_id ); - if indy_mocks_enabled() { - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_CREDENTIAL_OFFER); - } let credential_offer = match mediated_connection::get_message_by_id(connection_handle, msg_id).await { Ok(message) => match message { @@ -401,9 +398,8 @@ pub async fn decline_offer( comment: Option<&str>, ) -> LibvcxResult<()> { let mut credential = HANDLE_MAP.get_cloned(handle)?; - let send_message = mediated_connection::send_message_closure(connection_handle).await?; let problem_report = credential.decline_offer(comment)?; - send_message(problem_report.into()).await?; + send_message(connection_handle, problem_report.into()).await?; HANDLE_MAP.insert(handle, credential) } @@ -423,21 +419,19 @@ pub mod tests { mockdata_credex, mockdata_credex::{ ARIES_CREDENTIAL_OFFER, ARIES_CREDENTIAL_OFFER_JSON_FORMAT, - ARIES_CREDENTIAL_RESPONSE, CREDENTIAL_SM_FINISHED, + CREDENTIAL_SM_FINISHED, }, }, }, }; use super::*; - #[cfg(test)] - use crate::api_vcx::api_handle::credential::tests_utils::BAD_CREDENTIAL_OFFER; - use crate::api_vcx::api_handle::credential::{ - credential_create_with_offer, get_attributes, get_credential, send_credential_request, - }; - #[cfg(test)] - use crate::api_vcx::api_handle::mediated_connection::test_utils::{ - build_test_connection_invitee_completed, build_test_connection_inviter_requested, + use crate::api_vcx::api_handle::{ + credential::{ + credential_create_with_offer, get_attributes, get_credential, + tests_utils::BAD_CREDENTIAL_OFFER, + }, + mediated_connection::test_utils::build_test_connection_invitee_completed, }; async fn _get_offer(handle: u32) -> String { @@ -520,77 +514,6 @@ pub mod tests { assert_eq!(cred_original_serialized, cred_restored_serialized); } - #[tokio::test] - async fn full_credential_test() { - let _setup = 
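// Note: holder/prover/issuer call sites in these files now send replies through
// mediated_connection::send_message(connection_handle, msg) directly; the older
// send_message_closure(connection_handle) indirection is removed (the replacement
// definition appears in mediated_connection.rs further down in this diff).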
SetupMocks::init(); - - info!("full_credential_test:: going to build_test_connection"); - let handle_conn = build_test_connection_inviter_requested().await; - - info!("full_credential_test:: going to _get_offer"); - let offer = _get_offer(handle_conn).await; - - info!("full_credential_test:: going to credential_create_with_offer"); - let handle_cred = credential_create_with_offer("TEST_CREDENTIAL", &offer).unwrap(); - assert_eq!( - HolderState::OfferReceived as u32, - get_state(handle_cred).unwrap() - ); - - info!("full_credential_test:: going get offered attributes from offer received state"); - let offer_attrs: String = get_attributes(handle_cred).unwrap(); - info!( - "full_credential_test:: obtained offered attributes: {}", - offer_attrs - ); - let offer_attrs: serde_json::Value = serde_json::from_str(&offer_attrs).unwrap(); - let offer_attrs_expected: serde_json::Value = - serde_json::from_str(mockdata_credex::OFFERED_ATTRIBUTES).unwrap(); - assert_eq!(offer_attrs, offer_attrs_expected); - - info!("full_credential_test:: going to send_credential_request"); - send_credential_request(handle_cred, handle_conn) - .await - .unwrap(); - assert_eq!( - HolderState::RequestSet as u32, - get_state(handle_cred).unwrap() - ); - - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_CREDENTIAL_RESPONSE); - - info!("full_credential_test:: going to update_state, should receive credential"); - update_state(handle_cred, None, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_cred).unwrap(), - HolderState::Finished as u32 - ); - - info!("full_credential_test:: going to get_credential"); - let msg = get_credential(handle_cred).unwrap(); - info!("full_credential_test:: get_credential returned {}", msg); - let msg_value: serde_json::Value = serde_json::from_str(&msg).unwrap(); - - info!( - "full_credential_test:: going to deserialize credential: {:?}", - msg_value - ); - let _credential_struct: IssueCredentialV1 = - serde_json::from_str(msg_value.to_string().as_str()).unwrap(); - - info!("full_credential_test:: going get offered attributes from final state"); - let offer_attrs: String = get_attributes(handle_cred).unwrap(); - info!( - "full_credential_test:: obtained offered attributes: {}", - offer_attrs - ); - let offer_attrs: serde_json::Value = serde_json::from_str(&offer_attrs).unwrap(); - let offer_attrs_expected: serde_json::Value = - serde_json::from_str(mockdata_credex::OFFERED_ATTRIBUTES).unwrap(); - assert_eq!(offer_attrs, offer_attrs_expected); - } - #[tokio::test] async fn test_get_attributes_json_attach() { let _setup = SetupMocks::init(); diff --git a/libvcx_core/src/api_vcx/api_handle/credential_def.rs b/libvcx_core/src/api_vcx/api_handle/credential_def.rs index 9dd67bf1e5..a30ac62e1e 100644 --- a/libvcx_core/src/api_vcx/api_handle/credential_def.rs +++ b/libvcx_core/src/api_vcx/api_handle/credential_def.rs @@ -1,18 +1,10 @@ -use aries_vcx::{ - common::primitives::credential_definition::{ - CredentialDef, CredentialDefConfigBuilder, PublicEntityStateType, - }, - global::settings::CONFIG_INSTITUTION_DID, +use aries_vcx::common::primitives::credential_definition::{ + CredentialDef, CredentialDefConfigBuilder, PublicEntityStateType, }; use crate::{ api_vcx::{ - api_global::{ - profile::{ - get_main_anoncreds, get_main_anoncreds_ledger_read, get_main_anoncreds_ledger_write, - }, - settings::get_config_value, - }, + api_global::profile::{get_main_anoncreds, get_main_ledger_read, 
get_main_ledger_write}, api_handle::object_cache::ObjectCache, }, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, @@ -24,12 +16,12 @@ lazy_static! { } pub async fn create( + issuer_did: String, source_id: String, schema_id: String, tag: String, support_revocation: bool, ) -> LibvcxResult { - let issuer_did = get_config_value(CONFIG_INSTITUTION_DID)?; let config = CredentialDefConfigBuilder::default() .issuer_did(issuer_did) .schema_id(schema_id) @@ -44,9 +36,10 @@ pub async fn create( ), ) })?; + let cred_def = CredentialDef::create( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds()?, + get_main_ledger_read()?.as_ref(), + get_main_anoncreds()?.as_ref(), source_id, config, support_revocation, @@ -61,8 +54,8 @@ pub async fn publish(handle: u32) -> LibvcxResult<()> { if !cd.was_published() { cd = cd .publish_cred_def( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds_ledger_write()?, + get_main_ledger_read()?.as_ref(), + get_main_ledger_write()?.as_ref(), ) .await?; } else { @@ -105,7 +98,7 @@ pub fn release_all() { pub async fn update_state(handle: u32) -> LibvcxResult { let mut cd = CREDENTIALDEF_MAP.get_cloned(handle)?; - let res = cd.update_state(&get_main_anoncreds_ledger_read()?).await?; + let res = cd.update_state(get_main_ledger_read()?.as_ref()).await?; CREDENTIALDEF_MAP.insert(handle, cd)?; Ok(res) } @@ -122,179 +115,27 @@ pub fn check_is_published(handle: u32) -> LibvcxResult { pub mod tests { use std::{thread::sleep, time::Duration}; - use aries_vcx::{ - aries_vcx_core::ledger::indy::pool::test_utils::get_temp_dir_path, - common::test_utils::create_and_write_test_schema, - global::settings::CONFIG_INSTITUTION_DID, - utils, - utils::{constants::SCHEMA_ID, devsetup::SetupMocks}, - }; - - use super::*; - use crate::api_vcx::{ - api_global::settings::get_config_value, - api_handle::{revocation_registry, revocation_registry::RevocationRegistryConfig, schema}, - utils::devsetup::SetupGlobalsWalletPoolAgency, - }; - - #[tokio::test] - async fn test_vcx_credentialdef_release() { - let _setup = SetupMocks::init(); - let schema_handle = schema::test_utils::create_schema_real().await; - sleep(Duration::from_secs(1)); + use aries_vcx::global::settings::DEFAULT_DID; - let schema_id = schema::get_schema_id(schema_handle).unwrap(); - let _issuer_did = get_config_value(CONFIG_INSTITUTION_DID).unwrap(); - let cred_def_handle = create("1".to_string(), schema_id, "tag_1".to_string(), false) - .await - .unwrap(); - release(cred_def_handle).unwrap(); - assert_eq!( - to_string(cred_def_handle).unwrap_err().kind, - LibvcxErrorKind::InvalidHandle - ) - } + use super::{create, publish}; + use crate::api_vcx::api_handle::schema; - // TODO: Get rid of this - #[cfg(test)] pub async fn create_and_publish_nonrevocable_creddef() -> (u32, u32) { let schema_handle = schema::test_utils::create_schema_real().await; sleep(Duration::from_secs(1)); let schema_id = schema::get_schema_id(schema_handle).unwrap(); - let _issuer_did = get_config_value(CONFIG_INSTITUTION_DID).unwrap(); - let cred_def_handle = create("1".to_string(), schema_id, "tag_1".to_string(), false) - .await - .unwrap(); - - publish(cred_def_handle).await.unwrap(); - (schema_handle, cred_def_handle) - } - - #[tokio::test] - async fn test_create_cred_def() { - let _setup = SetupMocks::init(); - let (_, _) = create_and_publish_nonrevocable_creddef().await; - } - - #[tokio::test] - #[ignore] - async fn create_revocable_cred_def_and_check_tails_location() { - SetupGlobalsWalletPoolAgency::run(|setup| async move { - let 
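// credential_def::create now takes the issuer DID as an explicit argument instead
// of reading CONFIG_INSTITUTION_DID from global settings. Hedged usage sketch of
// the new signature (async context and crate imports assumed; the returned handle
// type is taken to be the u32 used for handles elsewhere in libvcx_core):

async fn create_tag1_cred_def(issuer_did: String, schema_id: String) -> LibvcxResult<u32> {
    credential_def::create(
        issuer_did,              // explicit argument, no longer global config
        "source-id".to_string(), // source_id
        schema_id,
        "tag_1".to_string(),     // tag
        false,                   // support_revocation
    )
    .await
}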
schema = create_and_write_test_schema( - &get_main_anoncreds().unwrap(), - &get_main_anoncreds_ledger_write().unwrap(), - &setup.institution_did, - utils::constants::DEFAULT_SCHEMA_ATTRS, - ) - .await; - let issuer_did = get_config_value(CONFIG_INSTITUTION_DID).unwrap(); - - let path = get_temp_dir_path(); - - let handle_cred_def = create( - "1".to_string(), - schema.schema_id.clone(), - "tag1".to_string(), - true, - ) - .await - .unwrap(); - publish(handle_cred_def).await.unwrap(); - - let rev_reg_config = RevocationRegistryConfig { - issuer_did, - cred_def_id: get_cred_def_id(handle_cred_def).unwrap(), - tag: 1, - tails_dir: String::from(path.to_str().unwrap()), - max_creds: 2, - }; - let handle_rev_reg = revocation_registry::create(rev_reg_config).await.unwrap(); - let tails_url = utils::constants::TEST_TAILS_URL; - - revocation_registry::publish(handle_rev_reg, tails_url) - .await - .unwrap(); - let rev_reg_def = revocation_registry::get_rev_reg_def(handle_rev_reg).unwrap(); - assert_eq!(rev_reg_def.value.tails_location, tails_url); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_create_credential_def_real() { - SetupGlobalsWalletPoolAgency::run(|_setup| async move { - let (_, handle) = create_and_publish_nonrevocable_creddef().await; - - let _source_id = get_source_id(handle).unwrap(); - let _cred_def_id = get_cred_def_id(handle).unwrap(); - let _schema_json = to_string(handle).unwrap(); - }) - .await; - } - - #[tokio::test] - async fn test_to_string_succeeds() { - let _setup = SetupMocks::init(); - - let (_, cred_def_handle) = create_and_publish_nonrevocable_creddef().await; - - let credential_string = to_string(cred_def_handle).unwrap(); - let credential_values: serde_json::Value = - serde_json::from_str(&credential_string).unwrap(); - assert_eq!(credential_values["version"].clone(), "1.0"); - } - - #[tokio::test] - async fn test_from_string_succeeds() { - let _setup = SetupMocks::init(); - - let (_, cred_def_handle) = create_and_publish_nonrevocable_creddef().await; - let credentialdef_data = to_string(cred_def_handle).unwrap(); - assert!(!credentialdef_data.is_empty()); - release(cred_def_handle).unwrap(); - - let new_handle = from_string(&credentialdef_data).unwrap(); - let new_credentialdef_data = to_string(new_handle).unwrap(); - - let credentialdef1: CredentialDef = - CredentialDef::from_string(&credentialdef_data).unwrap(); - let credentialdef2: CredentialDef = - CredentialDef::from_string(&new_credentialdef_data).unwrap(); - - assert_eq!(credentialdef1, credentialdef2); - assert_eq!( - from_string("{}").unwrap_err().kind(), - LibvcxErrorKind::CreateCredDef - ); - } - - #[tokio::test] - async fn test_release_all() { - let _setup = SetupMocks::init(); - - let _issuer_did = String::from("4fUDR9R7fjwELRvH9JT6HH"); - let h1 = create( - "SourceId".to_string(), - SCHEMA_ID.to_string(), - "tag".to_string(), + let cred_def_handle = create( + DEFAULT_DID.to_owned(), + "1".to_string(), + schema_id, + "tag_1".to_string(), false, ) .await .unwrap(); - let h2 = create( - "SourceId".to_string(), - SCHEMA_ID.to_string(), - "tag".to_string(), - false, - ) - .await - .unwrap(); - - release_all(); - assert!(!is_valid_handle(h1)); - assert!(!is_valid_handle(h2)); + publish(cred_def_handle).await.unwrap(); + (schema_handle, cred_def_handle) } } diff --git a/libvcx_core/src/api_vcx/api_handle/disclosed_proof.rs b/libvcx_core/src/api_vcx/api_handle/disclosed_proof.rs index 189051d46d..af5b9cd576 100644 --- a/libvcx_core/src/api_vcx/api_handle/disclosed_proof.rs +++ 
b/libvcx_core/src/api_vcx/api_handle/disclosed_proof.rs @@ -1,6 +1,4 @@ use aries_vcx::{ - agency_client::testing::mocking::AgencyMockDecrypted, - global::settings::indy_mocks_enabled, handlers::proof_presentation::{ mediated_prover::prover_find_message_to_handle, prover::Prover, }, @@ -8,16 +6,13 @@ use aries_vcx::{ msg_fields::protocols::present_proof::{request::RequestPresentation, PresentProof}, AriesMessage, }, - utils::{ - constants::GET_MESSAGES_DECRYPTED_RESPONSE, - mockdata::mockdata_proof::ARIES_PROOF_REQUEST_PRESENTATION, - }, }; use serde_json; +use super::mediated_connection::send_message; use crate::{ api_vcx::{ - api_global::profile::{get_main_anoncreds, get_main_anoncreds_ledger_read}, + api_global::profile::{get_main_anoncreds, get_main_ledger_read}, api_handle::{mediated_connection, object_cache::ObjectCache}, }, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, @@ -183,9 +178,8 @@ pub fn get_presentation_msg(handle: u32) -> LibvcxResult { pub async fn send_proof(handle: u32, connection_handle: u32) -> LibvcxResult<()> { let mut proof = HANDLE_MAP.get_cloned(handle)?; - let send_message = mediated_connection::send_message_closure(connection_handle).await?; let message = proof.mark_presentation_sent()?; - send_message(message).await?; + send_message(connection_handle, message).await?; HANDLE_MAP.insert(handle, proof) } @@ -202,14 +196,13 @@ pub async fn reject_proof(handle: u32, connection_handle: u32) -> LibvcxResult<( handle, connection_handle ); let mut proof = HANDLE_MAP.get_cloned(handle)?; - let send_message = mediated_connection::send_message_closure(connection_handle).await?; let message = proof .decline_presentation_request( Some(String::from("Presentation Request was rejected")), None, ) .await?; - send_message(message).await?; + send_message(connection_handle, message).await?; HANDLE_MAP.insert(handle, proof) } @@ -221,8 +214,8 @@ pub async fn generate_proof( let mut proof = HANDLE_MAP.get_cloned(handle)?; proof .generate_presentation( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds()?, + get_main_ledger_read()?.as_ref(), + get_main_anoncreds()?.as_ref(), serde_json::from_str(credentials)?, serde_json::from_str(self_attested_attrs)?, ) @@ -237,20 +230,21 @@ pub async fn decline_presentation_request( proposal: Option<&str>, ) -> LibvcxResult<()> { let mut proof = HANDLE_MAP.get_cloned(handle)?; - let send_message = mediated_connection::send_message_closure(connection_handle).await?; let message = proof .decline_presentation_request( reason.map(|s| s.to_string()), proposal.map(|s| s.to_string()), ) .await?; - send_message(message).await?; + send_message(connection_handle, message).await?; HANDLE_MAP.insert(handle, proof) } pub async fn retrieve_credentials(handle: u32) -> LibvcxResult { let proof = HANDLE_MAP.get_cloned(handle)?; - let retrieved_creds = proof.retrieve_credentials(&get_main_anoncreds()?).await?; + let retrieved_creds = proof + .retrieve_credentials(get_main_anoncreds()?.as_ref()) + .await?; Ok(serde_json::to_string(&retrieved_creds)?) 
} @@ -280,11 +274,6 @@ pub fn get_thread_id(handle: u32) -> LibvcxResult { } async fn get_proof_request(connection_handle: u32, msg_id: &str) -> LibvcxResult { - if indy_mocks_enabled() { - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_PROOF_REQUEST_PRESENTATION); - } - let presentation_request = { trace!( "Prover::get_presentation_request >>> connection_handle: {:?}, msg_id: {:?}", @@ -351,29 +340,14 @@ mod tests { use aries_vcx::{ utils, utils::{ - constants::{ - ARIES_PROVER_CREDENTIALS, ARIES_PROVER_SELF_ATTESTED_ATTRS, - GET_MESSAGES_DECRYPTED_RESPONSE, - }, devsetup::{SetupDefaults, SetupMocks}, - mockdata::{ - mock_settings::MockBuilder, - mockdata_proof, - mockdata_proof::{ARIES_PROOF_PRESENTATION_ACK, ARIES_PROOF_REQUEST_PRESENTATION}, - }, + mockdata::mockdata_proof::ARIES_PROOF_REQUEST_PRESENTATION, }, }; use serde_json::Value; use super::*; - #[cfg(test)] - use crate::api_vcx::api_handle::mediated_connection::test_utils::{ - build_test_connection_invitee_completed, build_test_connection_inviter_requested, - }; - use crate::aries_vcx::{ - common::proofs::proof_request::PresentationRequestData, - protocols::proof_presentation::prover::state_machine::ProverState, - }; + use crate::aries_vcx::protocols::proof_presentation::prover::state_machine::ProverState; async fn _get_proof_request_messages(connection_h: u32) -> String { let requests = get_proof_request_messages(connection_h).await.unwrap(); @@ -410,116 +384,6 @@ mod tests { ); } - #[tokio::test] - async fn test_proof_cycle() { - let _setup = SetupMocks::init(); - - let connection_h = build_test_connection_inviter_requested().await; - - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_PROOF_REQUEST_PRESENTATION); - - let request = _get_proof_request_messages(connection_h).await; - - let handle_proof = create_with_proof_request("TEST_CREDENTIAL", &request).unwrap(); - assert_eq!( - ProverState::PresentationRequestReceived as u32, - get_state(handle_proof).unwrap() - ); - - let _mock_builder = - MockBuilder::init().set_mock_generate_indy_proof("{\"selected\":\"credentials\"}"); - - generate_proof(handle_proof, "{\"selected\":\"credentials\"}", "{}") - .await - .unwrap(); - send_proof(handle_proof, connection_h).await.unwrap(); - assert_eq!( - ProverState::PresentationSent as u32, - get_state(handle_proof).unwrap() - ); - - update_state( - handle_proof, - Some(ARIES_PROOF_PRESENTATION_ACK), - connection_h, - ) - .await - .unwrap(); - assert_eq!( - ProverState::Finished as u32, - get_state(handle_proof).unwrap() - ); - } - - #[tokio::test] - async fn test_proof_update_state_v2() { - let _setup = SetupMocks::init(); - - let connection_handle = build_test_connection_inviter_requested().await; - - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message(mockdata_proof::ARIES_PRESENTATION_REQUEST); - - let request = _get_proof_request_messages(connection_handle).await; - - let handle = create_with_proof_request("TEST_CREDENTIAL", &request).unwrap(); - assert_eq!( - ProverState::PresentationRequestReceived as u32, - get_state(handle).unwrap() - ); - - generate_proof( - handle, - ARIES_PROVER_CREDENTIALS, - ARIES_PROVER_SELF_ATTESTED_ATTRS, - ) - .await - .unwrap(); - assert_eq!( - ProverState::PresentationPrepared as u32, - get_state(handle).unwrap() - ); - - send_proof(handle, 
connection_handle).await.unwrap(); - assert_eq!( - ProverState::PresentationSent as u32, - get_state(handle).unwrap() - ); - - mediated_connection::release(connection_handle).unwrap(); - let connection_handle = build_test_connection_inviter_requested().await; - - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message( - mockdata_proof::ARIES_PROOF_PRESENTATION_ACK, - ); - - update_state(handle, None, connection_handle).await.unwrap(); - assert_eq!(ProverState::Finished as u32, get_state(handle).unwrap()); - } - - #[tokio::test] - async fn test_proof_reject_cycle() { - let _setup = SetupMocks::init(); - - let connection_h = build_test_connection_inviter_requested().await; - - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_PROOF_REQUEST_PRESENTATION); - - let request = _get_proof_request_messages(connection_h).await; - - let handle = create_with_proof_request("TEST_CREDENTIAL", &request).unwrap(); - assert_eq!( - ProverState::PresentationRequestReceived as u32, - get_state(handle).unwrap() - ); - - reject_proof(handle, connection_h).await.unwrap(); - assert_eq!(ProverState::Failed as u32, get_state(handle).unwrap()); - } - #[tokio::test] async fn get_state_test() { let _setup = SetupMocks::init(); @@ -555,16 +419,6 @@ mod tests { ); } - #[tokio::test] - async fn test_get_proof_request() { - let _setup = SetupMocks::init(); - - let connection_h = build_test_connection_invitee_completed(); - - let request = get_proof_request(connection_h, "123").await.unwrap(); - let _request: RequestPresentation = serde_json::from_str(&request).unwrap(); - } - #[tokio::test] async fn test_deserialize_succeeds_with_self_attest_allowed() { let _setup = SetupDefaults::init(); @@ -574,25 +428,4 @@ mod tests { let serialized = to_string(handle).unwrap(); from_string(&serialized).unwrap(); } - - #[tokio::test] - async fn test_get_proof_request_attachment() { - let _setup = SetupMocks::init(); - - let connection_h = build_test_connection_inviter_requested().await; - - AgencyMockDecrypted::set_next_decrypted_response(GET_MESSAGES_DECRYPTED_RESPONSE); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_PROOF_REQUEST_PRESENTATION); - - let request = _get_proof_request_messages(connection_h).await; - - let handle = create_with_proof_request("TEST_CREDENTIAL", &request).unwrap(); - assert_eq!( - ProverState::PresentationRequestReceived as u32, - get_state(handle).unwrap() - ); - - let attrs = get_proof_request_attachment(handle).unwrap(); - let _attrs: PresentationRequestData = serde_json::from_str(&attrs).unwrap(); - } } diff --git a/libvcx_core/src/api_vcx/api_handle/issuer_credential.rs b/libvcx_core/src/api_vcx/api_handle/issuer_credential.rs index 6132124e8d..6975cf1957 100644 --- a/libvcx_core/src/api_vcx/api_handle/issuer_credential.rs +++ b/libvcx_core/src/api_vcx/api_handle/issuer_credential.rs @@ -8,6 +8,7 @@ use aries_vcx::{ }; use serde_json; +use super::mediated_connection::send_message; use crate::{ api_vcx::{ api_global::profile::{get_main_anoncreds, get_main_wallet}, @@ -188,7 +189,7 @@ pub async fn build_credential_offer_msg_v2( }; credential .build_credential_offer_msg( - &get_main_anoncreds()?, + get_main_anoncreds()?.as_ref(), offer_info.clone(), comment.map(|s| s.to_string()), ) @@ -207,9 +208,8 @@ pub async fn send_credential_offer_v2( connection_handle: u32, ) -> LibvcxResult<()> { let credential = 
ISSUER_CREDENTIAL_MAP.get_cloned(credential_handle)?; - let send_closure = mediated_connection::send_message_closure(connection_handle).await?; let credential_offer = credential.get_credential_offer_msg()?; - send_closure(credential_offer).await?; + send_message(connection_handle, credential_offer).await?; ISSUER_CREDENTIAL_MAP.insert(credential_handle, credential)?; Ok(()) } @@ -224,7 +224,7 @@ pub async fn send_credential_offer_nonmediated( let wallet = get_main_wallet()?; let send_message: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { con.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { con.send_message(wallet.as_ref(), &msg, &HttpClient).await }) }); let credential_offer = credential.get_credential_offer_msg()?; send_message(credential_offer).await?; @@ -235,16 +235,17 @@ pub async fn send_credential_offer_nonmediated( pub async fn send_credential(handle: u32, connection_handle: u32) -> LibvcxResult { let mut credential = ISSUER_CREDENTIAL_MAP.get_cloned(handle)?; - credential.build_credential(&get_main_anoncreds()?).await?; - let send_closure = mediated_connection::send_message_closure(connection_handle).await?; + credential + .build_credential(get_main_anoncreds()?.as_ref()) + .await?; match credential.get_state() { IssuerState::Failed => { let problem_report = credential.get_problem_report()?; - send_closure(problem_report.into()).await?; + send_message(connection_handle, problem_report.into()).await?; } _ => { let msg_issue_credential = credential.get_msg_issue_credential()?; - send_closure(msg_issue_credential.into()).await?; + send_message(connection_handle, msg_issue_credential.into()).await?; } } let state: u32 = credential.get_state().into(); @@ -257,9 +258,11 @@ pub async fn send_credential_nonmediated(handle: u32, connection_handle: u32) -> let con = connection::get_cloned_generic_connection(&connection_handle)?; let wallet = get_main_wallet()?; let send_closure: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { con.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { con.send_message(wallet.as_ref(), &msg, &HttpClient).await }) }); - credential.build_credential(&get_main_anoncreds()?).await?; + credential + .build_credential(get_main_anoncreds()?.as_ref()) + .await?; match credential.get_state() { IssuerState::Failed => { let problem_report = credential.get_problem_report()?; @@ -278,7 +281,7 @@ pub async fn send_credential_nonmediated(handle: u32, connection_handle: u32) -> pub async fn revoke_credential_local(handle: u32) -> LibvcxResult<()> { let credential = ISSUER_CREDENTIAL_MAP.get_cloned(handle)?; credential - .revoke_credential_local(&get_main_anoncreds()?) 
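// send_credential_offer_nonmediated / send_credential_nonmediated above hand the
// protocol code a boxed async closure (the SendClosure pattern) that captures the
// connection, the wallet and an HTTP transport. Self-contained sketch of that
// shape with stand-in types (String instead of AriesMessage; a tokio runtime is
// assumed for main):

use std::{future::Future, pin::Pin};

type SendClosure = Box<
    dyn FnOnce(String) -> Pin<Box<dyn Future<Output = Result<(), String>> + Send>> + Send,
>;

async fn transport_send(msg: String) -> Result<(), String> {
    println!("sending: {msg}");
    Ok(())
}

fn make_send_closure() -> SendClosure {
    // The annotated binding lets the boxed future in the closure body coerce to
    // the dyn Future type named in SendClosure.
    let send: SendClosure =
        Box::new(|msg: String| Box::pin(async move { transport_send(msg).await }));
    send
}

#[tokio::main]
async fn main() -> Result<(), String> {
    let send = make_send_closure();
    send("credential offer".to_string()).await
}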
+ .revoke_credential_local(get_main_anoncreds()?.as_ref()) .await .map_err(|err| err.into()) } @@ -308,21 +311,9 @@ pub fn get_thread_id(handle: u32) -> LibvcxResult { #[cfg(test)] #[allow(clippy::unwrap_used)] pub mod tests { - use aries_vcx::utils::{ - constants::V3_OBJECT_SERIALIZE_VERSION, - devsetup::SetupMocks, - mockdata::{ - mockdata_credex::ARIES_CREDENTIAL_REQUEST, - mockdata_mediated_connection::ARIES_CONNECTION_ACK, - }, - }; + use aries_vcx::utils::{constants::V3_OBJECT_SERIALIZE_VERSION, devsetup::SetupMocks}; use super::*; - #[cfg(test)] - use crate::api_vcx::api_handle::credential_def::tests::create_and_publish_nonrevocable_creddef; - #[cfg(test)] - use crate::api_vcx::api_handle::mediated_connection::test_utils::build_test_connection_inviter_requested; - use crate::aries_vcx::protocols::issuance::issuer::state_machine::IssuerState; fn _issuer_credential_create() -> u32 { issuer_credential_create("1".to_string()).unwrap() @@ -360,27 +351,6 @@ pub mod tests { assert!(!string.is_empty()); } - #[tokio::test] - async fn test_send_credential_offer() { - let _setup = SetupMocks::init(); - - let connection_handle = build_test_connection_inviter_requested().await; - - let credential_handle = _issuer_credential_create(); - - let (_, cred_def_handle) = create_and_publish_nonrevocable_creddef().await; - build_credential_offer_msg_v2(credential_handle, cred_def_handle, 123, _cred_json(), None) - .await - .unwrap(); - send_credential_offer_v2(credential_handle, connection_handle) - .await - .unwrap(); - assert_eq!( - get_state(credential_handle).unwrap(), - u32::from(IssuerState::OfferSet) - ); - } - #[tokio::test] async fn test_from_string_succeeds() { let _setup = SetupMocks::init(); @@ -400,64 +370,6 @@ pub mod tests { assert_eq!(new_string, string); } - #[tokio::test] - async fn test_update_state_with_message() { - let _setup = SetupMocks::init(); - - let connection_handle = build_test_connection_inviter_requested().await; - let credential_handle = _issuer_credential_create(); - let (_, cred_def_handle) = create_and_publish_nonrevocable_creddef().await; - build_credential_offer_msg_v2(credential_handle, cred_def_handle, 1234, _cred_json(), None) - .await - .unwrap(); - send_credential_offer_v2(credential_handle, connection_handle) - .await - .unwrap(); - assert_eq!( - get_state(credential_handle).unwrap(), - u32::from(IssuerState::OfferSet) - ); - - update_state( - credential_handle, - Some(ARIES_CREDENTIAL_REQUEST), - connection_handle, - ) - .await - .unwrap(); - assert_eq!( - get_state(credential_handle).unwrap(), - u32::from(IssuerState::RequestReceived) - ); - } - - #[tokio::test] - async fn test_update_state_with_bad_message() { - let _setup = SetupMocks::init(); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_cred = _issuer_credential_create(); - let (_, cred_def_handle) = create_and_publish_nonrevocable_creddef().await; - build_credential_offer_msg_v2(handle_cred, cred_def_handle, 1234, _cred_json(), None) - .await - .unwrap(); - send_credential_offer_v2(handle_cred, handle_conn) - .await - .unwrap(); - assert_eq!( - get_state(handle_cred).unwrap(), - u32::from(IssuerState::OfferSet) - ); - - // try to update state with nonsense message - let result = update_state(handle_cred, Some(ARIES_CONNECTION_ACK), handle_conn).await; - assert!(result.is_ok()); // todo: maybe we should rather return error if update_state doesn't progress state - assert_eq!( - get_state(handle_cred).unwrap(), - u32::from(IssuerState::OfferSet) - ); - } - 
#[tokio::test] async fn test_release_all() { let _setup = SetupMocks::init(); diff --git a/libvcx_core/src/api_vcx/api_handle/mediated_connection.rs b/libvcx_core/src/api_vcx/api_handle/mediated_connection.rs index 155b3dc3c0..dda90818f6 100644 --- a/libvcx_core/src/api_vcx/api_handle/mediated_connection.rs +++ b/libvcx_core/src/api_vcx/api_handle/mediated_connection.rs @@ -11,7 +11,7 @@ use aries_vcx::{ }, AriesMessage, }, - protocols::{mediated_connection::pairwise_info::PairwiseInfo, SendClosure}, + protocols::mediated_connection::pairwise_info::PairwiseInfo, }; use serde_json; use uuid::Uuid; @@ -20,7 +20,7 @@ use crate::{ api_vcx::{ api_global::{ agency_client::get_main_agency_client, - profile::{get_main_indy_ledger_read, get_main_wallet}, + profile::{get_main_ledger_read, get_main_wallet}, wallet::{wallet_sign, wallet_verify}, }, api_handle::object_cache::ObjectCache, @@ -141,7 +141,7 @@ pub async fn create_connection(source_id: &str) -> LibvcxResult { trace!("create_connection >>> source_id: {}", source_id); let connection = MediatedConnection::create( source_id, - &get_main_wallet()?, + get_main_wallet()?.as_ref(), &get_main_agency_client()?, true, ) @@ -152,10 +152,10 @@ pub async fn create_connection(source_id: &str) -> LibvcxResult { pub async fn create_connection_with_invite(source_id: &str, details: &str) -> LibvcxResult { debug!("create connection {} with invite {}", source_id, details); if let Ok(invitation) = serde_json::from_str::(details) { - let ddo = into_did_doc(&get_main_indy_ledger_read()?, &invitation).await?; + let ddo = into_did_doc(get_main_ledger_read()?.as_ref(), &invitation).await?; let connection = MediatedConnection::create_with_invite( source_id, - &get_main_wallet()?, + get_main_wallet()?.as_ref(), &get_main_agency_client()?, invitation, ddo, @@ -180,7 +180,7 @@ pub async fn create_with_request_v2(request: &str, pw_info: PairwiseInfo) -> Lib })?; let connection = MediatedConnection::create_with_request( - &get_main_wallet()?, + get_main_wallet()?.as_ref(), request, pw_info, &get_main_agency_client()?, @@ -193,7 +193,7 @@ pub async fn send_generic_message(handle: u32, msg: &str) -> LibvcxResult LibvcxResult<() let connection = CONNECTION_MAP.get_cloned(handle)?; connection - .send_handshake_reuse(&get_main_wallet()?, oob_msg) + .send_handshake_reuse(get_main_wallet()?.as_ref(), oob_msg) .await .map_err(|err| err.into()) } @@ -220,7 +220,11 @@ pub async fn update_state_with_message(handle: u32, message: &str) -> LibvcxResu })?; connection - .update_state_with_message(get_main_wallet()?, get_main_agency_client()?, Some(message)) + .update_state_with_message( + get_main_wallet()?.as_ref(), + get_main_agency_client()?, + Some(message), + ) .await?; let state: u32 = connection.get_state().into(); CONNECTION_MAP.insert(handle, connection)?; @@ -240,7 +244,7 @@ pub async fn handle_message(handle: u32, message: &str) -> LibvcxResult<()> { })?; connection - .handle_message(message, &get_main_wallet()?) + .handle_message(message, get_main_wallet()?.as_ref()) .await?; CONNECTION_MAP.insert(handle, connection) } @@ -255,7 +259,7 @@ pub async fn update_state(handle: u32) -> LibvcxResult { ); connection - .find_and_handle_message(&get_main_wallet()?, &get_main_agency_client()?) + .find_and_handle_message(get_main_wallet()?.as_ref(), &get_main_agency_client()?) .await? } else { info!( @@ -265,7 +269,7 @@ pub async fn update_state(handle: u32) -> LibvcxResult { ); connection - .find_message_and_update_state(&get_main_wallet()?, &get_main_agency_client()?) 
+ .find_message_and_update_state(get_main_wallet()?.as_ref(), &get_main_agency_client()?) .await? }; let state: u32 = connection.get_state().into(); @@ -283,7 +287,11 @@ pub async fn connect(handle: u32) -> LibvcxResult> { let mut connection = CONNECTION_MAP.get_cloned(handle)?; connection - .connect(&get_main_wallet()?, &get_main_agency_client()?, None) + .connect( + get_main_wallet()?.as_ref(), + &get_main_agency_client()?, + None, + ) .await?; let invitation = connection .get_invite_details() @@ -367,24 +375,17 @@ pub async fn get_message_by_id(handle: u32, msg_id: &str) -> LibvcxResult LibvcxResult<()> { trace!("connection::send_message >>>"); - let send_message = send_message_closure(handle).await?; - send_message(message).await.map_err(|err| err.into()) -} - -pub async fn send_message_closure(handle: u32) -> LibvcxResult { let connection = CONNECTION_MAP.get_cloned(handle)?; - - connection - .send_message_closure(get_main_wallet()?) - .await - .map_err(|err| err.into()) + let wallet = get_main_wallet()?; + let send_message = connection.send_message_closure(wallet.as_ref()).await?; + send_message(message).await.map_err(|err| err.into()) } pub async fn send_ping(handle: u32, comment: Option<&str>) -> LibvcxResult<()> { let mut connection = CONNECTION_MAP.get_cloned(handle)?; connection - .send_ping(get_main_wallet()?, comment.map(String::from)) + .send_ping(get_main_wallet()?.as_ref(), comment.map(String::from)) .await?; CONNECTION_MAP.insert(handle, connection) } @@ -398,7 +399,7 @@ pub async fn send_discovery_features( connection .send_discovery_query( - &get_main_wallet()?, + get_main_wallet()?.as_ref(), query.map(String::from), comment.map(String::from), ) @@ -537,23 +538,15 @@ pub mod test_utils { pub mod tests { use aries_vcx::{ self, - agency_client::testing::mocking::AgencyMockDecrypted, - messages::msg_fields::protocols::connection::invitation::InvitationContent, utils::{ constants, devsetup::{SetupEmpty, SetupMocks}, - mockdata::mockdata_mediated_connection::{ - ARIES_CONNECTION_ACK, ARIES_CONNECTION_INVITATION, ARIES_CONNECTION_REQUEST, - }, }, }; - use diddoc_legacy::aries::diddoc::test_utils::*; use serde_json::Value; use super::*; - #[cfg(test)] - use crate::api_vcx::api_handle::mediated_connection::test_utils::build_test_connection_inviter_invited; - use crate::api_vcx::{api_handle::mediated_connection, VcxStateType}; + use crate::api_vcx::VcxStateType; fn _setup() { let _setup = SetupEmpty::init(); @@ -563,152 +556,6 @@ pub mod tests { "test connection" } - #[tokio::test] - async fn test_vcx_connection_release() { - let _setup = SetupMocks::init(); - let handle = mediated_connection::create_connection(_source_id()) - .await - .unwrap(); - release(handle).unwrap(); - assert_eq!( - to_string(handle).unwrap_err().kind, - LibvcxErrorKind::InvalidHandle - ) - } - - #[tokio::test] - async fn test_create_connection_works() { - let _setup = SetupMocks::init(); - let connection_handle = mediated_connection::create_connection(_source_id()) - .await - .unwrap(); - assert!(mediated_connection::is_valid_handle(connection_handle)); - assert_eq!(0, mediated_connection::get_state(connection_handle)); - } - - #[tokio::test] - async fn test_create_connection_with_pairwise_invite() { - let _setup = SetupMocks::init(); - - let id = Uuid::new_v4().to_string(); - let content = InvitationContent::builder_pairwise() - .label(_label()) - .recipient_keys(_recipient_keys()) - .routing_keys(_routing_keys()) - .service_endpoint(_service_endpoint()) - .build(); - - let msg: AriesMessage = 
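// The reworked mediated_connection::send_message above looks up the connection,
// borrows the main wallet as &dyn BaseWallet and invokes the connection's own send
// closure in a single call. Hedged usage sketch (crate imports, an existing
// connection handle, and the AriesMessage parameter type as seen at call sites
// are assumed):

async fn ack_over_connection(connection_handle: u32, ack: AriesMessage) -> LibvcxResult<()> {
    mediated_connection::send_message(connection_handle, ack).await
}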
Invitation::builder().id(id).content(content).build(); - - let connection_handle = mediated_connection::create_connection_with_invite( - _source_id(), - &serde_json::to_string(&msg).unwrap(), - ) - .await - .unwrap(); - assert!(mediated_connection::is_valid_handle(connection_handle)); - assert_eq!(1, mediated_connection::get_state(connection_handle)); - } - - #[tokio::test] - async fn test_create_connection_with_public_invite() { - let _setup = SetupMocks::init(); - - let id = Uuid::new_v4().to_string(); - let content = InvitationContent::builder_public() - .label(_label()) - .did(_did()) - .build(); - let invitation: Invitation = Invitation::builder().id(id).content(content).build(); - let msg = AriesMessage::from(invitation); - - let connection_handle = mediated_connection::create_connection_with_invite( - _source_id(), - &serde_json::to_string(&msg).unwrap(), - ) - .await - .unwrap(); - assert!(mediated_connection::is_valid_handle(connection_handle)); - assert_eq!(1, mediated_connection::get_state(connection_handle)); - } - - #[tokio::test] - async fn test_get_connection_state_works() { - let _setup = SetupMocks::init(); - let connection_handle = mediated_connection::create_connection(_source_id()) - .await - .unwrap(); - assert_eq!(0, mediated_connection::get_state(connection_handle)); - } - - #[tokio::test] - async fn test_connection_delete() { - let _setup = SetupMocks::init(); - warn!(">> test_connection_delete going to create connection"); - let connection_handle = mediated_connection::create_connection(_source_id()) - .await - .unwrap(); - warn!(">> test_connection_delete checking is valid handle"); - assert!(mediated_connection::is_valid_handle(connection_handle)); - - mediated_connection::release(connection_handle).unwrap(); - assert!(!mediated_connection::is_valid_handle(connection_handle)); - } - - #[tokio::test] - async fn test_create_connection() { - let _setup = SetupMocks::init(); - - let handle = create_connection("test_create_connection").await.unwrap(); - assert_eq!(get_state(handle), VcxStateType::VcxStateNone as u32); - - connect(handle).await.unwrap(); - assert_eq!(get_pw_did(handle).unwrap(), constants::DID); - assert_eq!(get_pw_verkey(handle).unwrap(), constants::VERKEY); - - AgencyMockDecrypted::set_next_decrypted_response( - constants::GET_MESSAGES_DECRYPTED_RESPONSE, - ); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_CONNECTION_REQUEST); - update_state(handle).await.unwrap(); - assert_eq!( - get_state(handle), - VcxStateType::VcxStateRequestReceived as u32 - ); - - AgencyMockDecrypted::set_next_decrypted_response( - constants::GET_MESSAGES_DECRYPTED_RESPONSE, - ); - AgencyMockDecrypted::set_next_decrypted_message(ARIES_CONNECTION_ACK); - update_state(handle).await.unwrap(); - assert_eq!(get_state(handle), VcxStateType::VcxStateAccepted as u32); - - // This errors b/c we release handle in delete connection - assert!(release(handle).is_ok()); - } - - #[tokio::test] - async fn test_create_drop_create() { - let _setup = SetupMocks::init(); - - let handle = create_connection("test_create_drop_create").await.unwrap(); - - assert_eq!(get_state(handle), VcxStateType::VcxStateNone as u32); - let did1 = get_pw_did(handle).unwrap(); - - release(handle).unwrap(); - - let handle2 = create_connection("test_create_drop_create").await.unwrap(); - - assert_eq!(get_state(handle2), VcxStateType::VcxStateNone as u32); - let did2 = get_pw_did(handle2).unwrap(); - - assert_ne!(handle, handle2); - assert_eq!(did1, did2); - - release(handle2).unwrap(); - } - #[tokio::test] 
async fn test_get_state_fails() { let _setup = SetupEmpty::init(); @@ -725,101 +572,6 @@ pub mod tests { assert_eq!(rc.unwrap_err().kind(), LibvcxErrorKind::InvalidHandle); } - #[tokio::test] - async fn test_get_service_endpoint() { - let _setup = SetupMocks::init(); - - let handle = create_connection("test_get_qr_code_data").await.unwrap(); - - connect(handle).await.unwrap(); - - let details = get_invite_details(handle).unwrap(); - assert!(details.contains("\"serviceEndpoint\":")); - - assert_eq!( - get_invite_details(0).unwrap_err().kind(), - LibvcxErrorKind::InvalidConnectionHandle - ); - } - - #[tokio::test] - async fn test_retry_connection() { - let _setup = SetupMocks::init(); - - let handle = create_connection("test_serialize_deserialize") - .await - .unwrap(); - - assert_eq!(get_state(handle), VcxStateType::VcxStateNone as u32); - - connect(handle).await.unwrap(); - connect(handle).await.unwrap(); - } - - #[tokio::test] - async fn test_release_all() { - let _setup = SetupMocks::init(); - - let h1 = create_connection("rel1").await.unwrap(); - let h2 = create_connection("rel2").await.unwrap(); - let h3 = create_connection("rel3").await.unwrap(); - release_all(); - assert!(!is_valid_handle(h1)); - assert!(!is_valid_handle(h2)); - assert!(!is_valid_handle(h3)); - } - - #[tokio::test] - async fn test_create_with_valid_invite_details() { - let _setup = SetupMocks::init(); - - let handle = create_connection_with_invite("alice", ARIES_CONNECTION_INVITATION) - .await - .unwrap(); - connect(handle).await.unwrap(); - - let handle_2 = create_connection_with_invite("alice", ARIES_CONNECTION_INVITATION) - .await - .unwrap(); - connect(handle_2).await.unwrap(); - } - - #[tokio::test] - async fn test_process_acceptance_message() { - let _setup = SetupMocks::init(); - - let handle = create_connection("test_process_acceptance_message") - .await - .unwrap(); - update_state_with_message(handle, ARIES_CONNECTION_REQUEST) - .await - .unwrap(); - } - - // #[tokio::test] - // #[cfg(feature = "general_test")] - // async fn test_connection_handle_is_found() { - // let _setup = SetupMocks::init(); - // let handle = create_connection_with_invite("alice", - // ARIES_CONNECTION_INVITATION).await.unwrap(); - // - // CONNECTION_MAP.get_mut(handle, |_connection| { - // { Ok(()) }.boxed() - // }).await.unwrap(); - // } - - #[tokio::test] - async fn test_send_generic_message_fails_with_invalid_connection() { - let _setup = SetupMocks::init(); - - let handle = build_test_connection_inviter_invited().await; - - let err = send_generic_message(handle, "this is the message") - .await - .unwrap_err(); - assert_eq!(err.kind(), LibvcxErrorKind::NotReady); - } - #[test] fn test_generate_public_invitation() { let _setup = SetupMocks::init(); diff --git a/libvcx_core/src/api_vcx/api_handle/out_of_band.rs b/libvcx_core/src/api_vcx/api_handle/out_of_band.rs index 0a955c6b80..4447420737 100644 --- a/libvcx_core/src/api_vcx/api_handle/out_of_band.rs +++ b/libvcx_core/src/api_vcx/api_handle/out_of_band.rs @@ -17,7 +17,7 @@ use crate::{ api_vcx::{ api_global::{ agency_client::get_main_agency_client, - profile::{get_main_indy_ledger_read, get_main_wallet}, + profile::{get_main_ledger_read, get_main_wallet}, }, api_handle::{ connection, mediated_connection::CONNECTION_MAP as MEDIATED_CONS_MAP, @@ -181,7 +181,7 @@ pub async fn connection_exists(handle: u32, conn_handles: &Vec) -> LibvcxRe let connections = conn_map.values().collect(); if let Some(connection) = oob - .connection_exists(&get_main_indy_ledger_read()?, &connections) + 
.connection_exists(get_main_ledger_read()?.as_ref(), &connections) .await? { if let Some((&handle, _)) = conn_map.iter().find(|(_, conn)| *conn == connection) { @@ -208,7 +208,7 @@ pub async fn nonmediated_connection_exists( handle, conn_handles ); - let indy_ledger = get_main_indy_ledger_read()?; + let indy_ledger = get_main_ledger_read()?; let oob = OUT_OF_BAND_RECEIVER_MAP.get_cloned(handle)?; let filter_closure = |h: &u32| { @@ -219,7 +219,7 @@ pub async fn nonmediated_connection_exists( let connections: HashMap<_, _> = conn_handles.iter().filter_map(filter_closure).collect(); match oob - .nonmediated_connection_exists::<_, &u32>(&indy_ledger, &connections) + .nonmediated_connection_exists::<_, &u32>(indy_ledger.as_ref(), &connections) .await { None => Ok((0, false)), @@ -231,11 +231,16 @@ pub async fn build_connection(handle: u32) -> LibvcxResult { trace!("build_connection >>> handle: {}", handle); let oob = OUT_OF_BAND_RECEIVER_MAP.get_cloned(handle)?; let invitation = AnyInvitation::Oob(oob.oob.clone()); - let ddo = into_did_doc(&get_main_indy_ledger_read()?, &invitation).await?; - oob.build_connection(&get_main_wallet()?, &get_main_agency_client()?, ddo, false) - .await? - .to_string() - .map_err(|err| err.into()) + let ddo = into_did_doc(get_main_ledger_read()?.as_ref(), &invitation).await?; + oob.build_connection( + get_main_wallet()?.as_ref(), + &get_main_agency_client()?, + ddo, + false, + ) + .await? + .to_string() + .map_err(|err| err.into()) } pub fn get_thread_id_sender(handle: u32) -> LibvcxResult { diff --git a/libvcx_core/src/api_vcx/api_handle/proof.rs b/libvcx_core/src/api_vcx/api_handle/proof.rs index c0d8ea11d8..900a0f4880 100644 --- a/libvcx_core/src/api_vcx/api_handle/proof.rs +++ b/libvcx_core/src/api_vcx/api_handle/proof.rs @@ -13,11 +13,12 @@ use serde_json; use crate::{ api_vcx::{ - api_global::profile::{ - get_main_anoncreds, get_main_anoncreds_ledger_read, get_main_wallet, - }, + api_global::profile::{get_main_anoncreds, get_main_ledger_read, get_main_wallet}, api_handle::{ - connection, connection::HttpClient, mediated_connection, object_cache::ObjectCache, + connection, + connection::HttpClient, + mediated_connection::{self, send_message}, + object_cache::ObjectCache, }, }, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, @@ -41,11 +42,12 @@ pub async fn create_proof( revocation_details: String, name: String, ) -> LibvcxResult { - let presentation_request = PresentationRequestData::create(&get_main_anoncreds()?, &name) - .await? - .set_requested_attributes_as_string(requested_attrs)? - .set_requested_predicates_as_string(requested_predicates)? - .set_not_revoked_interval(revocation_details)?; + let presentation_request = + PresentationRequestData::create(get_main_anoncreds()?.as_ref(), &name) + .await? + .set_requested_attributes_as_string(requested_attrs)? + .set_requested_predicates_as_string(requested_predicates)? 
+ .set_not_revoked_interval(revocation_details)?; let verifier = Verifier::create_from_request(source_id, &presentation_request)?; PROOF_MAP.add(verifier) } @@ -69,7 +71,6 @@ pub async fn update_state( if !proof.progressable_by_message() { return Ok(proof.get_state().into()); } - let send_message = mediated_connection::send_message_closure(connection_handle).await?; if let Some(message) = message { let message: AriesMessage = serde_json::from_str(message).map_err(|err| { @@ -87,13 +88,13 @@ pub async fn update_state( ); if let Some(message) = proof .process_aries_msg( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds()?, + get_main_ledger_read()?.as_ref(), + get_main_anoncreds()?.as_ref(), message, ) .await? { - send_message(message).await?; + send_message(connection_handle, message).await?; } } else { let messages = mediated_connection::get_messages(connection_handle).await?; @@ -101,13 +102,13 @@ pub async fn update_state( if let Some((uid, message)) = verifier_find_message_to_handle(&proof, messages) { if let Some(message) = proof .process_aries_msg( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds()?, + get_main_ledger_read()?.as_ref(), + get_main_anoncreds()?.as_ref(), message, ) .await? { - send_message(message).await?; + send_message(connection_handle, message).await?; } mediated_connection::update_message_status(connection_handle, &uid).await?; }; @@ -137,7 +138,7 @@ pub async fn update_state_nonmediated( let wallet = get_main_wallet()?; let send_message: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { con.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { con.send_message(wallet.as_ref(), &msg, &HttpClient).await }) }); let message: AriesMessage = serde_json::from_str(message).map_err(|err| { @@ -151,8 +152,8 @@ pub async fn update_state_nonmediated( })?; if let Some(message) = proof .process_aries_msg( - &get_main_anoncreds_ledger_read()?, - &get_main_anoncreds()?, + get_main_ledger_read()?.as_ref(), + get_main_anoncreds()?.as_ref(), message, ) .await? 
@@ -209,9 +210,8 @@ pub fn from_string(proof_data: &str) -> LibvcxResult { pub async fn send_proof_request(handle: u32, connection_handle: u32) -> LibvcxResult<()> { let mut proof = PROOF_MAP.get_cloned(handle)?; - let send_closure = mediated_connection::send_message_closure(connection_handle).await?; let message = proof.mark_presentation_request_sent()?; - send_closure(message.into()).await?; + send_message(connection_handle, message.into()).await?; PROOF_MAP.insert(handle, proof) } @@ -225,7 +225,7 @@ pub async fn send_proof_request_nonmediated( let wallet = get_main_wallet()?; let send_message: SendClosure = Box::new(|msg: AriesMessage| { - Box::pin(async move { con.send_message(&wallet, &msg, &HttpClient).await }) + Box::pin(async move { con.send_message(wallet.as_ref(), &msg, &HttpClient).await }) }); let message = proof.mark_presentation_request_sent()?; @@ -315,26 +315,13 @@ pub fn get_thread_id(handle: u32) -> LibvcxResult { #[cfg(test)] #[allow(clippy::unwrap_used)] pub mod tests { - use aries_vcx::{ - agency_client::testing::mocking::HttpClientMockResponse, - utils::{ - constants::{ - PROOF_REJECT_RESPONSE_STR_V2, REQUESTED_ATTRS, REQUESTED_PREDICATES, - V3_OBJECT_SERIALIZE_VERSION, - }, - devsetup::SetupMocks, - mockdata::{mock_settings::MockBuilder, mockdata_proof}, - }, + use aries_vcx::utils::{ + constants::{REQUESTED_ATTRS, REQUESTED_PREDICATES, V3_OBJECT_SERIALIZE_VERSION}, + devsetup::SetupMocks, }; use serde_json::Value; use super::*; - #[cfg(test)] - use crate::api_vcx::api_handle::mediated_connection::test_utils::build_test_connection_inviter_requested; - use crate::{ - api_vcx::api_handle::proof, - aries_vcx::protocols::proof_presentation::verifier::state_machine::VerifierState, - }; async fn create_default_proof() -> u32 { create_proof( @@ -419,20 +406,6 @@ pub mod tests { assert!(!is_valid_handle(handle)); } - #[tokio::test] - async fn test_send_proof_request() { - let _setup = SetupMocks::init(); - - let handle_conn = build_test_connection_inviter_requested().await; - - let handle_proof = create_default_proof().await; - send_proof_request(handle_proof, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::PresentationRequestSent as u32 - ); - } - #[tokio::test] async fn test_get_proof_fails_with_no_proof() { let _setup = SetupMocks::init(); @@ -442,160 +415,6 @@ pub mod tests { assert!(get_presentation_msg(handle).is_err()) } - #[tokio::test] - async fn test_proof_update_state_v2() { - let _setup = SetupMocks::init(); - let _mock_builder = MockBuilder::init().set_mock_result_for_validate_indy_proof(Ok(true)); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - send_proof_request(handle_proof, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::PresentationRequestSent as u32 - ); - - mediated_connection::release(handle_conn).unwrap(); - let handle_conn = build_test_connection_inviter_requested().await; - - update_state( - handle_proof, - Some(mockdata_proof::ARIES_PROOF_PRESENTATION), - handle_conn, - ) - .await - .unwrap(); - - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::Finished as u32 - ); - } - - #[tokio::test] - async fn test_update_state() { - let _setup = SetupMocks::init(); - let _mock_builder = MockBuilder::init().set_mock_result_for_validate_indy_proof(Ok(true)); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = 
create_default_proof().await; - - send_proof_request(handle_proof, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::PresentationRequestSent as u32 - ); - - update_state( - handle_proof, - Some(mockdata_proof::ARIES_PROOF_PRESENTATION), - handle_conn, - ) - .await - .unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::Finished as u32 - ); - } - - #[tokio::test] - async fn test_proof_validation_with_predicate() { - let _setup = SetupMocks::init(); - let _mock_builder = MockBuilder::init().set_mock_result_for_validate_indy_proof(Ok(true)); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - send_proof_request(handle_proof, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::PresentationRequestSent as u32 - ); - - update_state( - handle_proof, - Some(mockdata_proof::ARIES_PROOF_PRESENTATION), - handle_conn, - ) - .await - .unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::Finished as u32 - ); - } - - #[tokio::test] - async fn test_update_state_with_reject_message() { - let _setup = SetupMocks::init(); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - send_proof_request(handle_proof, handle_conn).await.unwrap(); - - update_state( - handle_proof, - Some(PROOF_REJECT_RESPONSE_STR_V2), - handle_conn, - ) - .await - .unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::Failed as u32 - ); - } - - #[tokio::test] - async fn test_send_presentation_request() { - let _setup = SetupMocks::init(); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - send_proof_request(handle_proof, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::PresentationRequestSent as u32 - ); - } - - #[tokio::test] - async fn test_get_proof() { - let _setup = SetupMocks::init(); - let _mock_builder = MockBuilder::init().set_mock_result_for_validate_indy_proof(Ok(true)); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - send_proof_request(handle_proof, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::PresentationRequestSent as u32 - ); - - update_state( - handle_proof, - Some(mockdata_proof::ARIES_PROOF_PRESENTATION), - handle_conn, - ) - .await - .unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::Finished as u32 - ); - - let proof_str = get_presentation_msg(handle_proof).unwrap(); - assert_eq!( - proof_str, - mockdata_proof::ARIES_PROOF_PRESENTATION.replace(['\n', ' '], "") - ); - } - #[tokio::test] async fn test_release_all() { let _setup = SetupMocks::init(); @@ -632,110 +451,4 @@ pub mod tests { assert!(!is_valid_handle(h2)); assert!(!is_valid_handle(h3)); } - - #[tokio::test] - async fn test_send_proof_request_can_be_retried() { - let _setup = SetupMocks::init(); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - let _request = get_presentation_request_msg(handle_proof).unwrap(); - assert_eq!(get_state(handle_proof).unwrap(), 1); - - HttpClientMockResponse::set_next_response( - aries_vcx::agency_client::errors::error::AgencyClientResult::Err( - 
aries_vcx::agency_client::errors::error::AgencyClientError::from_msg( - aries_vcx::agency_client::errors::error::AgencyClientErrorKind::IOError, - "Sending message timeout.", - ), - ), - ); - assert_eq!( - send_proof_request(handle_proof, handle_conn) - .await - .unwrap_err() - .kind(), - LibvcxErrorKind::IOError - ); - assert_eq!(get_state(handle_proof).unwrap(), 1); - - // Retry sending proof request - send_proof_request(handle_proof, handle_conn).await.unwrap(); - assert_eq!( - get_state(handle_proof).unwrap(), - VerifierState::PresentationRequestSent as u32 - ); - } - - #[tokio::test] - async fn test_proof_accepted() { - let _setup = SetupMocks::init(); - let _mock_builder = MockBuilder::init().set_mock_result_for_validate_indy_proof(Ok(true)); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - let _request = get_presentation_request_msg(handle_proof).unwrap(); - send_proof_request(handle_proof, handle_conn).await.unwrap(); - update_state( - handle_proof, - Some(mockdata_proof::ARIES_PROOF_PRESENTATION), - handle_conn, - ) - .await - .unwrap(); - assert_eq!( - proof::get_state(handle_proof).unwrap(), - VerifierState::Finished as u32 - ); - } - - #[tokio::test] - async fn test_proof_errors() { - let _setup = SetupMocks::init(); - - let handle_conn = build_test_connection_inviter_requested().await; - let handle_proof = create_default_proof().await; - - let bad_handle = 100000; - let empty = r#""#; - - assert_eq!( - send_proof_request(bad_handle, handle_conn) - .await - .unwrap_err() - .kind(), - LibvcxErrorKind::InvalidHandle - ); - assert_eq!( - get_verification_status(handle_proof).unwrap(), - VcxPresentationVerificationStatus::Unavailable - ); - assert_eq!( - create_proof( - "my source id".to_string(), - empty.to_string(), - "{}".to_string(), - r#"{"support_revocation":false}"#.to_string(), - "my name".to_string(), - ) - .await - .unwrap_err() - .kind(), - LibvcxErrorKind::InvalidJson - ); - assert_eq!( - to_string(bad_handle).unwrap_err().kind(), - LibvcxErrorKind::InvalidHandle - ); - assert_eq!( - get_source_id(bad_handle).unwrap_err().kind(), - LibvcxErrorKind::InvalidHandle - ); - assert_eq!( - from_string(empty).unwrap_err().kind(), - LibvcxErrorKind::InvalidJson - ); - } } diff --git a/libvcx_core/src/api_vcx/api_handle/revocation_registry.rs b/libvcx_core/src/api_vcx/api_handle/revocation_registry.rs index c82c02ee2d..fe784ab9d9 100644 --- a/libvcx_core/src/api_vcx/api_handle/revocation_registry.rs +++ b/libvcx_core/src/api_vcx/api_handle/revocation_registry.rs @@ -1,14 +1,10 @@ -use aries_vcx::{ - common::primitives::revocation_registry::{RevocationRegistry, RevocationRegistryDefinition}, - global::settings::CONFIG_INSTITUTION_DID, +use aries_vcx::common::primitives::revocation_registry::{ + RevocationRegistry, RevocationRegistryDefinition, }; use crate::{ api_vcx::{ - api_global::{ - profile::{get_main_anoncreds, get_main_anoncreds_ledger_write}, - settings::get_config_value, - }, + api_global::profile::{get_main_anoncreds, get_main_ledger_write}, api_handle::object_cache::ObjectCache, }, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, @@ -36,7 +32,7 @@ pub async fn create(config: RevocationRegistryConfig) -> LibvcxResult { tag, } = config; let rev_reg = RevocationRegistry::create( - &get_main_anoncreds()?, + get_main_anoncreds()?.as_ref(), &issuer_did, &cred_def_id, &tails_dir, @@ -51,20 +47,19 @@ pub async fn create(config: RevocationRegistryConfig) -> LibvcxResult { pub async fn 
publish(handle: u32, tails_url: &str) -> LibvcxResult { let mut rev_reg = REV_REG_MAP.get_cloned(handle)?; rev_reg - .publish_revocation_primitives(&get_main_anoncreds_ledger_write()?, tails_url) + .publish_revocation_primitives(get_main_ledger_write()?.as_ref(), tails_url) .await?; REV_REG_MAP.insert(handle, rev_reg)?; Ok(handle) } -pub async fn publish_revocations(handle: u32) -> LibvcxResult<()> { - let submitter_did = get_config_value(CONFIG_INSTITUTION_DID)?; +pub async fn publish_revocations(handle: u32, submitter_did: &str) -> LibvcxResult<()> { let rev_reg = REV_REG_MAP.get_cloned(handle)?; rev_reg .publish_local_revocations( - &get_main_anoncreds()?, - &get_main_anoncreds_ledger_write()?, - &submitter_did, + get_main_anoncreds()?.as_ref(), + get_main_ledger_write()?.as_ref(), + submitter_did, ) .await?; diff --git a/libvcx_core/src/api_vcx/api_handle/schema.rs b/libvcx_core/src/api_vcx/api_handle/schema.rs index 7329e36107..fe0cf9b278 100644 --- a/libvcx_core/src/api_vcx/api_handle/schema.rs +++ b/libvcx_core/src/api_vcx/api_handle/schema.rs @@ -1,18 +1,11 @@ use std::string::ToString; -use aries_vcx::{ - common::primitives::credential_schema::Schema, global::settings::CONFIG_INSTITUTION_DID, -}; +use aries_vcx::common::primitives::credential_schema::Schema; use serde_json; use crate::{ api_vcx::{ - api_global::{ - profile::{ - get_main_anoncreds, get_main_anoncreds_ledger_read, get_main_anoncreds_ledger_write, - }, - settings::get_config_value, - }, + api_global::profile::{get_main_anoncreds, get_main_ledger_read, get_main_ledger_write}, api_handle::object_cache::ObjectCache, }, errors::error::{LibvcxError, LibvcxErrorKind, LibvcxResult}, @@ -23,13 +16,12 @@ lazy_static! { } pub async fn create_and_publish_schema( + issuer_did: &str, source_id: &str, name: String, version: String, data: String, ) -> LibvcxResult { - let issuer_did = get_config_value(CONFIG_INSTITUTION_DID)?; - trace!( "create_new_schema >>> source_id: {}, issuer_did: {}, name: {}, version: {}, data: {}", source_id, @@ -50,15 +42,15 @@ pub async fn create_and_publish_schema( ) })?; let schema = Schema::create( - &get_main_anoncreds()?, + get_main_anoncreds()?.as_ref(), source_id, - &issuer_did, + issuer_did, &name, &version, &data, ) .await? - .publish(&get_main_anoncreds_ledger_write()?, None) + .publish(get_main_ledger_write()?.as_ref()) .await?; std::thread::sleep(std::time::Duration::from_millis(100)); debug!( @@ -71,25 +63,6 @@ pub async fn create_and_publish_schema( .map_err(|e| LibvcxError::from_msg(LibvcxErrorKind::CreateSchema, e.to_string())) } -pub async fn get_schema_attrs(source_id: String, schema_id: String) -> LibvcxResult<(u32, String)> { - trace!( - "get_schema_attrs >>> source_id: {}, schema_id: {}", - source_id, - schema_id - ); - let schema_ledger_data_json = get_main_anoncreds_ledger_read()? - .get_schema(&schema_id, None) - .await?; - let schema = Schema::create_from_ledger_json(&schema_ledger_data_json, &source_id, &schema_id)?; - let schema_json = schema.to_string_versioned()?; - - let handle = SCHEMA_MAP - .add(schema) - .map_err(|e| LibvcxError::from_msg(LibvcxErrorKind::CreateSchema, e.to_string()))?; - - Ok((handle, schema_json)) -} - pub fn is_valid_handle(handle: u32) -> bool { SCHEMA_MAP.has_handle(handle) } @@ -126,7 +99,7 @@ pub fn release_all() { pub async fn update_state(schema_handle: u32) -> LibvcxResult { let mut schema = SCHEMA_MAP.get_cloned(schema_handle)?; let res = schema - .update_state(&get_main_anoncreds_ledger_read()?) 
+ .update_state(get_main_ledger_read()?.as_ref()) .await?; SCHEMA_MAP.insert(schema_handle, schema)?; Ok(res) @@ -138,10 +111,10 @@ pub fn get_state(handle: u32) -> LibvcxResult { #[allow(clippy::unwrap_used)] pub mod test_utils { + use aries_vcx::global::settings::DEFAULT_DID; use rand::Rng; use super::*; - use crate::api_vcx::api_global::settings::get_config_value; pub fn prepare_schema_data() -> (String, String, String, String) { let data = json!(data()).to_string(); @@ -151,7 +124,7 @@ pub mod test_utils { rand::thread_rng().gen::(), rand::thread_rng().gen::() ); - let did = get_config_value(CONFIG_INSTITUTION_DID).unwrap(); + let did = DEFAULT_DID.to_owned(); (did, schema_name, schema_version, data) } @@ -159,7 +132,7 @@ pub mod test_utils { // TODO: Reuse test utils code and data pub async fn create_schema_real() -> u32 { let (_did, schema_name, schema_version, data) = prepare_schema_data(); - create_and_publish_schema("id", schema_name, schema_version, data) + create_and_publish_schema(DEFAULT_DID, "id", schema_name, schema_version, data) .await .unwrap() } @@ -169,7 +142,7 @@ pub mod test_utils { info!("schema: {:?}", schema); assert_eq!(schema.schema_id, schema_id.to_string()); - let mut schema_data = schema.data.clone(); + let mut schema_data = schema.data; schema_data.sort(); let mut vec_data: Vec = serde_json::from_str(data).unwrap(); vec_data.sort(); @@ -192,176 +165,28 @@ pub mod test_utils { #[cfg(test)] pub mod tests { use aries_vcx::{ - common::test_utils::create_and_write_test_schema, - global::settings::{set_config_value, DEFAULT_DID}, - utils::{ - constants, - constants::SCHEMA_ID, - devsetup::{SetupDefaults, SetupEmpty, SetupMocks}, - }, + global::settings::DEFAULT_DID, + utils::devsetup::{SetupDefaults, SetupEmpty}, }; use super::*; - use crate::api_vcx::{ - api_handle::{ - schema, - schema::test_utils::{check_schema, create_schema_real, prepare_schema_data}, - }, - utils::devsetup::SetupGlobalsWalletPoolAgency, - }; #[tokio::test] - async fn test_vcx_schema_release() { - let _setup = SetupMocks::init(); - - let (_did, schema_name, schema_version, data) = prepare_schema_data(); - let handle = create_and_publish_schema( - "test_create_schema_success", - schema_name, - schema_version, - data.clone(), - ) - .await - .unwrap(); - release(handle).unwrap(); - assert_eq!( - to_string(handle).unwrap_err().kind, - LibvcxErrorKind::InvalidHandle - ) - } - - #[tokio::test] - async fn test_create_schema_success() { - let _setup = SetupMocks::init(); + async fn test_create_schema_fails() { + let _setup = SetupDefaults::init(); - let (_did, schema_name, schema_version, data) = prepare_schema_data(); - create_and_publish_schema( - "test_create_schema_success", - schema_name, - schema_version, - data, + let err = create_and_publish_schema( + DEFAULT_DID, + "1", + "name".to_string(), + "1.0".to_string(), + "".to_string(), ) .await - .unwrap(); - } - - #[tokio::test] - async fn test_get_schema_attrs_success() { - let _setup = SetupMocks::init(); - - let (handle, schema_json) = - get_schema_attrs("Check For Success".to_string(), SCHEMA_ID.to_string()) - .await - .unwrap(); - - check_schema( - handle, - &schema_json, - SCHEMA_ID, - r#"["name","age","height","sex"]"#, - ); - } - - #[tokio::test] - async fn test_create_schema_fails() { - let _setup = SetupDefaults::init(); - set_config_value(CONFIG_INSTITUTION_DID, DEFAULT_DID).unwrap(); - let err = - create_and_publish_schema("1", "name".to_string(), "1.0".to_string(), "".to_string()) - .await - .unwrap_err(); + .unwrap_err(); 
assert_eq!(err.kind(), LibvcxErrorKind::SerializationError) } - #[tokio::test] - #[ignore] - async fn test_get_schema_attrs_from_ledger() { - SetupGlobalsWalletPoolAgency::run(|setup| async move { - let schema = create_and_write_test_schema( - &get_main_anoncreds().unwrap(), - &get_main_anoncreds_ledger_write().unwrap(), - &setup.institution_did, - constants::DEFAULT_SCHEMA_ATTRS, - ) - .await; - - let (schema_handle, schema_attrs) = - get_schema_attrs("id".to_string(), schema.schema_id.clone()) - .await - .unwrap(); - - check_schema( - schema_handle, - &schema_attrs, - &schema.schema_id, - constants::DEFAULT_SCHEMA_ATTRS, - ); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_create_schema_with_pool() { - SetupGlobalsWalletPoolAgency::run(|_setup| async move { - let handle = create_schema_real().await; - - let _source_id = get_source_id(handle).unwrap(); - let _schema_id = get_schema_id(handle).unwrap(); - let _schema_json = to_string(handle).unwrap(); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_create_duplicate_fails() { - SetupGlobalsWalletPoolAgency::run(|_setup| async move { - let (_did, schema_name, schema_version, data) = prepare_schema_data(); - - create_and_publish_schema( - "id", - schema_name.clone(), - schema_version.clone(), - data.clone(), - ) - .await - .unwrap(); - - let err = create_and_publish_schema("id_2", schema_name, schema_version, data) - .await - .unwrap_err(); - error!("err: {:?}", err); - // .unwrap_err(); - - assert_eq!(err.kind(), LibvcxErrorKind::DuplicationSchema); - }) - .await; - } - - #[tokio::test] - async fn test_release_all() { - let _setup = SetupMocks::init(); - - let (_did, schema_name, version, data) = prepare_schema_data(); - - let h1 = create_and_publish_schema("1", schema_name.clone(), version.clone(), data.clone()) - .await - .unwrap(); - let h2 = create_and_publish_schema("2", schema_name.clone(), version.clone(), data.clone()) - .await - .unwrap(); - let h3 = create_and_publish_schema("3", schema_name.clone(), version.clone(), data.clone()) - .await - .unwrap(); - - release_all(); - - assert!(!is_valid_handle(h1)); - assert!(!is_valid_handle(h2)); - assert!(!is_valid_handle(h3)); - } - #[test] fn test_handle_errors() { let _setup = SetupEmpty::init(); @@ -371,81 +196,4 @@ pub mod tests { LibvcxErrorKind::InvalidHandle ); } - - #[tokio::test] - #[ignore] - async fn test_vcx_schema_get_state_with_ledger() { - SetupGlobalsWalletPoolAgency::run(|_setup| async move { - let handle = create_schema_real().await; - assert_eq!(1, get_state(handle).unwrap()); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_vcx_create_schema_with_pool() { - SetupGlobalsWalletPoolAgency::run(|_setup| async move { - let (_issuer_did, schema_name, schema_version, schema_data) = prepare_schema_data(); - let _schema_handle = schema::create_and_publish_schema( - "source_id", - schema_name, - schema_version, - schema_data, - ) - .await - .unwrap(); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_vcx_schema_serialize_contains_version() { - SetupGlobalsWalletPoolAgency::run(|_setup| async move { - let (_issuer_did, schema_name, schema_version, schema_data) = prepare_schema_data(); - let schema_handle = schema::create_and_publish_schema( - "source_id", - schema_name, - schema_version, - schema_data, - ) - .await - .unwrap(); - - let schema_json = schema::to_string(schema_handle).unwrap(); - - let j: serde_json::Value = serde_json::from_str(&schema_json).unwrap(); - let _schema: Schema = 
serde_json::from_value(j["data"].clone()).unwrap(); - assert_eq!(j["version"], "1.0"); - }) - .await; - } - - #[tokio::test] - #[ignore] - async fn test_vcx_schema_get_attrs_with_pool() { - SetupGlobalsWalletPoolAgency::run(|_setup| async move { - let (_issuer_did, schema_name, schema_version, schema_data) = prepare_schema_data(); - let schema_handle = schema::create_and_publish_schema( - "source_id", - schema_name, - schema_version, - schema_data, - ) - .await - .unwrap(); - let _schema_json_1 = schema::to_string(schema_handle).unwrap(); - let schema_id = schema::get_schema_id(schema_handle).unwrap(); - - let (_schema_handle, schema_json_2) = - schema::get_schema_attrs("source_id".into(), schema_id) - .await - .unwrap(); - let j: serde_json::Value = serde_json::from_str(&schema_json_2).unwrap(); - let _schema: Schema = serde_json::from_value(j["data"].clone()).unwrap(); - assert_eq!(j["version"], "1.0"); - }) - .await; - } } diff --git a/libvcx_core/src/api_vcx/utils/devsetup.rs b/libvcx_core/src/api_vcx/utils/devsetup.rs index a68cc888fe..82de7e8f3b 100644 --- a/libvcx_core/src/api_vcx/utils/devsetup.rs +++ b/libvcx_core/src/api_vcx/utils/devsetup.rs @@ -9,10 +9,7 @@ use aries_vcx::{ }, WalletHandle, }, - global::settings::{ - self, init_issuer_config, set_config_value, CONFIG_INSTITUTION_DID, DEFAULT_DID, - DEFAULT_GENESIS_PATH, - }, + global::settings::{self, DEFAULT_GENESIS_PATH}, utils::devsetup::{init_test_logging, reset_global_state}, }; @@ -37,7 +34,6 @@ async fn dev_setup_issuer_wallet_and_agency_client() -> (String, WalletHandle) { let config_issuer = wallet_configure_issuer(wallet_handle, enterprise_seed) .await .unwrap(); - init_issuer_config(&config_issuer.institution_did).unwrap(); (config_issuer.institution_did, wallet_handle) } @@ -51,7 +47,6 @@ impl SetupGlobalsWalletPoolAgency { pub async fn init() -> SetupGlobalsWalletPoolAgency { reset_global_state(); init_test_logging(); - set_config_value(CONFIG_INSTITUTION_DID, DEFAULT_DID).unwrap(); let (institution_did, wallet_handle) = dev_setup_issuer_wallet_and_agency_client().await; SetupGlobalsWalletPoolAgency { institution_did, diff --git a/libvcx_core/src/errors/error.rs b/libvcx_core/src/errors/error.rs index f525f83d4d..4f0241e211 100644 --- a/libvcx_core/src/errors/error.rs +++ b/libvcx_core/src/errors/error.rs @@ -143,6 +143,8 @@ pub enum LibvcxErrorKind { DuplicationWallet, #[error("Wallet record not found")] WalletRecordNotFound, + #[error("Wallet migration failed")] + WalletMigrationFailed, #[error("Record already exists in the wallet")] DuplicationWalletRecord, #[error("Wallet not found")] diff --git a/libvdrtools/Cargo.toml b/libvdrtools/Cargo.toml index 12c2796017..284bac6a5a 100644 --- a/libvdrtools/Cargo.toml +++ b/libvdrtools/Cargo.toml @@ -26,35 +26,25 @@ mysql_storage = [] fatal_warnings = [] [dependencies] -async-std = { version = "1.8.0", features = ["attributes"] } -async-trait = "0.1.42" -etcommon-rlp = "0.2.4" -failure = { version = "0.1.8", features = ["backtrace"] } -hex = "0.4.0" -libc = "0.2.114" -log = "0.4.8" -log-derive = "0.3.0" -rand = "0.8.4" -bs58 = { version = "0.4.0", optional = true } -serde = "1.0.99" -serde_json = "1.0.40" -serde_derive = "1.0.99" -sha2 = "0.9" -sha3 = "0.9" -rmp-serde = "1.1.1" -time = "=0.3.20" -lazy_static = "1.3" -byteorder = "1.3.2" -zeroize = "~1.3.0" -regex = "1.2.1" -indy-api-types = { path = "./indy-api-types", features = ["casting_errors"]} +async-std = { version = "1", features = ["attributes"] } +async-trait = "0.1" +failure = { version = "0.1", features = 
["backtrace"] } +hex = "0.4" +libc = "0.2" +log = "0.4" +log-derive = "0.4" +bs58 = { version = "0.5", optional = true } +serde = "1" +serde_json = "1" +serde_derive = "1" +lazy_static = "1" +zeroize = "1" +regex = "1" +indy-api-types = { path = "./indy-api-types"} indy-utils = { path = "./indy-utils"} indy-wallet = { path = "./indy-wallet"} -num-traits = "0.2" -num-derive = "0.3" futures = { version = "0.3", default-features = false, features = [ "executor", "alloc", "thread-pool" ] } -uuid = { version = "0.8", default-features = false, features = ["v4"] } -ursa = { version = "0.3.7", optional = true} +ursa = { version = "0.3.7", optional = true, default-features = false, features = ["cl_native"] } [dev-dependencies] -dirs = "2.0.2" +dirs = "5" diff --git a/libvdrtools/indy-api-types/Cargo.toml b/libvdrtools/indy-api-types/Cargo.toml index 02804933b7..1cde54b95c 100644 --- a/libvdrtools/indy-api-types/Cargo.toml +++ b/libvdrtools/indy-api-types/Cargo.toml @@ -6,23 +6,15 @@ edition = "2018" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[features] -casting_errors_wallet = [ "sqlx" ] -casting_errors_misc = [ "openssl", "rust-base58", "ursa"] -casting_errors = [ "casting_errors_wallet", "casting_errors_misc"] -rust-base58 = ["bs58"] - [dependencies] -thiserror = "1.0.40" +thiserror = "1" futures = { version = "0.3", default-features = false, features = ["std"] } -log = { version = "0.4.17", features = ["std"] } -libc = "0.2.114" -openssl = {version = "0.10", optional = true} -bs58 = {version = "0.4.0", optional = true} -serde = "1.0.99" -serde_json = "1.0.40" -serde_derive = "1.0.99" -sqlx = { version = "0.5.8", git = "https://github.com/jovfer/sqlx", branch = "feature/json_no_preserve_order_v5", features = [ "sqlite", "json_no_preserve_order", "runtime-tokio-rustls" ], optional = true } -zeroize = "~1.3.0" -ursa = { version = "0.3.7", optional = true} -aes = "0.7.4" +log = { version = "0.4", features = ["std"] } +libc = "0.2" +serde = "1" +serde_json = "1" +serde_derive = "1" +openssl = { version = "0.10" } +bs58 = "0.5" +sqlx = { version = "0.7", features = [ "sqlite", "mysql", "runtime-tokio-rustls" ] } +ursa = { version = "0.3.7", default-features = false, features = ["wasm"] } \ No newline at end of file diff --git a/libvdrtools/indy-api-types/src/domain/wallet/mod.rs b/libvdrtools/indy-api-types/src/domain/wallet/mod.rs index fbfbb7e35f..b5a008cefe 100644 --- a/libvdrtools/indy-api-types/src/domain/wallet/mod.rs +++ b/libvdrtools/indy-api-types/src/domain/wallet/mod.rs @@ -1,7 +1,6 @@ -use serde_json::value::Value; -use std::collections::HashMap; +use std::{collections::HashMap, fmt}; -use crate::validation::Validatable; +use serde_json::value::Value; #[derive(Debug, Serialize, Deserialize, Clone, Default)] pub struct Config { @@ -74,7 +73,7 @@ pub struct KeyConfig { pub seed: Option, } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Serialize, Deserialize)] pub struct Record { // Wallet record type #[serde(rename = "type")] @@ -87,13 +86,16 @@ pub struct Record { pub tags: HashMap, } -pub type Tags = HashMap; - -impl Validatable for Config { - fn validate(&self) -> Result<(), String> { - if self.id.is_empty() { - return Err("Wallet id is empty".to_string()); - } - Ok(()) +impl fmt::Debug for Record { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Record") + .field("type_", &self.type_) + .field("id", &self.id) + // Censor the value + .field("value", &"******".to_string()) + .field("tags", 
&self.tags) + .finish() } } + +pub type Tags = HashMap; diff --git a/libvdrtools/indy-api-types/src/errors.rs b/libvdrtools/indy-api-types/src/errors.rs index a4f867aee2..d792848571 100644 --- a/libvdrtools/indy-api-types/src/errors.rs +++ b/libvdrtools/indy-api-types/src/errors.rs @@ -1,23 +1,21 @@ use std::{ cell, cell::RefCell, + error::Error, ffi::{CString, NulError}, fmt, io, ptr, sync::Arc, }; +use libc::c_char; use log; -use std::error::Error; -use thiserror::Error as ThisError; - #[cfg(feature = "casting_errors_wallet")] use sqlx; - +use thiserror::Error as ThisError; +use ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; #[cfg(feature = "casting_errors_misc")] use ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; -use libc::c_char; - use crate::ErrorCode; pub mod prelude { @@ -248,7 +246,6 @@ impl From for IndyError { } } -#[cfg(feature = "casting_errors_misc")] impl From for IndyError { fn from(err: UrsaCryptoError) -> Self { match err.kind() { @@ -278,7 +275,6 @@ impl From for IndyError { } } -#[cfg(feature = "casting_errors_misc")] impl From for IndyError { fn from(_err: bs58::decode::Error) -> Self { IndyError::from_msg( @@ -288,7 +284,6 @@ impl From for IndyError { } } -#[cfg(feature = "casting_errors_misc")] impl From for IndyError { fn from(err: openssl::error::ErrorStack) -> IndyError { // TODO: FIXME: Analyze ErrorStack and split invalid structure errors from other errors @@ -296,7 +291,6 @@ impl From for IndyError { } } -#[cfg(feature = "casting_errors_wallet")] impl From for IndyError { fn from(err: sqlx::Error) -> IndyError { match &err { @@ -616,8 +610,8 @@ pub fn set_current_error(err: &IndyError) { /// 1) synchronous - in the same application thread /// 2) asynchronous - inside of function callback /// -/// NOTE: Error is stored until the next one occurs in the same execution thread or until asynchronous callback finished. -/// Returning pointer has the same lifetime. +/// NOTE: Error is stored until the next one occurs in the same execution thread or until +/// asynchronous callback finished. Returning pointer has the same lifetime. /// /// #Params /// * `error_json_p` - Reference that will contain error details (if any error has occurred before) @@ -629,7 +623,6 @@ pub fn set_current_error(err: &IndyError) { /// 2) calling `indy_set_runtime_config` API function with `collect_backtrace: true` /// "message": str - human-readable error description /// } -/// pub fn get_current_error_c_json() -> *const c_char { let mut value = ptr::null(); diff --git a/libvdrtools/indy-api-types/src/lib.rs b/libvdrtools/indy-api-types/src/lib.rs index 05378c106a..8e9f98c1b0 100644 --- a/libvdrtools/indy-api-types/src/lib.rs +++ b/libvdrtools/indy-api-types/src/lib.rs @@ -16,6 +16,12 @@ pub type IndyHandle = i32; pub struct WalletHandle(pub i32); pub const INVALID_WALLET_HANDLE: WalletHandle = WalletHandle(0); +impl From for WalletHandle { + fn from(value: i32) -> Self { + Self(value) + } +} + pub type CallbackHandle = i32; pub type CommandHandle = i32; @@ -41,8 +47,6 @@ pub mod domain; pub mod errors; pub use errors::IndyError; -pub mod validation; - #[derive(Debug, PartialEq, Eq, Copy, Clone)] #[repr(i32)] pub enum ErrorCode { @@ -89,7 +93,8 @@ pub enum ErrorCode { // Invalid library state was detected in runtime. It signals library bug CommonInvalidState = 112, - // Object (json, config, key, credential and etc...) passed by library caller has invalid structure + // Object (json, config, key, credential and etc...) 
passed by library caller has invalid + // structure CommonInvalidStructure = 113, // IO Error @@ -211,8 +216,8 @@ pub enum ErrorCode { // Timeout for action PoolLedgerTimeout = 307, - // Attempt to open Pool for witch Genesis Transactions are not compatible with set Protocol version. - // Call pool.indy_set_protocol_version to set correct Protocol version. + // Attempt to open Pool for witch Genesis Transactions are not compatible with set Protocol + // version. Call pool.indy_set_protocol_version to set correct Protocol version. PoolIncompatibleProtocolVersion = 308, // Item not found on ledger. diff --git a/libvdrtools/indy-api-types/src/validation.rs b/libvdrtools/indy-api-types/src/validation.rs deleted file mode 100644 index 9fe85212b7..0000000000 --- a/libvdrtools/indy-api-types/src/validation.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub trait Validatable { - fn validate(&self) -> Result<(), String> { - Ok(()) - } -} diff --git a/libvdrtools/indy-utils/Cargo.toml b/libvdrtools/indy-utils/Cargo.toml index e5c6f0da74..9b7bc91bbf 100644 --- a/libvdrtools/indy-utils/Cargo.toml +++ b/libvdrtools/indy-utils/Cargo.toml @@ -19,20 +19,20 @@ hash_openssl = [] randombytes_sodium = [] [dependencies] -base64 = {version = "0.10.1"} -dirs = "2.0.2" -failure = "0.1.6" +base64 = {version = "0.21"} +dirs = "5" +failure = "0.1" indy-api-types = { path = "../indy-api-types"} -lazy_static = "1.3" -libc = "0.2.114" -log = "0.4.8" +lazy_static = "1" +libc = "0.2" +log = "0.4" openssl = { version = "0.10" } -serde = "1.0.99" -serde_json = "1.0.40" -serde_derive = "1.0.99" -sodiumoxide = {version = "0.0.16"} -zeroize = "~1.3.0" +serde = "1" +serde_json = "1" +serde_derive = "1" +sodiumoxide = {version = "0.0.16"} # touching this makes things go boom +zeroize = "1" [dev-dependencies] -rmp-serde = "0.13.7" -rand = "0.7.0" +rmp-serde = "1" +rand = "0.8" diff --git a/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs b/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs index cac198ae91..1c48f23779 100644 --- a/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs +++ b/libvdrtools/indy-utils/src/crypto/base64/rust_base64.rs @@ -1,20 +1,22 @@ +use base64::{engine::general_purpose, Engine}; use indy_api_types::errors::prelude::*; pub fn encode(doc: &[u8]) -> String { - base64::encode(doc) + general_purpose::STANDARD.encode(doc) } pub fn decode(doc: &str) -> Result, IndyError> { - base64::decode(doc) + general_purpose::STANDARD + .decode(doc) .map_err(|e| e.to_indy(IndyErrorKind::InvalidStructure, "Invalid base64 sequence")) } pub fn encode_urlsafe(doc: &[u8]) -> String { - base64::encode_config(doc, base64::URL_SAFE) //TODO switch to URL_SAFE_NO_PAD + general_purpose::URL_SAFE.encode(doc) } pub fn decode_urlsafe(doc: &str) -> Result, IndyError> { - base64::decode_config(doc, base64::URL_SAFE_NO_PAD).map_err(|e| { + general_purpose::URL_SAFE.decode(doc).map_err(|e| { e.to_indy( IndyErrorKind::InvalidStructure, "Invalid base64URL_SAFE sequence", @@ -53,19 +55,4 @@ mod tests { assert!(result.is_ok(), "Got error"); assert_eq!(&[1, 2, 3], &result.unwrap()[..]); } - - #[test] // aries-396 - fn encode_base64_urlsafe_and_urlsafe_no_pad_compatible() { - let data = "Hello World"; - { - let encoded = base64::encode_config(data, base64::URL_SAFE); - let decoded_data = base64::decode_config(&encoded, base64::URL_SAFE_NO_PAD).unwrap(); - assert_eq!(data.as_bytes().to_vec(), decoded_data); - } - { - let encoded = base64::encode_config(data, base64::URL_SAFE_NO_PAD); - let decoded_data = base64::decode_config(&encoded, 
base64::URL_SAFE).unwrap(); - assert_eq!(data.as_bytes().to_vec(), decoded_data); - } - } } diff --git a/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs b/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs index 20c47b8158..5b913ec5da 100644 --- a/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs +++ b/libvdrtools/indy-utils/src/crypto/chacha20poly1305_ietf/sodium.rs @@ -1,13 +1,15 @@ extern crate sodiumoxide; -use self::sodiumoxide::{crypto::aead::chacha20poly1305_ietf, utils}; -use super::pwhash_argon2i13; -use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; use std::{ cmp, io, io::{Read, Write}, }; +use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; + +use self::sodiumoxide::{crypto::aead::chacha20poly1305_ietf, utils}; +use super::pwhash_argon2i13; + pub const KEYBYTES: usize = chacha20poly1305_ietf::KEYBYTES; pub const NONCEBYTES: usize = chacha20poly1305_ietf::NONCEBYTES; pub const TAGBYTES: usize = chacha20poly1305_ietf::TAGBYTES; @@ -303,8 +305,8 @@ mod tests { pub fn gen_nonce_and_encrypt_detached_decrypt_detached_works() { let data = randombytes(100); let key = gen_key(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = b"some protocol data input to the encryption"; let (c, nonce, tag) = gen_nonce_and_encrypt_detached(&data, aad, &key); diff --git a/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs b/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs index 4d6c1a0392..d51f75d92f 100644 --- a/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs +++ b/libvdrtools/indy-utils/src/crypto/ed25519_box/sodium.rs @@ -1,8 +1,9 @@ extern crate sodiumoxide; -use self::sodiumoxide::crypto::box_; use indy_api_types::errors::prelude::*; +use self::sodiumoxide::crypto::box_; + pub const NONCEBYTES: usize = box_::curve25519xsalsa20poly1305::NONCEBYTES; pub const PUBLICKEYBYTES: usize = box_::curve25519xsalsa20poly1305::PUBLICKEYBYTES; pub const SECRETKEYBYTES: usize = box_::curve25519xsalsa20poly1305::SECRETKEYBYTES; diff --git a/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs b/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs index 5f7ad758b1..c89f8abeab 100644 --- a/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs +++ b/libvdrtools/indy-utils/src/crypto/ed25519_sign/sodium.rs @@ -1,5 +1,4 @@ use indy_api_types::errors::prelude::*; - use libc::c_int; use sodiumoxide::crypto::{box_, sign}; diff --git a/libvdrtools/indy-utils/src/crypto/hash/openssl.rs b/libvdrtools/indy-utils/src/crypto/hash/openssl.rs index 97c86bfc0a..237ac7e01f 100644 --- a/libvdrtools/indy-utils/src/crypto/hash/openssl.rs +++ b/libvdrtools/indy-utils/src/crypto/hash/openssl.rs @@ -1,11 +1,13 @@ extern crate openssl; -use self::openssl::hash::{Hasher, MessageDigest}; use indy_api_types::errors::prelude::*; +use self::openssl::hash::{Hasher, MessageDigest}; + pub const HASHBYTES: usize = 32; -// these bytes are the same as openssl_hash(MessageDigest::sha256(), &[]) so we do not have to actually call the hash function +// these bytes are the same as openssl_hash(MessageDigest::sha256(), &[]) so we do not have to +// actually call the hash function pub const EMPTY_HASH_BYTES: [u8; HASHBYTES] = [ 227, 176, 196, 66, 152, 
252, 28, 20, 154, 251, 244, 200, 153, 111, 185, 36, 39, 174, 65, 228, 100, 155, 147, 76, 164, 149, 153, 27, 120, 82, 184, 85, diff --git a/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs b/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs index 2989810302..b629c34c1f 100644 --- a/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs +++ b/libvdrtools/indy-utils/src/crypto/pwhash_argon2i13/sodium.rs @@ -1,10 +1,11 @@ extern crate serde; extern crate sodiumoxide; -use self::sodiumoxide::crypto::pwhash; use indy_api_types::{domain::wallet::KeyDerivationMethod, errors::prelude::*}; use libc::{c_int, c_ulonglong, size_t}; +use self::sodiumoxide::crypto::pwhash; + pub const SALTBYTES: usize = pwhash::SALTBYTES; sodium_type!(Salt, pwhash::Salt, SALTBYTES); @@ -84,9 +85,10 @@ extern "C" { #[cfg(test)] mod tests { - use super::*; use rmp_serde; + use super::*; + #[test] fn get_salt_works() { let salt = gen_salt(); diff --git a/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs b/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs index 0acc7abc4f..0f00f525fb 100644 --- a/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs +++ b/libvdrtools/indy-utils/src/crypto/sealedbox/sodium.rs @@ -1,8 +1,9 @@ extern crate sodiumoxide; +use indy_api_types::errors::prelude::*; + use self::sodiumoxide::crypto::sealedbox; use super::ed25519_box; -use indy_api_types::errors::prelude::*; pub fn encrypt(pk: &ed25519_box::PublicKey, doc: &[u8]) -> Result, IndyError> { Ok(sealedbox::seal(doc, &pk.0)) diff --git a/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs b/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs index 913aba2456..64cb09cb6a 100644 --- a/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs +++ b/libvdrtools/indy-utils/src/crypto/xsalsa20/sodium.rs @@ -1,8 +1,9 @@ extern crate sodiumoxide; -use self::sodiumoxide::crypto::{secretbox, secretbox::xsalsa20poly1305}; use indy_api_types::errors::prelude::*; +use self::sodiumoxide::crypto::{secretbox, secretbox::xsalsa20poly1305}; + pub const KEYBYTES: usize = xsalsa20poly1305::KEYBYTES; pub const NONCEBYTES: usize = xsalsa20poly1305::NONCEBYTES; pub const MACBYTES: usize = xsalsa20poly1305::MACBYTES; diff --git a/libvdrtools/indy-utils/src/lib.rs b/libvdrtools/indy-utils/src/lib.rs index 83bc87b618..2888fda516 100644 --- a/libvdrtools/indy-utils/src/lib.rs +++ b/libvdrtools/indy-utils/src/lib.rs @@ -22,9 +22,6 @@ macro_rules! 
secret { pub mod crypto; pub mod environment; pub mod sequence; -#[macro_use] -#[allow(unused_macros)] -pub mod test; pub mod wql; use indy_api_types::{CommandHandle, SearchHandle, VdrHandle, WalletHandle}; diff --git a/libvdrtools/indy-utils/src/test.rs b/libvdrtools/indy-utils/src/test.rs deleted file mode 100644 index dbaf50da56..0000000000 --- a/libvdrtools/indy-utils/src/test.rs +++ /dev/null @@ -1,117 +0,0 @@ -use super::environment; - -use std::{fs, fs::File, path::Path}; - -pub fn cleanup_files(dir: &Path, name: &str) { - let mut path = dir.to_path_buf(); - path.push(name); - if path.exists() { - if path.is_dir() { - fs::remove_dir_all(path).unwrap(); - } else { - fs::remove_file(path).unwrap(); - } - } -} - -pub fn cleanup_indy_home(name: &str) { - cleanup_files(&environment::indy_home_path(), name); -} - -pub fn cleanup_temp(name: &str) { - cleanup_files(&environment::tmp_path(), name); -} - -pub fn cleanup_wallet(name: &str) { - cleanup_files(&environment::wallet_home_path(), name); -} - -pub fn cleanup_pool(name: &str) { - cleanup_files(&environment::pool_home_path(), name); -} - -pub fn cleanup_storage(name: &str) { - cleanup_wallet(name); - cleanup_pool(name); - cleanup_indy_home(name); - cleanup_temp(name); -} - -pub fn test_pool_create_poolfile(pool_name: &str) -> File { - let mut pool_path = environment::pool_path(pool_name); - fs::create_dir_all(pool_path.as_path()).unwrap(); - pool_path.push(pool_name); - pool_path.set_extension("txn"); - fs::File::create(pool_path.as_path()).unwrap() -} - -pub fn check_pool_exists(name: &str) -> bool { - let mut path = environment::pool_home_path(); - path.push(name); - path.exists() -} - -pub fn gen_txns() -> Vec { - let test_pool_ip = environment::test_pool_ip(); - - vec![ - format!( - r#"{{"reqSignature":{{}},"txn":{{"data":{{"data":{{"alias":"Node1","blskey":"4N8aUNHSgjQVgkpm8nhNEfDf6txHznoYREg9kirmJrkivgL4oSEimFF6nsQ6M41QvhM2Z33nves5vfSn9n1UwNFJBYtWVnHYMATn76vLuL3zU88KyeAYcHfsih3He6UHcXDxcaecHVz6jhCYz1P2UZn2bDVruL5wXpehgBfBaLKm3Ba","blskey_pop":"RahHYiCvoNCtPTrVtP7nMC5eTYrsUA8WjXbdhNc8debh1agE9bGiJxWBXYNFbnJXoXhWFMvyqhqhRoq737YQemH5ik9oL7R4NTTCz2LEZhkgLJzB3QRQqJyBNyv7acbdHrAT8nQ9UkLbaVL9NBpnWXBTw4LEMePaSHEw66RzPNdAX1","client_ip":"{}","client_port":9702,"node_ip":"{}","node_port":9701,"services":["VALIDATOR"]}},"dest":"Gw6pDLhcBcoQesN72qfotTgFa7cbuqZpkX3Xo6pLhPhv"}},"metadata":{{"from":"Th7MpTaRZVRYnPiabds81Y"}},"type":"0"}},"txnMetadata":{{"seqNo":1,"txnId":"fea82e10e894419fe2bea7d96296a6d46f50f93f9eeda954ec461b2ed2950b62"}},"ver":"1"}}"#, - test_pool_ip, test_pool_ip - ), - format!( - r#"{{"reqSignature":{{}},"txn":{{"data":{{"data":{{"alias":"Node2","blskey":"37rAPpXVoxzKhz7d9gkUe52XuXryuLXoM6P6LbWDB7LSbG62Lsb33sfG7zqS8TK1MXwuCHj1FKNzVpsnafmqLG1vXN88rt38mNFs9TENzm4QHdBzsvCuoBnPH7rpYYDo9DZNJePaDvRvqJKByCabubJz3XXKbEeshzpz4Ma5QYpJqjk","blskey_pop":"Qr658mWZ2YC8JXGXwMDQTzuZCWF7NK9EwxphGmcBvCh6ybUuLxbG65nsX4JvD4SPNtkJ2w9ug1yLTj6fgmuDg41TgECXjLCij3RMsV8CwewBVgVN67wsA45DFWvqvLtu4rjNnE9JbdFTc1Z4WCPA3Xan44K1HoHAq9EVeaRYs8zoF5","client_ip":"{}","client_port":9704,"node_ip":"{}","node_port":9703,"services":["VALIDATOR"]}},"dest":"8ECVSk179mjsjKRLWiQtssMLgp6EPhWXtaYyStWPSGAb"}},"metadata":{{"from":"EbP4aYNeTHL6q385GuVpRV"}},"type":"0"}},"txnMetadata":{{"seqNo":2,"txnId":"1ac8aece2a18ced660fef8694b61aac3af08ba875ce3026a160acbc3a3af35fc"}},"ver":"1"}}"#, - test_pool_ip, test_pool_ip - ), - format!( - 
r#"{{"reqSignature":{{}},"txn":{{"data":{{"data":{{"alias":"Node3","blskey":"3WFpdbg7C5cnLYZwFZevJqhubkFALBfCBBok15GdrKMUhUjGsk3jV6QKj6MZgEubF7oqCafxNdkm7eswgA4sdKTRc82tLGzZBd6vNqU8dupzup6uYUf32KTHTPQbuUM8Yk4QFXjEf2Usu2TJcNkdgpyeUSX42u5LqdDDpNSWUK5deC5","blskey_pop":"QwDeb2CkNSx6r8QC8vGQK3GRv7Yndn84TGNijX8YXHPiagXajyfTjoR87rXUu4G4QLk2cF8NNyqWiYMus1623dELWwx57rLCFqGh7N4ZRbGDRP4fnVcaKg1BcUxQ866Ven4gw8y4N56S5HzxXNBZtLYmhGHvDtk6PFkFwCvxYrNYjh","client_ip":"{}","client_port":9706,"node_ip":"{}","node_port":9705,"services":["VALIDATOR"]}},"dest":"DKVxG2fXXTU8yT5N7hGEbXB3dfdAnYv1JczDUHpmDxya"}},"metadata":{{"from":"4cU41vWW82ArfxJxHkzXPG"}},"type":"0"}},"txnMetadata":{{"seqNo":3,"txnId":"7e9f355dffa78ed24668f0e0e369fd8c224076571c51e2ea8be5f26479edebe4"}},"ver":"1"}}"#, - test_pool_ip, test_pool_ip - ), - format!( - r#"{{"reqSignature":{{}},"txn":{{"data":{{"data":{{"alias":"Node4","blskey":"2zN3bHM1m4rLz54MJHYSwvqzPchYp8jkHswveCLAEJVcX6Mm1wHQD1SkPYMzUDTZvWvhuE6VNAkK3KxVeEmsanSmvjVkReDeBEMxeDaayjcZjFGPydyey1qxBHmTvAnBKoPydvuTAqx5f7YNNRAdeLmUi99gERUU7TD8KfAa6MpQ9bw","blskey_pop":"RPLagxaR5xdimFzwmzYnz4ZhWtYQEj8iR5ZU53T2gitPCyCHQneUn2Huc4oeLd2B2HzkGnjAff4hWTJT6C7qHYB1Mv2wU5iHHGFWkhnTX9WsEAbunJCV2qcaXScKj4tTfvdDKfLiVuU2av6hbsMztirRze7LvYBkRHV3tGwyCptsrP","client_ip":"{}","client_port":9708,"node_ip":"{}","node_port":9707,"services":["VALIDATOR"]}},"dest":"4PS3EDQ3dW1tci1Bp6543CfuuebjFrg36kLAUcskGfaA"}},"metadata":{{"from":"TWwCRQRZ2ZHMJFn9TzLp7W"}},"type":"0"}},"txnMetadata":{{"seqNo":4,"txnId":"aa5e817d7cc626170eca175822029339a444eb0ee8f0bd20d3b0b76e566fb008"}},"ver":"1"}}"#, - test_pool_ip, test_pool_ip - ), - ] -} - -#[macro_export] -macro_rules! assert_match { - ($pattern:pat, $var:expr) => { - assert!(match $var { - $pattern => true, - _ => false, - }) - }; - ($pattern:pat, $var:expr, $val_in_pattern:ident, $exp_value:expr) => { - assert!(match $var { - $pattern => $val_in_pattern == $exp_value, - _ => false, - }) - }; - ($pattern:pat, $var:expr, $val_in_pattern1:ident, $exp_value1:expr, $val_in_pattern2:ident, $exp_value2:expr) => { - assert!(match $var { - $pattern => $val_in_pattern1 == $exp_value1 && $val_in_pattern2 == $exp_value2, - _ => false, - }) - }; -} - -#[macro_export] -macro_rules! assert_kind { - ($kind:expr, $var:expr) => { - match $var { - Err(e) => assert_eq!($kind, e.kind()), - _ => assert!(false, "Result expected to be error"), - } - }; -} - -#[macro_export] -macro_rules! 
assert_code { - ($code:expr, $var:expr) => { - match $var { - Err(e) => assert_eq!($code, e.error_code), - _ => assert!(false, "Result expected to be error"), - } - }; -} diff --git a/libvdrtools/indy-utils/src/wql.rs b/libvdrtools/indy-utils/src/wql.rs index 6af8897b5f..d9cabdd8d6 100644 --- a/libvdrtools/indy-utils/src/wql.rs +++ b/libvdrtools/indy-utils/src/wql.rs @@ -267,9 +267,10 @@ fn parse_single_operator( #[cfg(test)] mod tests { - use super::*; use rand::{distributions::Alphanumeric, thread_rng, Rng}; + use super::*; + fn _random_string(len: usize) -> String { thread_rng() .sample_iter(&Alphanumeric) diff --git a/libvdrtools/indy-wallet/Cargo.toml b/libvdrtools/indy-wallet/Cargo.toml index e388816185..7d3c951630 100644 --- a/libvdrtools/indy-wallet/Cargo.toml +++ b/libvdrtools/indy-wallet/Cargo.toml @@ -12,21 +12,21 @@ mysql = [] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -async-trait = "0.1.42" -byteorder = "1.3.2" +async-trait = "0.1" +byteorder = "1" futures = { version = "0.3", default-features = false, features = [ "alloc" ] } -indy-api-types = { path = "../indy-api-types", features = [ "casting_errors_wallet", "casting_errors_misc" ] } +indy-api-types = { path = "../indy-api-types" } indy-utils = { path = "../indy-utils"} -libc = "0.2.114" -log = "0.4.8" -rmp-serde = "0.13.7" -bs58 = "0.4.0" -serde = "1.0.99" -serde_json = "1.0.40" -serde_derive = "1.0.99" -sqlx = { version = "0.5.8", git = "https://github.com/jovfer/sqlx", branch = "feature/json_no_preserve_order_v5", features = [ "sqlite", "mysql", "json_no_preserve_order", "runtime-tokio-rustls" ] } -zeroize = "~1.3.0" -lru = "0.7.6" +libc = "0.2" +log = "0.4" +rmp-serde = "1" +bs58 = "0.5" +serde = "1" +serde_json = "1" +serde_derive = "1" +sqlx = { version = "0.7", features = [ "sqlite", "mysql", "runtime-tokio-rustls" ] } +zeroize = "1" +lru = "0.12" [dev-dependencies] -async-std = "1.12.0" +async-std = "1" diff --git a/libvdrtools/indy-wallet/src/cache/cache.rs b/libvdrtools/indy-wallet/src/cache/cache.rs deleted file mode 100644 index 83732c4390..0000000000 --- a/libvdrtools/indy-wallet/src/cache/cache.rs +++ /dev/null @@ -1,11 +0,0 @@ -use crate::cache::wallet_cache::{WalletCacheKey, WalletCacheValue}; - -pub trait Cache { - fn put(&mut self, key: WalletCacheKey, value: WalletCacheValue) -> Option; - fn get(&mut self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; - fn get_mut(&mut self, key: &WalletCacheKey) -> Option<&mut WalletCacheValue>; - fn pop(&mut self, key: &WalletCacheKey) -> Option; - fn peek(&self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; - fn len(&self) -> usize; - fn cap(&self) -> usize; -} diff --git a/libvdrtools/indy-wallet/src/cache/lru.rs b/libvdrtools/indy-wallet/src/cache/lru.rs index 8ce1b709b7..fe50b13e4c 100644 --- a/libvdrtools/indy-wallet/src/cache/lru.rs +++ b/libvdrtools/indy-wallet/src/cache/lru.rs @@ -1,15 +1,18 @@ +use std::num::NonZeroUsize; + +use lru::LruCache as InnerCache; + use crate::cache::{ - cache::Cache, wallet_cache::{WalletCacheKey, WalletCacheValue}, + Cache, }; -use lru::LruCache as InnerCache; pub struct LruCache { inner: InnerCache, } impl LruCache { - pub fn new(size: usize) -> LruCache { + pub fn new(size: NonZeroUsize) -> LruCache { LruCache { inner: InnerCache::new(size), } @@ -42,6 +45,6 @@ impl Cache for LruCache { } fn cap(&self) -> usize { - self.inner.cap() + self.inner.cap().into() } } diff --git a/libvdrtools/indy-wallet/src/cache/mod.rs 
b/libvdrtools/indy-wallet/src/cache/mod.rs index 922e4fff0f..747dc487fc 100644 --- a/libvdrtools/indy-wallet/src/cache/mod.rs +++ b/libvdrtools/indy-wallet/src/cache/mod.rs @@ -1,3 +1,14 @@ -mod cache; mod lru; pub mod wallet_cache; + +use crate::cache::wallet_cache::{WalletCacheKey, WalletCacheValue}; + +pub trait Cache { + fn put(&mut self, key: WalletCacheKey, value: WalletCacheValue) -> Option; + fn get(&mut self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; + fn get_mut(&mut self, key: &WalletCacheKey) -> Option<&mut WalletCacheValue>; + fn pop(&mut self, key: &WalletCacheKey) -> Option; + fn peek(&self, key: &WalletCacheKey) -> Option<&WalletCacheValue>; + fn len(&self) -> usize; + fn cap(&self) -> usize; +} diff --git a/libvdrtools/indy-wallet/src/cache/wallet_cache.rs b/libvdrtools/indy-wallet/src/cache/wallet_cache.rs index 8d9bc01294..2bf91dad4e 100644 --- a/libvdrtools/indy-wallet/src/cache/wallet_cache.rs +++ b/libvdrtools/indy-wallet/src/cache/wallet_cache.rs @@ -1,5 +1,17 @@ +use std::{ + collections::{HashMap, HashSet}, + iter::FromIterator, + num::NonZeroUsize, + sync::{ + atomic::{AtomicUsize, Ordering}, + Mutex, RwLock, + }, +}; + +use indy_api_types::domain::wallet::{CacheConfig, CachingAlgorithm}; + use crate::{ - cache::{cache::Cache, lru::LruCache}, + cache::{lru::LruCache, Cache}, storage::{ StorageRecord, Tag, Tag::{Encrypted, PlainText}, @@ -9,15 +21,6 @@ use crate::{ wallet::EncryptedValue, RecordOptions, }; -use indy_api_types::domain::wallet::{CacheConfig, CachingAlgorithm}; -use std::{ - collections::{HashMap, HashSet}, - iter::FromIterator, - sync::{ - atomic::{AtomicUsize, Ordering}, - Mutex, RwLock, - }, -}; #[derive(PartialEq, Eq, Hash)] pub struct WalletCacheKey { @@ -40,7 +43,9 @@ impl WalletCache { match config { Some(cache_config) if cache_config.size > 0 && !cache_config.entities.is_empty() => { let cache = match cache_config.algorithm { - CachingAlgorithm::LRU => LruCache::new(cache_config.size), + CachingAlgorithm::LRU => { + LruCache::new(NonZeroUsize::new(cache_config.size).unwrap()) + } }; WalletCache { cache: Some(Mutex::new(Box::new(cache))), diff --git a/libvdrtools/indy-wallet/src/encryption.rs b/libvdrtools/indy-wallet/src/encryption.rs index 28c94a14fc..d3df984a1a 100644 --- a/libvdrtools/indy-wallet/src/encryption.rs +++ b/libvdrtools/indy-wallet/src/encryption.rs @@ -49,11 +49,11 @@ impl KeyDerivationData { (KeyDerivationMethod::RAW, &Metadata::MetadataRaw(_)) => { KeyDerivationData::Raw(passphrase) } - (KeyDerivationMethod::ARGON2I_INT, &Metadata::MetadataArgon(ref metadata)) => { + (KeyDerivationMethod::ARGON2I_INT, Metadata::MetadataArgon(metadata)) => { let master_key_salt = master_key_salt_from_slice(&metadata.master_key_salt)?; KeyDerivationData::Argon2iInt(passphrase, master_key_salt) } - (KeyDerivationMethod::ARGON2I_MOD, &Metadata::MetadataArgon(ref metadata)) => { + (KeyDerivationMethod::ARGON2I_MOD, Metadata::MetadataArgon(metadata)) => { let master_key_salt = master_key_salt_from_slice(&metadata.master_key_salt)?; KeyDerivationData::Argon2iMod(passphrase, master_key_salt) } @@ -426,7 +426,8 @@ pub(super) fn decrypt_storage_record( // #[test] // fn test_encrypt_decrypt_tags() { -// let tags = serde_json::from_str(r#"{"tag1":"value1", "tag2":"value2", "~tag3":"value3"}"#).unwrap(); +// let tags = serde_json::from_str(r#"{"tag1":"value1", "tag2":"value2", +// "~tag3":"value3"}"#).unwrap(); // let tag_name_key = chacha20poly1305_ietf::gen_key(); // let tag_value_key = chacha20poly1305_ietf::gen_key(); @@ -453,12 +454,13 @@ 
pub(super) fn decrypt_storage_record( // let value = "test_value"; // let encrypted_value = EncryptedValue::encrypt(value, &keys.value_key); // let type_ = "test_type"; -// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, &keys.item_hmac_key); -// let encrypted_type = encrypt_as_searchable(type_.as_bytes(), &keys.type_key, &keys.item_hmac_key); -// let mut tags = HashMap::new(); +// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, +// &keys.item_hmac_key); let encrypted_type = encrypt_as_searchable(type_.as_bytes(), +// &keys.type_key, &keys.item_hmac_key); let mut tags = HashMap::new(); // tags.insert("tag_name_1".to_string(), "tag_value_1".to_string()); // tags.insert("~tag_name_2".to_string(), "tag_value_2".to_string()); -// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, &keys.tags_hmac_key); +// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, +// &keys.tags_hmac_key); // let storage_record = StorageRecord { // id: encrypted_name, @@ -482,12 +484,13 @@ pub(super) fn decrypt_storage_record( // let value = "test_value"; // let encrypted_value = EncryptedValue::encrypt(value, &keys.value_key); // let type_ = "test_type"; -// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, &keys.item_hmac_key); -// let encrypted_type = encrypt_as_searchable(type_.as_bytes(), &keys.type_key, &keys.item_hmac_key); -// let mut tags = HashMap::new(); +// let encrypted_name = encrypt_as_searchable(name.as_bytes(), &keys.name_key, +// &keys.item_hmac_key); let encrypted_type = encrypt_as_searchable(type_.as_bytes(), +// &keys.type_key, &keys.item_hmac_key); let mut tags = HashMap::new(); // tags.insert("tag_name_1".to_string(), "tag_value_1".to_string()); // tags.insert("~tag_name_2".to_string(), "tag_value_2".to_string()); -// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, &keys.tags_hmac_key); +// let encrypted_tags = encrypt_tags(&tags, &keys.tag_name_key, &keys.tag_value_key, +// &keys.tags_hmac_key); // let storage_record = StorageRecord { // id: encrypted_name, diff --git a/libvdrtools/indy-wallet/src/export_import.rs b/libvdrtools/indy-wallet/src/export_import.rs index 0aa1ab1e90..3dd2eb2e46 100644 --- a/libvdrtools/indy-wallet/src/export_import.rs +++ b/libvdrtools/indy-wallet/src/export_import.rs @@ -1,26 +1,23 @@ use std::{ io, io::{BufReader, BufWriter, Read, Write}, + sync::Arc, time::{SystemTime, UNIX_EPOCH}, }; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; - use indy_api_types::{ domain::wallet::{KeyDerivationMethod, Record}, errors::prelude::*, }; - use indy_utils::crypto::{ chacha20poly1305_ietf, hash::{hash, HASHBYTES}, pwhash_argon2i13, }; - use serde::{Deserialize, Serialize}; use crate::{encryption::KeyDerivationData, Wallet, WalletRecord}; -use std::sync::Arc; const CHUNK_SIZE: usize = 1024; @@ -30,7 +27,8 @@ pub enum EncryptionMethod { ChaCha20Poly1305IETF { // pwhash_argon2i13::Salt as bytes. Random salt used for deriving of key from passphrase salt: Vec, - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each chunk to be sure in export file consistency + // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. 
We increment nonce for each + // chunk to be sure in export file consistency nonce: Vec, // size of encrypted chunk chunk_size: usize, @@ -39,14 +37,16 @@ pub enum EncryptionMethod { ChaCha20Poly1305IETFInteractive { // pwhash_argon2i13::Salt as bytes. Random salt used for deriving of key from passphrase salt: Vec, - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each chunk to be sure in export file consistency + // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each + // chunk to be sure in export file consistency nonce: Vec, // size of encrypted chunk chunk_size: usize, }, // **ChaCha20-Poly1305-IETF raw key** cypher in blocks per chunk_size bytes ChaCha20Poly1305IETFRaw { - // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each chunk to be sure in export file consistency + // chacha20poly1305_ietf::Nonce as bytes. Random start nonce. We increment nonce for each + // chunk to be sure in export file consistency nonce: Vec, // size of encrypted chunk chunk_size: usize, @@ -173,6 +173,7 @@ pub(super) async fn export_continue( Ok(()) } +#[allow(clippy::type_complexity)] pub(super) fn preparse_file_to_import( reader: T, passphrase: &str, @@ -305,7 +306,7 @@ where )?; wallet - .add(&record.type_, &record.id, &record.value, &record.tags) + .add(&record.type_, &record.id, &record.value, &record.tags, true) .await?; } diff --git a/libvdrtools/indy-wallet/src/lib.rs b/libvdrtools/indy-wallet/src/lib.rs index 01129372b2..8528783d88 100644 --- a/libvdrtools/indy-wallet/src/lib.rs +++ b/libvdrtools/indy-wallet/src/lib.rs @@ -1,16 +1,14 @@ -#![allow(clippy::all)] - use std::{ collections::{HashMap, HashSet}, - fs, + fmt, fs, io::BufReader, path::PathBuf, - sync::Arc, + sync::{Arc, Mutex}, unimplemented, }; use indy_api_types::{ - domain::wallet::{Config, Credentials, ExportConfig, Record, Tags}, + domain::wallet::{CacheConfig, Config, Credentials, ExportConfig, Record, Tags}, errors::prelude::*, WalletHandle, }; @@ -18,10 +16,9 @@ use indy_utils::{ crypto::chacha20poly1305_ietf::{self, Key as MasterKey}, secret, }; -use log::{debug, trace}; +use log::{error, info, trace, warn}; use serde::{Deserialize, Serialize}; use serde_json::Value as SValue; -use std::sync::Mutex; pub use crate::encryption::KeyDerivationData; use crate::{ @@ -32,7 +29,6 @@ use crate::{ }, wallet::{Keys, Wallet}, }; -use indy_api_types::domain::wallet::CacheConfig; mod encryption; mod iterator; @@ -46,6 +42,15 @@ mod cache; mod export_import; mod wallet; +#[derive(Debug)] +pub struct MigrationResult { + migrated: u32, + skipped: u32, + duplicated: u32, + failed: u32, +} + +#[allow(clippy::type_complexity)] pub struct WalletService { storage_types: Mutex>>, wallets: Mutex>>, @@ -76,6 +81,7 @@ pub struct WalletService { cache_hit_metrics: WalletCacheHitMetrics, } +#[allow(clippy::new_without_default)] impl WalletService { pub fn new() -> WalletService { let storage_types = { @@ -356,7 +362,7 @@ impl WalletService { ) -> IndyResult<()> { let wallet = self.get_wallet(wallet_handle).await?; wallet - .add(type_, name, value, tags) + .add(type_, name, value, tags, true) .await .map_err(|err| WalletService::_map_wallet_storage_error(err, type_, name)) } @@ -708,7 +714,7 @@ impl WalletService { old_wh: WalletHandle, new_wh: WalletHandle, mut migrate_fn: impl FnMut(Record) -> Result, E>, - ) -> IndyResult<()> + ) -> IndyResult where E: std::fmt::Display, { @@ -716,50 +722,113 @@ impl WalletService { let new_wallet = 
self.get_wallet(new_wh).await?; let mut records = old_wallet.get_all().await?; - let mut num_records = 0; + let total = records.get_total_count()?; + info!("Migrating {total:?} records"); + let mut num_record = 0; + let mut migration_result = MigrationResult { + migrated: 0, + skipped: 0, + duplicated: 0, + failed: 0, + }; + + while let Some(source_record) = records.next().await? { + num_record += 1; + if num_record % 1000 == 1 { + warn!( + "Migrating wallet record number {num_record} / {total:?}, intermediary \ + migration result: ${migration_result:?}" + ); + } + trace!("Migrating record: {:?}", source_record); + let unwrapped_type_ = match &source_record.type_ { + None => { + warn!( + "Skipping item missing 'type' field, record ({num_record}): \ + {source_record:?}" + ); + migration_result.skipped += 1; + continue; + } + Some(type_) => type_.clone(), + }; + let unwrapped_value = match &source_record.value { + None => { + warn!( + "Skipping item missing 'value' field, record ({num_record}): \ + {source_record:?}" + ); + migration_result.skipped += 1; + continue; + } + Some(value) => value.clone(), + }; + let unwrapped_tags = match &source_record.tags { + None => HashMap::new(), + Some(tags) => tags.clone(), + }; - while let Some(WalletRecord { - type_, - id, - value, - tags, - }) = records.next().await? - { - num_records += 1; let record = Record { - type_: type_.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No type fetched for exported record", - ) - })?, - id, - value: value.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No value fetched for exported record", - ) - })?, - tags: tags.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "No tags fetched for exported record", - ) - })?, + type_: unwrapped_type_, + id: source_record.id.clone(), + value: unwrapped_value, + tags: unwrapped_tags, + }; + + let migrated_record = match migrate_fn(record) { + Ok(record) => match record { + None => { + warn!("Skipping non-migratable record ({num_record}): {source_record:?}"); + migration_result.skipped += 1; + continue; + } + Some(record) => record, + }, + Err(err) => { + warn!( + "Skipping item due failed item migration, record ({num_record}): \ + {source_record:?}, err: {err}" + ); + migration_result.failed += 1; + continue; + } }; - if let Some(record) = migrate_fn(record) - .map_err(|e| IndyError::from_msg(IndyErrorKind::InvalidStructure, e.to_string()))? 
+ match new_wallet + .add( + &migrated_record.type_, + &migrated_record.id, + &migrated_record.value, + &migrated_record.tags, + false, + ) + .await { - new_wallet - .add(&record.type_, &record.id, &record.value, &record.tags) - .await?; + Err(err) => match err.kind() { + IndyErrorKind::WalletItemAlreadyExists => { + trace!( + "Record type: {migrated_record:?} already exists in destination \ + wallet, skipping" + ); + migration_result.duplicated += 1; + continue; + } + _ => { + error!( + "Error adding record {migrated_record:?} to destination wallet: \ + {err:?}" + ); + migration_result.failed += 1; + return Err(err); + } + }, + Ok(()) => { + migration_result.migrated += 1; + } } } - - debug!("{num_records} records have been migrated!"); - - Ok(()) + warn!("Migration of total {total:?} records completed, result: ${migration_result:?}"); + Ok(migration_result) } pub async fn export_wallet( @@ -907,6 +976,7 @@ impl WalletService { self.cache_hit_metrics.get_data() } + #[allow(clippy::type_complexity)] fn _get_config_and_cred_for_storage( &self, config: &Config, @@ -1070,7 +1140,7 @@ pub struct MetadataRaw { pub keys: Vec, } -#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct WalletRecord { #[serde(rename = "type")] type_: Option, @@ -1079,6 +1149,17 @@ pub struct WalletRecord { tags: Option, } +impl fmt::Debug for WalletRecord { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("WalletRecord") + .field("type_", &self.type_) + .field("id", &self.id) + .field("value", &self.value.as_ref().map(|_| "******")) + .field("tags", &self.tags) + .finish() + } +} + impl Ord for WalletRecord { fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { (&self.type_, &self.id).cmp(&(&other.type_, &other.id)) diff --git a/libvdrtools/indy-wallet/src/query_encryption.rs b/libvdrtools/indy-wallet/src/query_encryption.rs index e710e061ef..bf94a545c2 100644 --- a/libvdrtools/indy-wallet/src/query_encryption.rs +++ b/libvdrtools/indy-wallet/src/query_encryption.rs @@ -1,11 +1,11 @@ use indy_api_types::errors::prelude::*; +use indy_utils::wql::Query; use super::{ encryption::encrypt_as_searchable, language::{Operator, TagName, TargetValue}, wallet::Keys, }; -use indy_utils::wql::Query; // Performs encryption of WQL query // WQL query is provided as top-level Operator diff --git a/libvdrtools/indy-wallet/src/storage/default/mod.rs b/libvdrtools/indy-wallet/src/storage/default/mod.rs index cc5632c00d..d32790f65b 100644 --- a/libvdrtools/indy-wallet/src/storage/default/mod.rs +++ b/libvdrtools/indy-wallet/src/storage/default/mod.rs @@ -3,17 +3,16 @@ use std::{ fs, }; +use async_trait::async_trait; use indy_api_types::errors::prelude::*; use indy_utils::environment; +use log::LevelFilter; use serde::Deserialize; use sqlx::{ sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions}, ConnectOptions, SqlitePool, }; -use async_trait::async_trait; -use log::LevelFilter; - use crate::{ language, storage::{StorageIterator, StorageRecord, Tag, TagName, WalletStorage, WalletStorageType}, @@ -117,7 +116,6 @@ impl WalletStorage for SQLiteStorage { /// * `IndyError::Closed` - Storage is closed /// * `IndyError::ItemNotFound` - Item is not found in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn get(&self, type_: &[u8], id: &[u8], options: &str) -> IndyResult { let options: RecordOptions = serde_json::from_str(options).to_indy( 
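
// A minimal, self-contained sketch of the per-record bookkeeping the rewritten
// migrate_records in the lib.rs hunk above introduces: missing-field records and
// already-present records are counted and skipped, while any other storage error
// still aborts. `Tally` and `StoreError` are illustrative stand-ins, not types
// from the patch (the patch uses MigrationResult and IndyErrorKind).
#[derive(Debug, Default)]
struct Tally { migrated: u32, skipped: u32, duplicated: u32, failed: u32 }

enum StoreError { AlreadyExists, Other(String) }

fn handle_insert(result: Result<(), StoreError>, tally: &mut Tally) -> Result<(), StoreError> {
    match result {
        Ok(()) => { tally.migrated += 1; Ok(()) }
        // duplicate in the destination wallet: count it and keep migrating
        Err(StoreError::AlreadyExists) => { tally.duplicated += 1; Ok(()) }
        // any other failure is counted and still propagated, aborting the run
        Err(err) => { tally.failed += 1; Err(err) }
    }
}
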
IndyErrorKind::InvalidStructure, @@ -130,7 +128,7 @@ impl WalletStorage for SQLiteStorage { sqlx::query_as("SELECT id, value, key FROM items where type = ?1 AND name = ?2") .bind(type_) .bind(id) - .fetch_one(&mut conn) + .fetch_one(&mut *conn) .await?; let value = if options.retrieve_value { @@ -153,7 +151,7 @@ impl WalletStorage for SQLiteStorage { "SELECT name, value from tags_plaintext where item_id = ?", ) .bind(item_id) - .fetch_all(&mut conn) + .fetch_all(&mut *conn) .await? .drain(..) .map(|r| Tag::PlainText(r.0, r.1)), @@ -164,7 +162,7 @@ impl WalletStorage for SQLiteStorage { "SELECT name, value from tags_encrypted where item_id = ?", ) .bind(item_id) - .fetch_all(&mut conn) + .fetch_all(&mut *conn) .await? .drain(..) .map(|r| Tag::Encrypted(r.0, r.1)), @@ -206,7 +204,6 @@ impl WalletStorage for SQLiteStorage { /// * `IndyError::Closed` - Storage is closed /// * `IndyError::ItemAlreadyExists` - Item is already present in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn add( &self, type_: &[u8], @@ -221,7 +218,7 @@ impl WalletStorage for SQLiteStorage { .bind(id) .bind(&value.data) .bind(&value.key) - .execute(&mut tx) + .execute(&mut *tx) .await? .last_insert_rowid(); @@ -234,7 +231,7 @@ impl WalletStorage for SQLiteStorage { .bind(id) .bind(tag_name) .bind(tag_data) - .execute(&mut tx) + .execute(&mut *tx) .await? } Tag::PlainText(ref tag_name, ref tag_data) => { @@ -244,7 +241,7 @@ impl WalletStorage for SQLiteStorage { .bind(id) .bind(tag_name) .bind(tag_data) - .execute(&mut tx) + .execute(&mut *tx) .await? } }; @@ -261,9 +258,9 @@ impl WalletStorage for SQLiteStorage { sqlx::query("UPDATE items SET value = ?1, key = ?2 WHERE type = ?3 AND name = ?4") .bind(&value.data) .bind(&value.key) - .bind(&type_) - .bind(&id) - .execute(&mut tx) + .bind(type_) + .bind(id) + .execute(&mut *tx) .await? .rows_affected(); @@ -290,29 +287,31 @@ impl WalletStorage for SQLiteStorage { sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") .bind(type_) .bind(id) - .fetch_one(&mut tx) + .fetch_one(&mut *tx) .await?; for tag in tags { match *tag { Tag::Encrypted(ref tag_name, ref tag_data) => { sqlx::query( - "INSERT OR REPLACE INTO tags_encrypted (item_id, name, value) VALUES (?1, ?2, ?3)", + "INSERT OR REPLACE INTO tags_encrypted (item_id, name, value) VALUES (?1, \ + ?2, ?3)", ) .bind(item_id) .bind(tag_name) .bind(tag_data) - .execute(&mut tx) + .execute(&mut *tx) .await? } Tag::PlainText(ref tag_name, ref tag_data) => { sqlx::query( - "INSERT OR REPLACE INTO tags_plaintext (item_id, name, value) VALUES (?1, ?2, ?3)", + "INSERT OR REPLACE INTO tags_plaintext (item_id, name, value) VALUES (?1, \ + ?2, ?3)", ) .bind(item_id) .bind(tag_name) .bind(tag_data) - .execute(&mut tx) + .execute(&mut *tx) .await? } }; @@ -328,18 +327,18 @@ impl WalletStorage for SQLiteStorage { let (item_id,): (i64,) = sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") .bind(type_) - .bind(&id) - .fetch_one(&mut tx) + .bind(id) + .fetch_one(&mut *tx) .await?; sqlx::query("DELETE FROM tags_encrypted WHERE item_id = ?1") .bind(item_id) - .execute(&mut tx) + .execute(&mut *tx) .await?; sqlx::query("DELETE FROM tags_plaintext WHERE item_id = ?1") .bind(item_id) - .execute(&mut tx) + .execute(&mut *tx) .await?; for tag in tags { @@ -351,7 +350,7 @@ impl WalletStorage for SQLiteStorage { .bind(item_id) .bind(tag_name) .bind(tag_data) - .execute(&mut tx) + .execute(&mut *tx) .await? 
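
// A minimal sketch (assuming an existing sqlx 0.7 `SqlitePool` named `pool` and the
// `items` table from this storage backend) of the executor form the hunks in this
// file switch to: pooled connections and transactions are passed as `&mut *conn` /
// `&mut *tx`, i.e. a reborrow of the underlying connection, rather than `&mut conn`.
async fn count_items(pool: &sqlx::SqlitePool) -> Result<i64, sqlx::Error> {
    let mut conn = pool.acquire().await?;
    let (count,): (i64,) = sqlx::query_as("SELECT COUNT(*) FROM items")
        .fetch_one(&mut *conn) // `&mut conn` no longer satisfies the Executor bound here
        .await?;
    Ok(count)
}
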
} Tag::PlainText(ref tag_name, ref tag_data) => { @@ -361,7 +360,7 @@ impl WalletStorage for SQLiteStorage { .bind(item_id) .bind(tag_name) .bind(tag_data) - .execute(&mut tx) + .execute(&mut *tx) .await? } }; @@ -379,7 +378,7 @@ impl WalletStorage for SQLiteStorage { sqlx::query_as("SELECT id FROM items WHERE type = ?1 AND name = ?2") .bind(type_) .bind(id) - .fetch_one(&mut tx) + .fetch_one(&mut *tx) .await?; for tag_name in tag_names { @@ -388,14 +387,14 @@ impl WalletStorage for SQLiteStorage { sqlx::query("DELETE FROM tags_encrypted WHERE item_id = ?1 AND name = ?2") .bind(item_id) .bind(tag_name) - .execute(&mut tx) + .execute(&mut *tx) .await? } TagName::OfPlain(ref tag_name) => { sqlx::query("DELETE FROM tags_plaintext WHERE item_id = ?1 AND name = ?2") .bind(item_id) .bind(tag_name) - .execute(&mut tx) + .execute(&mut *tx) .await? } }; @@ -430,14 +429,13 @@ impl WalletStorage for SQLiteStorage { /// * `IndyError::Closed` - Storage is closed /// * `IndyError::ItemNotFound` - Item is not found in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn delete(&self, type_: &[u8], id: &[u8]) -> IndyResult<()> { let mut tx = self.pool.begin().await?; let rows_affected = sqlx::query("DELETE FROM items where type = ?1 AND name = ?2") .bind(type_) .bind(id) - .execute(&mut tx) + .execute(&mut *tx) .await? .rows_affected(); @@ -461,7 +459,7 @@ impl WalletStorage for SQLiteStorage { let mut conn = self.pool.acquire().await?; let (metadata,): (Vec,) = sqlx::query_as::<_, (Vec,)>("SELECT value FROM metadata") - .fetch_one(&mut conn) + .fetch_one(&mut *conn) .await?; Ok(metadata) @@ -472,7 +470,7 @@ impl WalletStorage for SQLiteStorage { sqlx::query("UPDATE metadata SET value = ?1") .bind(metadata) - .execute(&mut tx) + .execute(&mut *tx) .await?; tx.commit().await?; @@ -487,7 +485,7 @@ impl WalletStorage for SQLiteStorage { sqlx::query_as::<_, (i64, Vec, String)>( "SELECT item_id, name, value from tags_plaintext", ) - .fetch_all(&mut conn) + .fetch_all(&mut *conn) .await? .drain(..) .map(|r| (r.0, Tag::PlainText(r.1, r.2))), @@ -497,7 +495,7 @@ impl WalletStorage for SQLiteStorage { sqlx::query_as::<_, (i64, Vec, Vec)>( "SELECT item_id, name, value from tags_encrypted", ) - .fetch_all(&mut conn) + .fetch_all(&mut *conn) .await? .drain(..) .map(|r| (r.0, Tag::Encrypted(r.1, r.2))), @@ -512,7 +510,7 @@ impl WalletStorage for SQLiteStorage { let records: VecDeque<_> = sqlx::query_as::<_, (i64, Vec, Vec, Vec, Vec)>( "SELECT id, name, value, key, type FROM items", ) - .fetch_all(&mut conn) + .fetch_all(&mut *conn) .await? .drain(..) .map(|r| { @@ -565,7 +563,7 @@ impl WalletStorage for SQLiteStorage { } } - let mut records = query.fetch_all(&mut conn).await?; + let mut records = query.fetch_all(&mut *conn).await?; let mut mtags = if options.retrieve_tags && !records.is_empty() { let mut tags: Vec<(i64, Tag)> = Vec::new(); @@ -592,7 +590,7 @@ impl WalletStorage for SQLiteStorage { tags.extend( query - .fetch_all(&mut conn) + .fetch_all(&mut *conn) .await? .drain(..) .map(|r| (r.0, Tag::PlainText(r.1, r.2))), @@ -615,7 +613,7 @@ impl WalletStorage for SQLiteStorage { tags.extend( query - .fetch_all(&mut conn) + .fetch_all(&mut *conn) .await? .drain(..) 
.map(|r| (r.0, Tag::Encrypted(r.1, r.2))), @@ -673,7 +671,7 @@ impl WalletStorage for SQLiteStorage { } } - let (total_count,) = query.fetch_one(&mut conn).await?; + let (total_count,) = query.fetch_one(&mut *conn).await?; Some(total_count as usize) } else { None @@ -712,7 +710,6 @@ impl WalletStorageType for SQLiteStorageType { /// /// * `IndyError::NotFound` - File with the provided id not found /// * `IOError(..)` - Deletion of the file form the file-system failed - /// async fn delete_storage( &self, id: &str, @@ -764,7 +761,6 @@ impl WalletStorageType for SQLiteStorageType { /// * `IOError("Error occurred while creating wallet file:..)"` - Creation of schema failed /// * `IOError("Error occurred while inserting the keys...")` - Insertion of keys failed /// * `IOError(..)` - Deletion of the file form the file-system failed - /// async fn create_storage( &self, id: &str, @@ -906,7 +902,6 @@ impl WalletStorageType for SQLiteStorageType { /// /// * `IndyError::NotFound` - File with the provided id not found /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn open_storage( &self, id: &str, @@ -927,10 +922,10 @@ impl WalletStorageType for SQLiteStorageType { )); } - let mut connect_options = SqliteConnectOptions::new() + let connect_options = SqliteConnectOptions::new() .filename(db_path.as_path()) - .journal_mode(SqliteJournalMode::Wal); - connect_options.disable_statement_logging(); + .journal_mode(SqliteJournalMode::Wal) + .disable_statement_logging(); Ok(Box::new(SQLiteStorage { pool: SqlitePoolOptions::default() diff --git a/libvdrtools/indy-wallet/src/storage/default/query.rs b/libvdrtools/indy-wallet/src/storage/default/query.rs index e2ad6f5b79..d639b2c713 100644 --- a/libvdrtools/indy-wallet/src/storage/default/query.rs +++ b/libvdrtools/indy-wallet/src/storage/default/query.rs @@ -116,10 +116,7 @@ fn eq_to_sql<'a>( arguments: &mut Vec>, ) -> IndyResult { match (name, value) { - ( - &TagName::PlainTagName(ref queried_name), - &TargetValue::Unencrypted(ref queried_value), - ) => { + (TagName::PlainTagName(queried_name), TargetValue::Unencrypted(ref queried_value)) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); Ok( @@ -128,8 +125,8 @@ fn eq_to_sql<'a>( ) } ( - &TagName::EncryptedTagName(ref queried_name), - &TargetValue::Encrypted(ref queried_value), + TagName::EncryptedTagName(ref queried_name), + TargetValue::Encrypted(ref queried_value), ) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); @@ -151,10 +148,7 @@ fn neq_to_sql<'a>( arguments: &mut Vec>, ) -> IndyResult { match (name, value) { - ( - &TagName::PlainTagName(ref queried_name), - &TargetValue::Unencrypted(ref queried_value), - ) => { + (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); Ok( @@ -163,8 +157,8 @@ fn neq_to_sql<'a>( ) } ( - &TagName::EncryptedTagName(ref queried_name), - &TargetValue::Encrypted(ref queried_value), + TagName::EncryptedTagName(ref queried_name), + TargetValue::Encrypted(ref queried_value), ) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); @@ -186,10 +180,7 @@ fn gt_to_sql<'a>( arguments: &mut Vec>, ) -> IndyResult { match (name, value) { - ( - &TagName::PlainTagName(ref queried_name), - &TargetValue::Unencrypted(ref queried_value), - ) => { + (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { 
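
// A tiny illustration of the match-ergonomics cleanup applied throughout the
// query.rs hunks here: when the scrutinee is a reference, enum patterns bind their
// fields by reference automatically, so the explicit `&Variant(ref x)` spelling is
// redundant. `TagName` below is a simplified stand-in, not the patch's type.
enum TagName { Plain(String), Encrypted(String) }

fn name_of(tag: &TagName) -> &str {
    match tag {
        TagName::Plain(name) => name,     // binds as `&String`, no `ref` needed
        TagName::Encrypted(name) => name, // same, via the default binding mode
    }
}
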
arguments.push(queried_name.into()); arguments.push(queried_value.into()); Ok( @@ -210,10 +201,7 @@ fn gte_to_sql<'a>( arguments: &mut Vec>, ) -> IndyResult { match (name, value) { - ( - &TagName::PlainTagName(ref queried_name), - &TargetValue::Unencrypted(ref queried_value), - ) => { + (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); Ok( @@ -234,10 +222,7 @@ fn lt_to_sql<'a>( arguments: &mut Vec>, ) -> IndyResult { match (name, value) { - ( - &TagName::PlainTagName(ref queried_name), - &TargetValue::Unencrypted(ref queried_value), - ) => { + (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); Ok( @@ -258,10 +243,7 @@ fn lte_to_sql<'a>( arguments: &mut Vec>, ) -> IndyResult { match (name, value) { - ( - &TagName::PlainTagName(ref queried_name), - &TargetValue::Unencrypted(ref queried_value), - ) => { + (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); Ok( @@ -282,10 +264,7 @@ fn like_to_sql<'a>( arguments: &mut Vec>, ) -> IndyResult { match (name, value) { - ( - &TagName::PlainTagName(ref queried_name), - &TargetValue::Unencrypted(ref queried_value), - ) => { + (TagName::PlainTagName(ref queried_name), TargetValue::Unencrypted(ref queried_value)) => { arguments.push(queried_name.into()); arguments.push(queried_value.into()); Ok( diff --git a/libvdrtools/indy-wallet/src/storage/mysql/mod.rs b/libvdrtools/indy-wallet/src/storage/mysql/mod.rs index 5b70cec054..f479e8fd1f 100644 --- a/libvdrtools/indy-wallet/src/storage/mysql/mod.rs +++ b/libvdrtools/indy-wallet/src/storage/mysql/mod.rs @@ -122,12 +122,12 @@ impl MySqlStorageType { return Ok(connection.clone()); } - let mut my_sql_connect_options = MySqlConnectOptions::new() + let my_sql_connect_options = MySqlConnectOptions::new() .host(host_addr) .database(&config.db_name) .username(&credentials.user) - .password(&credentials.pass); - my_sql_connect_options.log_statements(LevelFilter::Debug); + .password(&credentials.pass) + .log_statements(LevelFilter::Debug); let connection = MySqlPoolOptions::default() .max_connections(config.connection_limit) @@ -169,7 +169,6 @@ impl WalletStorage for MySqlStorage { /// * `IndyError::Closed` - Storage is closed /// * `IndyError::ItemNotFound` - Item is not found in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn get(&self, type_: &[u8], id: &[u8], options: &str) -> IndyResult { let options: RecordOptions = serde_json::from_str(options).to_indy( IndyErrorKind::InvalidStructure, @@ -201,7 +200,7 @@ impl WalletStorage for MySqlStorage { .bind(self.wallet_id) .bind(&base64::encode(type_)) .bind(&base64::encode(id)) - .fetch_one(&mut conn) + .fetch_one(&mut *conn) .await?; let value = if let Some(value) = value { @@ -253,7 +252,6 @@ impl WalletStorage for MySqlStorage { /// * `IndyError::Closed` - Storage is closed /// * `IndyError::ItemAlreadyExists` - Item is already present in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn add( &self, type_: &[u8], @@ -273,8 +271,8 @@ impl WalletStorage for MySqlStorage { .bind(&base64::encode(id)) .bind(&value.to_bytes()) .bind(&_tags_to_json(tags)?) 
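
// A small sketch of the builder-style ConnectOptions usage the SQLite open_storage
// and MySqlStorageType hunks above move to with sqlx 0.7: the statement-logging
// setters consume and return the options, so they chain into the builder instead of
// mutating a `mut` binding in place. The file name is illustrative only.
use sqlx::{
    sqlite::{SqliteConnectOptions, SqliteJournalMode},
    ConnectOptions,
};

fn wallet_connect_options() -> SqliteConnectOptions {
    SqliteConnectOptions::new()
        .filename("wallet.db")
        .journal_mode(SqliteJournalMode::Wal)
        .disable_statement_logging()
}
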
- .bind(&self.wallet_id) - .execute(&mut tx) + .bind(self.wallet_id) + .execute(&mut *tx) .await?; tx.commit().await?; @@ -296,8 +294,8 @@ impl WalletStorage for MySqlStorage { .bind(&value.to_bytes()) .bind(&base64::encode(type_)) .bind(&base64::encode(id)) - .bind(&self.wallet_id) - .execute(&mut tx) + .bind(self.wallet_id) + .execute(&mut *tx) .await? .rows_affected(); @@ -343,8 +341,8 @@ impl WalletStorage for MySqlStorage { )) .bind(&base64::encode(type_)) .bind(&base64::encode(id)) - .bind(&self.wallet_id) - .execute(&mut tx) + .bind(self.wallet_id) + .execute(&mut *tx) .await? .rows_affected(); @@ -379,8 +377,8 @@ impl WalletStorage for MySqlStorage { .bind(&_tags_to_json(tags)?) .bind(&base64::encode(type_)) .bind(&base64::encode(id)) - .bind(&self.wallet_id) - .execute(&mut tx) + .bind(self.wallet_id) + .execute(&mut *tx) .await? .rows_affected(); @@ -426,8 +424,8 @@ impl WalletStorage for MySqlStorage { )) .bind(&base64::encode(type_)) .bind(&base64::encode(id)) - .bind(&self.wallet_id) - .execute(&mut tx) + .bind(self.wallet_id) + .execute(&mut *tx) .await? .rows_affected(); @@ -472,7 +470,6 @@ impl WalletStorage for MySqlStorage { /// * `IndyError::Closed` - Storage is closed /// * `IndyError::ItemNotFound` - Item is not found in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn delete(&self, type_: &[u8], id: &[u8]) -> IndyResult<()> { let mut tx = self.write_pool.begin().await?; @@ -485,8 +482,8 @@ impl WalletStorage for MySqlStorage { ) .bind(&base64::encode(type_)) .bind(&base64::encode(id)) - .bind(&self.wallet_id) - .execute(&mut tx) + .bind(self.wallet_id) + .execute(&mut *tx) .await? .rows_affected(); @@ -516,8 +513,8 @@ impl WalletStorage for MySqlStorage { WHERE id = ? 
"#, ) - .bind(&self.wallet_id) - .fetch_one(&mut conn) + .bind(self.wallet_id) + .fetch_one(&mut *conn) .await?; base64::decode(&metadata) @@ -534,8 +531,8 @@ impl WalletStorage for MySqlStorage { "#, ) .bind(base64::encode(metadata)) - .bind(&self.wallet_id) - .execute(&mut tx) + .bind(self.wallet_id) + .execute(&mut *tx) .await?; tx.commit().await?; @@ -614,7 +611,7 @@ impl WalletStorage for MySqlStorage { } } - let (total_count,) = query.fetch_one(&mut conn).await?; + let (total_count,) = query.fetch_one(&mut *conn).await?; Some(total_count as usize) } else { None @@ -713,7 +710,6 @@ impl WalletStorageType for MySqlStorageType { /// /// * `IndyError::NotFound` - File with the provided id not found /// * `IOError(..)` - Deletion of the file form the file-system failed - /// async fn delete_storage( &self, id: &str, @@ -754,7 +750,7 @@ impl WalletStorageType for MySqlStorageType { "#, ) .bind(id) - .execute(&mut tx) + .execute(&mut *tx) .await; let rows_affected = res?.rows_affected(); @@ -802,7 +798,6 @@ impl WalletStorageType for MySqlStorageType { /// * `IOError("Error occurred while creating wallet file:..)"` - Creation of schema failed /// * `IOError("Error occurred while inserting the keys...")` - Insertion of keys failed /// * `IOError(..)` - Deletion of the file form the file-system failed - /// async fn create_storage( &self, id: &str, @@ -831,11 +826,11 @@ impl WalletStorageType for MySqlStorageType { "Absent credentials json", ))?; - let mut my_sql_connect_options = MySqlConnectOptions::new() + let my_sql_connect_options = MySqlConnectOptions::new() .host(&config.write_host) .username(&credentials.user) - .password(&credentials.pass); - my_sql_connect_options.log_statements(LevelFilter::Debug); + .password(&credentials.pass) + .log_statements(LevelFilter::Debug); let mut pool = MySqlPoolOptions::default() .max_connections(1) @@ -853,7 +848,7 @@ impl WalletStorageType for MySqlStorageType { "CREATE DATABASE IF NOT EXISTS `{}`;", config.db_name )) - .execute(&mut con) + .execute(&mut *con) .await?; // Replace the previous single use pool @@ -873,7 +868,7 @@ impl WalletStorageType for MySqlStorageType { PRIMARY KEY (`wallet_id`, `type`, `name`) );"#, ) - .execute(&mut con) + .execute(&mut *con) .await?; sqlx::query( @@ -885,7 +880,7 @@ impl WalletStorageType for MySqlStorageType { PRIMARY KEY (`id`) );"#, ) - .execute(&mut con) + .execute(&mut *con) .await?; let mut tx = pool.begin().await?; @@ -898,7 +893,7 @@ impl WalletStorageType for MySqlStorageType { ) .bind(id) .bind(base64::encode(metadata)) - .execute(&mut tx) + .execute(&mut *tx) .await; match res { @@ -943,7 +938,6 @@ impl WalletStorageType for MySqlStorageType { /// /// * `IndyError::NotFound` - File with the provided id not found /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query - /// async fn open_storage( &self, id: &str, @@ -1002,20 +996,29 @@ impl WalletStorageType for MySqlStorageType { } #[cfg(test)] +#[allow(clippy::all)] mod tests { - #[allow(unused_imports)] - use indy_utils::{assert_kind, environment}; + use indy_utils::environment; use super::{super::Tag, *}; - // docker run --name indy-mysql -e MYSQL_ROOT_PASSWORD=pass@word1 -p 3306:3306 -d mysql:latest + #[allow(unused_macros)] + macro_rules! 
assert_kind { + ($kind:expr, $var:expr) => { + match $var { + Err(e) => assert_eq!($kind, e.kind()), + _ => assert!(false, "Result expected to be error"), + } + }; + } #[async_std::test] #[cfg(feature = "benchmark")] async fn mysql_storage_sync_send() { - use futures::{channel::oneshot, executor::ThreadPool, future::join_all}; use std::{sync::Arc, time::SystemTime}; + use futures::{channel::oneshot, executor::ThreadPool, future::join_all}; + let count = 1000; let executor = ThreadPool::new().expect("Failed to new ThreadPool"); let storage_type = Arc::new(Box::new(MySqlStorageType::new())); @@ -2028,11 +2031,11 @@ mod tests { } fn _metadata() -> Vec { - return vec![ + vec![ 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, - ]; + ] } fn _type(i: u8) -> Vec { @@ -2136,10 +2139,10 @@ mod tests { } fn _tags() -> Vec { - let mut tags: Vec = Vec::new(); - tags.push(Tag::Encrypted(vec![1, 5, 8], vec![3, 5, 6])); - tags.push(Tag::PlainText(vec![1, 5, 8, 1], "Plain value".to_string())); - tags + vec![ + Tag::Encrypted(vec![1, 5, 8], vec![3, 5, 6]), + Tag::PlainText(vec![1, 5, 8, 1], "Plain value".to_string()), + ] } fn _new_tags() -> Vec { diff --git a/libvdrtools/indy-wallet/src/wallet.rs b/libvdrtools/indy-wallet/src/wallet.rs index 1146245004..e21d29afff 100644 --- a/libvdrtools/indy-wallet/src/wallet.rs +++ b/libvdrtools/indy-wallet/src/wallet.rs @@ -1,12 +1,11 @@ use std::{collections::HashMap, sync::Arc}; +use futures::future::join; use indy_api_types::errors::prelude::*; - use indy_utils::{ crypto::{chacha20poly1305_ietf, hmacsha256}, wql::Query, }; - use serde::{Deserialize, Serialize}; use zeroize::Zeroize; @@ -19,7 +18,6 @@ use crate::{ storage::StorageRecord, RecordOptions, WalletRecord, }; -use futures::future::join; #[derive(Serialize, Deserialize)] pub(super) struct Keys { @@ -121,7 +119,8 @@ impl EncryptedValue { #[allow(dead_code)] pub fn from_bytes(joined_data: &[u8]) -> IndyResult { - // value_key is stored as NONCE || CYPHERTEXT. Lenth of CYPHERTHEXT is length of DATA + length of TAG. + // value_key is stored as NONCE || CYPHERTEXT. Lenth of CYPHERTHEXT is length of DATA + + // length of TAG. 
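
// A minimal sketch of the `cache_record` gating the Wallet::add hunk just below
// introduces: the storage write always happens, the cache write only when the caller
// asks for it (regular adds pass `true`; the migration path above passes `false`,
// presumably so bulk-migrated records do not fill the wallet cache). The types here
// are illustrative stand-ins for the patch's storage and cache.
struct Store { records: Vec<(String, String)>, cache: Vec<(String, String)> }

impl Store {
    fn add(&mut self, id: &str, value: &str, cache_record: bool) {
        self.records.push((id.to_string(), value.to_string()));
        if cache_record {
            self.cache.push((id.to_string(), value.to_string()));
        }
    }
}
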
if joined_data.len() < ENCRYPTED_KEY_LEN { return Err(err_msg( IndyErrorKind::InvalidStructure, @@ -166,6 +165,7 @@ impl Wallet { name: &str, value: &str, tags: &HashMap, + cache_record: bool, ) -> IndyResult<()> { let etype = encrypt_as_searchable( type_.as_bytes(), @@ -189,7 +189,9 @@ impl Wallet { ); self.storage.add(&etype, &ename, &evalue, &etags).await?; - self.cache.add(type_, &etype, &ename, &evalue, &etags); + if cache_record { + self.cache.add(type_, &etype, &ename, &evalue, &etags); + } Ok(()) } @@ -472,7 +474,7 @@ impl Wallet { Ok(WalletIterator::new(all_items, self.keys.clone())) } - pub fn get_id<'a>(&'a self) -> &'a str { + pub fn get_id(&self) -> &str { &self.id } } diff --git a/libvdrtools/mac.build.sh b/libvdrtools/mac.build.sh deleted file mode 100755 index db29ff9783..0000000000 --- a/libvdrtools/mac.build.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash - -onred='\033[41m' -ongreen='\033[42m' -onyellow='\033[43m' -endcolor="\033[0m" - -# Handle errors -set -e -error_report() { - echo -e "${onred}Error: failed on line $1.$endcolor" -} -trap 'error_report $LINENO' ERR - -echo -e "${onyellow}Installing libvdrtools...$endcolor" - -function brew_install { - if brew ls --versions $1 >/dev/null; then - if [[ $(brew outdated $1) ]]; then - HOMEBREW_NO_AUTO_UPDATE=1 brew upgrade $1 - fi - else - HOMEBREW_NO_AUTO_UPDATE=1 brew install $1 - fi -} - -if [[ "$OSTYPE" == "darwin"* ]]; then - xcode-select --version || xcode-select --install - brew --version || yes | /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" - cmake --version || brew install cmake # brew install cmake throws error, not warning if already installed - curl https://sh.rustup.rs -sSf | sh -s -- -y - export PATH="$HOME/.cargo/bin:$PATH" # so can use cargo without relog - brew_install pkg-config - brew_install libsodium - brew_install automake - brew_install autoconf - brew_install openssl - brew_install zeromq - brew_install zmq - export PKG_CONFIG_ALLOW_CROSS=1 - export CARGO_INCREMENTAL=1 - export RUST_LOG=indy=trace - export RUST_TEST_THREADS=1 - export OPENSSL_DIR=/usr/local/opt/`ls /usr/local/opt/ | grep openssl | sort | tail -1` - cargo build - export LIBRARY_PATH=$(pwd)/target/debug - cd ../cli - cargo build - echo 'export DYLD_LIBRARY_PATH='$LIBRARY_PATH' -export LD_LIBRARY_PATH='$LIBRARY_PATH >> ~/.bash_profile - echo -e "${ongreen}libvdrtools installed.$endcolor" -else - echo -e "${onred}You are not running MacOS. 
This is a MacOS installer.$endcolor" -fi - - diff --git a/libvdrtools/rustfmt.toml b/libvdrtools/rustfmt.toml deleted file mode 100644 index d9ba5fdb90..0000000000 --- a/libvdrtools/rustfmt.toml +++ /dev/null @@ -1 +0,0 @@ -imports_granularity = "Crate" \ No newline at end of file diff --git a/libvdrtools/src/controllers/anoncreds/issuer.rs b/libvdrtools/src/controllers/anoncreds/issuer.rs deleted file mode 100644 index f0f9e104a5..0000000000 --- a/libvdrtools/src/controllers/anoncreds/issuer.rs +++ /dev/null @@ -1,1505 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - sync::Arc, -}; - -// use async_std::task::spawn_blocking; -use indy_api_types::{domain::wallet::Tags, errors::prelude::*, WalletHandle}; -use indy_wallet::{RecordOptions, WalletService}; - -use ursa::cl::{ - new_nonce, CredentialKeyCorrectnessProof, CredentialPrivateKey, - RevocationRegistryDelta as CryptoRevocationRegistryDelta, Witness, -}; - -pub use crate::{ - domain::{ - anoncreds::{ - credential::{Credential, CredentialValues}, - credential_definition::{ - CredentialDefinition, CredentialDefinitionConfig, - CredentialDefinitionCorrectnessProof, CredentialDefinitionData, - CredentialDefinitionId, CredentialDefinitionPrivateKey, CredentialDefinitionV1, - SignatureType, TemporaryCredentialDefinition, - }, - credential_offer::CredentialOffer, - credential_request::CredentialRequest, - revocation_registry::{RevocationRegistry, RevocationRegistryV1}, - revocation_registry_definition::{ - IssuanceType, RegistryType, RevocationRegistryConfig, RevocationRegistryDefinition, - RevocationRegistryDefinitionPrivate, RevocationRegistryDefinitionV1, - RevocationRegistryDefinitionValue, RevocationRegistryId, RevocationRegistryInfo, - }, - revocation_registry_delta::{RevocationRegistryDelta, RevocationRegistryDeltaV1}, - schema::{AttributeNames, Schema, SchemaId, SchemaV1}, - }, - crypto::did::DidValue, - }, - services::{AnoncredsHelpers, BlobStorageService, CryptoService, IssuerService}, -}; - -use super::tails::{store_tails_from_generator, SDKTailsAccessor}; - -pub struct IssuerController { - pub issuer_service: Arc, - pub blob_storage_service: Arc, - pub wallet_service: Arc, - pub crypto_service: Arc, -} - -impl IssuerController { - pub fn new( - issuer_service: Arc, - blob_storage_service: Arc, - wallet_service: Arc, - crypto_service: Arc, - ) -> IssuerController { - IssuerController { - issuer_service, - blob_storage_service, - wallet_service, - crypto_service, - } - } - - /* - These functions wrap the Ursa algorithm as documented in this paper: - https://github.com/hyperledger/ursa/blob/master/libursa/docs/AnonCred.pdf - - And is documented in this HIPE: - https://github.com/hyperledger/indy-hipe/blob/c761c583b1e01c1e9d3ceda2b03b35336fdc8cc1/text/anoncreds-protocol/README.md - */ - - /// Create credential schema entity that describes credential attributes list and allows credentials - /// interoperability. - /// - /// Schema is public and intended to be shared with all anoncreds workflow actors usually by publishing SCHEMA transaction - /// to Indy distributed ledger. - /// - /// It is IMPORTANT for current version POST Schema in Ledger and after that GET it from Ledger - /// with correct seq_no to save compatibility with Ledger. - /// After that can call indy_issuer_create_and_store_credential_def to build corresponding Credential Definition. 
- /// - /// #Params - - /// issuer_did: DID of schema issuer - /// name: a name the schema - /// version: a version of the schema - /// attrs: a list of schema attributes descriptions (the number of attributes should be less or equal than 125) - /// `["attr1", "attr2"]` - /// - /// #Returns - /// schema_id: identifier of created schema - /// schema_json: schema as json: - /// { - /// id: identifier of schema - /// attrNames: array of attribute name strings - /// name: schema's name string - /// version: schema's version string, - /// ver: version of the Schema json - /// } - /// - /// #Errors - /// Common* - /// Anoncreds* - pub fn create_schema( - &self, - issuer_did: DidValue, - name: String, - version: String, - attrs: AttributeNames, - ) -> IndyResult<(String, String)> { - trace!( - "create_schema > issuer_did {:?} name {:?} version {:?} attrs {:?}", - issuer_did, - name, - version, - attrs - ); - - self.crypto_service.validate_did(&issuer_did)?; - - let schema_id = SchemaId::new(&issuer_did, &name, &version)?; - - let schema = Schema::SchemaV1(SchemaV1 { - id: schema_id.clone(), - name, - version, - attr_names: attrs, - seq_no: None, - }); - - let schema_json = serde_json::to_string(&schema) - .to_indy(IndyErrorKind::InvalidState, "Cannot serialize Schema")?; - - let res = Ok((schema_id.0, schema_json)); - trace!("create_schema < {:?}", res); - res - } - - /// Create credential definition entity that encapsulates credentials issuer DID, credential schema, secrets used for signing credentials - /// and secrets used for credentials revocation. - /// - /// Credential definition entity contains private and public parts. Private part will be stored in the wallet. Public part - /// will be returned as json intended to be shared with all anoncreds workflow actors usually by publishing CRED_DEF transaction - /// to Indy distributed ledger. - /// - /// It is IMPORTANT for current version GET Schema from Ledger with correct seq_no to save compatibility with Ledger. - /// - /// Note: Use combination of `indy_issuer_rotate_credential_def_start` and `indy_issuer_rotate_credential_def_apply` functions - /// to generate new keys for an existing credential definition. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). - /// issuer_did: a DID of the issuer - /// schema_json: credential schema as a json: { - /// id: identifier of schema - /// attrNames: array of attribute name strings - /// name: schema's name string - /// version: schema's version string, - /// seqNo: (Optional) schema's sequence number on the ledger, - /// ver: version of the Schema json - /// } - /// tag: any string that allows to distinguish between credential definitions for the same issuer and schema - /// signature_type: credential definition type (optional, 'CL' by default) that defines credentials signature and revocation math. 
- /// Supported signature types: - /// - 'CL': Camenisch-Lysyanskaya credential signature type that is implemented according to the algorithm in this paper: - /// https://github.com/hyperledger/ursa/blob/master/libursa/docs/AnonCred.pdf - /// And is documented in this HIPE: - /// https://github.com/hyperledger/indy-hipe/blob/c761c583b1e01c1e9d3ceda2b03b35336fdc8cc1/text/anoncreds-protocol/README.md - /// config_json: (optional) type-specific configuration of credential definition as json: - /// - 'CL': - /// { - /// "support_revocation" - bool (optional, default false) whether to request non-revocation credential - /// } - /// - /// #Returns - /// cred_def_id: identifier of created credential definition - /// cred_def_json: public part of created credential definition - /// { - /// id: string - identifier of credential definition - /// schemaId: string - identifier of stored in ledger schema - /// type: string - type of the credential definition. CL is the only supported type now. - /// tag: string - allows to distinct between credential definitions for the same issuer and schema - /// value: Dictionary with Credential Definition's data is depended on the signature type: { - /// primary: primary credential public key, - /// Optional: revocation credential public key - /// }, - /// ver: Version of the CredDef json - /// } - /// - /// Note: `primary` and `revocation` fields of credential definition are complex opaque types that contain data structures internal to Ursa. - /// They should not be parsed and are likely to change in future versions. - /// - /// #Errors - /// Common* - /// Wallet* - /// Anoncreds* - pub async fn create_and_store_credential_definition( - &self, - wallet_handle: WalletHandle, - issuer_did: DidValue, - schema: Schema, - tag: String, - type_: Option, - config: Option, - ) -> IndyResult<(String, String)> { - trace!( - "create_and_store_credential_definition > wallet_handle {:?} \ - issuer_did {:?} schema {:?} tag {:?} \ - type_ {:?}, config {:?}", - wallet_handle, - issuer_did, - schema, - tag, - type_, - config - ); - - let mut schema = SchemaV1::from(schema); - - match (issuer_did.get_method(), schema.id.get_method()) { - (None, Some(_)) => { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "You can't use unqualified Did with fully qualified Schema", - )); - } - (Some(prefix_), None) => { - schema.id = schema.id.qualify(&prefix_)?; - } - _ => {} - }; - - let cred_def_config = config.unwrap_or_default(); - - let signature_type = if let Some(type_) = type_ { - serde_json::from_str::(&format!("\"{}\"", type_)).to_indy( - IndyErrorKind::InvalidStructure, - "Invalid Signature Type format", - )? 
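
// A stand-alone sketch of the quote-wrapping trick used by the removed issuer code
// just above to turn a bare string such as "CL" into a serde-deserialized enum value:
// wrapping it in quotes makes it a valid JSON string, which serde maps onto the unit
// variant of the same name. The single-variant enum below is illustrative only.
use serde::Deserialize;

#[derive(Deserialize, Debug, PartialEq)]
enum SignatureType { CL }

fn parse_signature_type(raw: &str) -> serde_json::Result<SignatureType> {
    serde_json::from_str(&format!("\"{raw}\""))
}

// parse_signature_type("CL") == Ok(SignatureType::CL)
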
- } else { - SignatureType::CL - }; - - let schema_id = schema - .seq_no - .map(|n| SchemaId(n.to_string())) - .unwrap_or_else(|| schema.id.clone()); - - let cred_def_id = - CredentialDefinitionId::new(&issuer_did, &schema_id, signature_type.to_str(), &tag)?; - - let cred_def = self - .wallet_service - .get_indy_record_value::( - wallet_handle, - &cred_def_id.0, - &RecordOptions::id_value(), - ) - .await; - - if let Ok(cred_def) = cred_def { - let res = Ok((cred_def_id.0, cred_def)); - - trace!( - "create_and_store_credential_definition < already exists {:?}", - res - ); - - return res; - } - - let tag = tag.to_string(); - let attr_names = schema.attr_names.clone(); - - let (credential_definition_value, cred_priv_key, cred_key_correctness_proof) = self - ._create_credential_definition(&attr_names, cred_def_config.support_revocation) - .await?; - - let cred_def = CredentialDefinition::CredentialDefinitionV1(CredentialDefinitionV1 { - id: cred_def_id.clone(), - schema_id: schema_id.clone(), - signature_type, - tag, - value: credential_definition_value, - }); - - let cred_def_priv_key = CredentialDefinitionPrivateKey { - value: cred_priv_key, - }; - - let cred_def_correctness_proof = CredentialDefinitionCorrectnessProof { - value: cred_key_correctness_proof, - }; - - let schema_ = Schema::SchemaV1(schema.clone()); - - let cred_def_json = self - .wallet_service - .add_indy_object(wallet_handle, &cred_def_id.0, &cred_def, &HashMap::new()) - .await?; - - self.wallet_service - .add_indy_object( - wallet_handle, - &cred_def_id.0, - &cred_def_priv_key, - &HashMap::new(), - ) - .await?; - - self.wallet_service - .add_indy_object( - wallet_handle, - &cred_def_id.0, - &cred_def_correctness_proof, - &HashMap::new(), - ) - .await?; - - let _ = self - .wallet_service - .add_indy_object(wallet_handle, &schema_id.0, &schema_, &HashMap::new()) - .await - .ok(); - - let schema_id = schema.id.clone(); - - self._wallet_set_schema_id(wallet_handle, &cred_def_id.0, &schema_id) - .await?; // TODO: FIXME delete temporary storing of schema id - - let res = Ok((cred_def_id.0, cred_def_json)); - trace!("create_and_store_credential_definition < {:?}", res); - res - } - - async fn _create_credential_definition( - &self, - attr_names: &AttributeNames, - support_revocation: bool, - ) -> IndyResult<( - CredentialDefinitionData, - CredentialPrivateKey, - CredentialKeyCorrectnessProof, - )> { - // let attr_names = attr_names.clone(); - - IssuerService::new_credential_definition(attr_names, support_revocation) - // let res = spawn_blocking(move || { - // IssuerService::new_credential_definition(&attr_names, support_revocation) - // }) - // .await?; - - // Ok(res) - } - - /// Generate temporary credential definitional keys for an existing one (owned by the caller of the library). - /// - /// Use `indy_issuer_rotate_credential_def_apply` function to set generated temporary keys as the main. - /// - /// WARNING: Rotating the credential definitional keys will result in making all credentials issued under the previous keys unverifiable. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). 
- /// cred_def_id: an identifier of created credential definition stored in the wallet - /// config_json: (optional) type-specific configuration of credential definition as json: - /// - 'CL': - /// { - /// "support_revocation" - bool (optional, default false) whether to request non-revocation credential - /// } - /// - /// #Returns - /// cred_def_json: public part of temporary created credential definition - /// { - /// id: string - identifier of credential definition - /// schemaId: string - identifier of stored in ledger schema - /// type: string - type of the credential definition. CL is the only supported type now. - /// tag: string - allows to distinct between credential definitions for the same issuer and schema - /// value: Dictionary with Credential Definition's data is depended on the signature type: { - /// primary: primary credential public key, - /// Optional: revocation credential public key - /// }, - only this field differs from the original credential definition - /// ver: Version of the CredDef json - /// } - /// - /// Note: `primary` and `revocation` fields of credential definition are complex opaque types that contain data structures internal to Ursa. - /// They should not be parsed and are likely to change in future versions. - /// - /// #Errors - /// Common* - /// Wallet* - /// Anoncreds* - pub async fn rotate_credential_definition_start( - &self, - wallet_handle: WalletHandle, - cred_def_id: CredentialDefinitionId, - cred_def_config: Option, - ) -> IndyResult { - trace!( - "rotate_credential_definition_start > \ - wallet_handle {:?} cred_def_id {:?} cred_def_config {:?}", - wallet_handle, - cred_def_id, - cred_def_config - ); - - let cred_def = self - .wallet_service - .get_indy_object::( - wallet_handle, - &cred_def_id.0, - &RecordOptions::id_value(), - ) - .await?; - - let cred_def = CredentialDefinitionV1::from(cred_def); - - let temp_cred_def = self - .wallet_service - .get_indy_object::( - wallet_handle, - &cred_def_id.0, - &RecordOptions::id_value(), - ) - .await; - - if let Ok(temp_cred_def) = temp_cred_def { - let cred_def_json = serde_json::to_string(&temp_cred_def.cred_def).to_indy( - IndyErrorKind::InvalidState, - "Can't serialize CredentialDefinition", - )?; - - let res = Ok(cred_def_json); - - trace!( - "rotate_credential_definition_start < already exists {:?}", - res - ); - - return res; - } - - let schema = self - .wallet_service - .get_indy_object::( - wallet_handle, - &cred_def.schema_id.0, - &RecordOptions::id_value(), - ) - .await?; - - let schema = SchemaV1::from(schema); - - let support_revocation = cred_def_config - .map(|config| config.support_revocation) - .unwrap_or_default(); - - let (credential_definition_value, cred_priv_key, cred_key_correctness_proof) = self - ._create_credential_definition(&schema.attr_names, support_revocation) - .await?; - - let cred_def = CredentialDefinition::CredentialDefinitionV1(CredentialDefinitionV1 { - id: cred_def_id.clone(), - schema_id: cred_def.schema_id.clone(), - signature_type: cred_def.signature_type.clone(), - tag: cred_def.tag.clone(), - value: credential_definition_value, - }); - - let cred_def_priv_key = CredentialDefinitionPrivateKey { - value: cred_priv_key, - }; - - let cred_def_correctness_proof = CredentialDefinitionCorrectnessProof { - value: cred_key_correctness_proof, - }; - - let cred_def_json = ::serde_json::to_string(&cred_def).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize CredentialDefinition", - )?; - - let temp_cred_def = TemporaryCredentialDefinition { - cred_def, - 
cred_def_priv_key, - cred_def_correctness_proof, - }; - - self.wallet_service - .add_indy_object( - wallet_handle, - &cred_def_id.0, - &temp_cred_def, - &HashMap::new(), - ) - .await?; - - let res = Ok(cred_def_json); - trace!("rotate_credential_definition_start < {:?}", res); - res - } - - /// Apply temporary keys as main for an existing Credential Definition (owned by the caller of the library). - /// - /// WARNING: Rotating the credential definitional keys will result in making all credentials issued under the previous keys unverifiable. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). - /// cred_def_id: an identifier of created credential definition stored in the wallet - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Anoncreds* - pub async fn rotate_credential_definition_apply( - &self, - wallet_handle: WalletHandle, - cred_def_id: CredentialDefinitionId, - ) -> IndyResult<()> { - trace!( - "rotate_credential_definition_apply > wallet_handle {:?} cred_def_id {:?}", - wallet_handle, - cred_def_id - ); - - let _cred_def: CredentialDefinition = self - .wallet_service - .get_indy_object(wallet_handle, &cred_def_id.0, &RecordOptions::id_value()) - .await?; - - let temp_cred_def: TemporaryCredentialDefinition = self - .wallet_service - .get_indy_object(wallet_handle, &cred_def_id.0, &RecordOptions::id_value()) - .await?; - - self.wallet_service - .update_indy_object(wallet_handle, &cred_def_id.0, &temp_cred_def.cred_def) - .await?; - - self.wallet_service - .update_indy_object( - wallet_handle, - &cred_def_id.0, - &temp_cred_def.cred_def_priv_key, - ) - .await?; - - self.wallet_service - .update_indy_object( - wallet_handle, - &cred_def_id.0, - &temp_cred_def.cred_def_correctness_proof, - ) - .await?; - - self.wallet_service - .delete_indy_record::(wallet_handle, &cred_def_id.0) - .await?; - - trace!("rotate_credential_definition_apply <<<"); - Ok(()) - } - - /// Create a new revocation registry for the given credential definition as tuple of entities - /// - Revocation registry definition that encapsulates credentials definition reference, revocation type specific configuration and - /// secrets used for credentials revocation - /// - Revocation registry state that stores the information about revoked entities in a non-disclosing way. The state can be - /// represented as ordered list of revocation registry entries were each entry represents the list of revocation or issuance operations. - /// - /// Revocation registry definition entity contains private and public parts. Private part will be stored in the wallet. Public part - /// will be returned as json intended to be shared with all anoncreds workflow actors usually by publishing REVOC_REG_DEF transaction - /// to Indy distributed ledger. - /// - /// Revocation registry state is stored on the wallet and also intended to be shared as the ordered list of REVOC_REG_ENTRY transactions. - /// This call initializes the state in the wallet and returns the initial entry. - /// - /// Some revocation registry types (for example, 'CL_ACCUM') can require generation of binary blob called tails used to hide information about revoked credentials in public - /// revocation registry and intended to be distributed out of leger (REVOC_REG_DEF transaction will still contain uri and hash of tails). - /// This call requires access to pre-configured blob storage writer instance handle that will allow to write generated tails. 
- /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). - /// issuer_did: a DID of the issuer - /// revoc_def_type: revocation registry type (optional, default value depends on credential definition type). Supported types are: - /// - 'CL_ACCUM': Type-3 pairing based accumulator implemented according to the algorithm in this paper: - /// https://github.com/hyperledger/ursa/blob/master/libursa/docs/AnonCred.pdf - /// This type is default for 'CL' credential definition type. - /// tag: any string that allows to distinct between revocation registries for the same issuer and credential definition - /// cred_def_id: id of stored in ledger credential definition - /// config_json: type-specific configuration of revocation registry as json: - /// - 'CL_ACCUM': { - /// "issuance_type": (optional) type of issuance. Currently supported: - /// 1) ISSUANCE_BY_DEFAULT: all indices are assumed to be issued and initial accumulator is calculated over all indices; - /// Revocation Registry is updated only during revocation. - /// 2) ISSUANCE_ON_DEMAND: nothing is issued initially accumulator is 1 (used by default); - /// "max_cred_num": maximum number of credentials the new registry can process (optional, default 100000) - /// } - /// tails_writer_handle: handle of blob storage to store tails (returned by `indy_open_blob_storage_writer`). - /// - /// NOTE: - /// Recursive creation of folder for Default Tails Writer (correspondent to `tails_writer_handle`) - /// in the system-wide temporary directory may fail in some setup due to permissions: `IO error: Permission denied`. - /// In this case use `TMPDIR` environment variable to define temporary directory specific for an application. - /// - /// #Returns - /// revoc_reg_id: identifier of created revocation registry definition - /// revoc_reg_def_json: public part of revocation registry definition - /// { - /// "id": string - ID of the Revocation Registry, - /// "revocDefType": string - Revocation Registry type (only CL_ACCUM is supported for now), - /// "tag": string - Unique descriptive ID of the Registry, - /// "credDefId": string - ID of the corresponding CredentialDefinition, - /// "value": Registry-specific data { - /// "issuanceType": string - Type of Issuance(ISSUANCE_BY_DEFAULT or ISSUANCE_ON_DEMAND), - /// "maxCredNum": number - Maximum number of credentials the Registry can serve. - /// "tailsHash": string - Hash of tails. - /// "tailsLocation": string - Location of tails file. - /// "publicKeys": - Registry's public key (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). - /// }, - /// "ver": string - version of revocation registry definition json. - /// } - /// revoc_reg_entry_json: revocation registry entry that defines initial state of revocation registry - /// { - /// value: { - /// prevAccum: string - previous accumulator value. - /// accum: string - current accumulator value. - /// issued: array - an array of issued indices. - /// revoked: array an array of revoked indices. 
- /// }, - /// ver: string - version revocation registry entry json - /// } - /// - /// #Errors - /// Common* - /// Wallet* - /// Anoncreds* - pub async fn create_and_store_revocation_registry( - &self, - wallet_handle: WalletHandle, - issuer_did: DidValue, - type_: Option, - tag: String, - cred_def_id: CredentialDefinitionId, - config: RevocationRegistryConfig, - tails_writer_handle: i32, - ) -> IndyResult<(String, String, String)> { - trace!( - "create_and_store_revocation_registry > wallet_handle {:?} \ - issuer_did {:?} type_ {:?} tag: {:?} cred_def_id {:?} \ - config: {:?} tails_handle {:?}", - wallet_handle, - issuer_did, - type_, - tag, - cred_def_id, - config, - tails_writer_handle - ); - - match (issuer_did.get_method(), cred_def_id.get_method()) { - (None, Some(_)) => { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "You can't use unqualified Did with fully qualified Credential Definition", - )); - } - (Some(_), None) => { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "You can't use fully qualified Did with unqualified Credential Definition", - )); - } - _ => {} - }; - - let rev_reg_type = if let Some(type_) = type_ { - serde_json::from_str::(&format!("\"{}\"", type_)).to_indy( - IndyErrorKind::InvalidStructure, - "Invalid Registry Type format", - )? - } else { - RegistryType::CL_ACCUM - }; - - let issuance_type = config - .issuance_type - .clone() - .unwrap_or(IssuanceType::ISSUANCE_ON_DEMAND); - - let max_cred_num = config.max_cred_num.unwrap_or(100000); - - let rev_reg_id = - RevocationRegistryId::new(&issuer_did, &cred_def_id, &rev_reg_type.to_str(), &tag)?; - - if let (Ok(rev_reg_def), Ok(rev_reg)) = ( - self.wallet_service - .get_indy_record_value::( - wallet_handle, - &rev_reg_id.0, - &RecordOptions::id_value(), - ) - .await, - self.wallet_service - .get_indy_record_value::( - wallet_handle, - &rev_reg_id.0, - &RecordOptions::id_value(), - ) - .await, - ) { - let res = Ok((cred_def_id.0.to_string(), rev_reg_def, rev_reg)); - - trace!( - "create_and_store_revocation_registry < already exists {:?}", - res - ); - - return res; - } - - let cred_def: CredentialDefinition = self - .wallet_service - .get_indy_object(wallet_handle, &cred_def_id.0, &RecordOptions::id_value()) - .await?; - - let (revoc_public_keys, revoc_key_private, revoc_registry, mut revoc_tails_generator) = - self.issuer_service.new_revocation_registry( - &CredentialDefinitionV1::from(cred_def), - max_cred_num, - issuance_type.to_bool(), - &issuer_did, - )?; - - let (tails_location, tails_hash) = store_tails_from_generator( - self.blob_storage_service.clone(), - tails_writer_handle, - &mut revoc_tails_generator, - ) - .await?; - - let revoc_reg_def_value = RevocationRegistryDefinitionValue { - max_cred_num, - issuance_type, - public_keys: revoc_public_keys, - tails_location, - tails_hash, - }; - - let revoc_reg_def = RevocationRegistryDefinition::RevocationRegistryDefinitionV1( - RevocationRegistryDefinitionV1 { - id: rev_reg_id.clone(), - revoc_def_type: rev_reg_type, - tag: tag.to_string(), - cred_def_id: cred_def_id.clone(), - value: revoc_reg_def_value, - }, - ); - - let revoc_reg = RevocationRegistry::RevocationRegistryV1(RevocationRegistryV1 { - value: revoc_registry, - }); - - let revoc_reg_def_priv = RevocationRegistryDefinitionPrivate { - value: revoc_key_private, - }; - - let revoc_reg_def_json = self - .wallet_service - .add_indy_object( - wallet_handle, - &rev_reg_id.0, - &revoc_reg_def, - &HashMap::new(), - ) - .await?; - - let revoc_reg_json = self - 
.wallet_service - .add_indy_object(wallet_handle, &rev_reg_id.0, &revoc_reg, &HashMap::new()) - .await?; - - self.wallet_service - .add_indy_object( - wallet_handle, - &rev_reg_id.0, - &revoc_reg_def_priv, - &HashMap::new(), - ) - .await?; - - let rev_reg_info = RevocationRegistryInfo { - id: rev_reg_id.clone(), - curr_id: 0, - used_ids: HashSet::new(), - }; - - self.wallet_service - .add_indy_object(wallet_handle, &rev_reg_id.0, &rev_reg_info, &HashMap::new()) - .await?; - - let res = Ok((rev_reg_id.0, revoc_reg_def_json, revoc_reg_json)); - trace!("create_and_store_revocation_registry < {:?}", res); - res - } - - /// Create credential offer that will be used by Prover for - /// credential request creation. Offer includes nonce and key correctness proof - /// for authentication between protocol steps and integrity checking. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// cred_def_id: id of credential definition stored in the wallet - /// - /// #Returns - /// credential offer json: - /// { - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// // Fields below can depend on Credential Definition type - /// "nonce": string, - /// "key_correctness_proof" : key correctness proof for credential definition correspondent to cred_def_id - /// (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). - /// } - /// - /// #Errors - /// Common* - /// Wallet* - /// Anoncreds* - pub async fn create_credential_offer( - &self, - wallet_handle: WalletHandle, - cred_def_id: CredentialDefinitionId, - ) -> IndyResult { - trace!( - "create_credential_offer > wallet_handle {:?} cred_def_id {:?}", - wallet_handle, - cred_def_id - ); - - let cred_def_correctness_proof: CredentialDefinitionCorrectnessProof = self - .wallet_service - .get_indy_object(wallet_handle, &cred_def_id.0, &RecordOptions::id_value()) - .await?; - - let nonce = new_nonce()?; - - let schema_id = self - ._wallet_get_schema_id(wallet_handle, &cred_def_id.0) - .await?; // TODO: FIXME get CredDef from wallet and use CredDef.schema_id - - let credential_offer = CredentialOffer { - schema_id, - cred_def_id: cred_def_id.clone(), - key_correctness_proof: cred_def_correctness_proof.value, - nonce, - method_name: None, - }; - - let credential_offer_json = serde_json::to_string(&credential_offer).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize CredentialOffer", - )?; - - let res = Ok(credential_offer_json); - trace!("create_credential_offer < {:?}", res); - res - } - - /// Check Cred Request for the given Cred Offer and issue Credential for the given Cred Request. - /// - /// Cred Request must match Cred Offer. The credential definition and revocation registry definition - /// referenced in Cred Offer and Cred Request must be already created and stored into the wallet. - /// - /// Information for this credential revocation will be store in the wallet as part of revocation registry under - /// generated cred_revoc_id local for this wallet. - /// - /// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction. - /// Note that it is possible to accumulate deltas to reduce ledger load. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). 
- /// cred_offer_json: a cred offer created by indy_issuer_create_credential_offer - /// cred_req_json: a credential request created by indy_prover_create_credential_req - /// cred_values_json: a credential containing attribute values for each of requested attribute names. - /// Example: - /// { - /// "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, - /// "attr2" : {"raw": "value1", "encoded": "value1_as_int" } - /// } - /// If you want to use empty value for some credential field, you should set "raw" to "" and "encoded" should not be empty - /// rev_reg_id: id of revocation registry stored in the wallet - /// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails (returned by `indy_open_blob_storage_reader`) - /// - /// #Returns - /// cred_json: Credential json containing signed credential values - /// { - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// "rev_reg_def_id", Optional, - identifier of revocation registry - /// "values": , - credential values. - /// // Fields below can depend on Cred Def type - /// "signature": , - /// (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). - /// "signature_correctness_proof": credential signature correctness proof - /// (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). - /// "rev_reg" - (Optional) revocation registry accumulator value on the issuing moment. - /// (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). - /// "witness" - (Optional) revocation related data - /// (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). 
- /// } - /// cred_revoc_id: local id for revocation info (Can be used for revocation of this credential) - /// revoc_reg_delta_json: Revocation registry delta json with a newly issued credential - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn new_credential( - &self, - wallet_handle: WalletHandle, - cred_offer: CredentialOffer, - cred_request: CredentialRequest, - cred_values: CredentialValues, - rev_reg_id: Option, - blob_storage_reader_handle: Option, - ) -> IndyResult<(String, Option, Option)> { - trace!( - "new_credential > wallet_handle {:?} cred_offer {:?} \ - cred_request {:?} cred_values {:?} rev_reg_id {:?} \ - blob_storage_reader_handle {:?}", - wallet_handle, - secret!(&cred_offer), - secret!(&cred_request), - secret!(&cred_values), - rev_reg_id, - blob_storage_reader_handle - ); - - let cred_def_id = match cred_offer.method_name { - Some(ref method_name) => cred_offer.cred_def_id.qualify(method_name)?, - None => cred_offer.cred_def_id.clone(), - }; - - let cred_def: CredentialDefinitionV1 = CredentialDefinitionV1::from( - self.wallet_service - .get_indy_object::( - wallet_handle, - &cred_def_id.0, - &RecordOptions::id_value(), - ) - .await?, - ); - - let cred_def_priv_key: CredentialDefinitionPrivateKey = self - .wallet_service - .get_indy_object(wallet_handle, &cred_def_id.0, &RecordOptions::id_value()) - .await?; - - let (rev_reg_def, mut rev_reg, rev_reg_def_priv, sdk_tails_accessor, rev_reg_info) = - match rev_reg_id { - Some(ref r_reg_id) => { - let rev_reg_def: RevocationRegistryDefinitionV1 = - RevocationRegistryDefinitionV1::from( - self._wallet_get_rev_reg_def(wallet_handle, &r_reg_id) - .await?, - ); - - let rev_reg: RevocationRegistryV1 = RevocationRegistryV1::from( - self._wallet_get_rev_reg(wallet_handle, &r_reg_id).await?, - ); - - let rev_key_priv: RevocationRegistryDefinitionPrivate = self - .wallet_service - .get_indy_object(wallet_handle, &r_reg_id.0, &RecordOptions::id_value()) - .await?; - - let mut rev_reg_info = self - ._wallet_get_rev_reg_info(wallet_handle, &r_reg_id) - .await?; - - rev_reg_info.curr_id += 1; - - if rev_reg_info.curr_id > rev_reg_def.value.max_cred_num { - return Err(err_msg( - IndyErrorKind::RevocationRegistryFull, - "RevocationRegistryAccumulator is full", - )); - } - - if rev_reg_def.value.issuance_type == IssuanceType::ISSUANCE_ON_DEMAND { - rev_reg_info.used_ids.insert(rev_reg_info.curr_id); - } - - // TODO: FIXME: Review error kind! 
- let blob_storage_reader_handle = - blob_storage_reader_handle.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "TailsReaderHandle not found", - ) - })?; - - let sdk_tails_accessor = SDKTailsAccessor::new( - self.blob_storage_service.clone(), - blob_storage_reader_handle, - &rev_reg_def, - ) - .await?; - - ( - Some(rev_reg_def), - Some(rev_reg), - Some(rev_key_priv), - Some(sdk_tails_accessor), - Some(rev_reg_info), - ) - } - None => (None, None, None, None, None), - }; - - let (credential_signature, signature_correctness_proof, rev_reg_delta) = - self.issuer_service.new_credential( - &cred_def, - &cred_def_priv_key.value, - &cred_offer.nonce, - &cred_request, - &cred_values, - rev_reg_info.as_ref().map(|r_reg_info| r_reg_info.curr_id), - rev_reg_def.as_ref(), - rev_reg.as_mut().map(|r_reg| &mut r_reg.value), - rev_reg_def_priv - .as_ref() - .map(|r_reg_def_priv| &r_reg_def_priv.value), - sdk_tails_accessor.as_ref(), - )?; - - let witness = if let ( - &Some(ref r_reg_def), - &Some(ref r_reg), - &Some(ref rev_tails_accessor), - &Some(ref rev_reg_info), - ) = (&rev_reg_def, &rev_reg, &sdk_tails_accessor, &rev_reg_info) - { - let (issued, revoked) = match r_reg_def.value.issuance_type { - IssuanceType::ISSUANCE_ON_DEMAND => (rev_reg_info.used_ids.clone(), HashSet::new()), - IssuanceType::ISSUANCE_BY_DEFAULT => { - (HashSet::new(), rev_reg_info.used_ids.clone()) - } - }; - - let rev_reg_delta = - CryptoRevocationRegistryDelta::from_parts(None, &r_reg.value, &issued, &revoked); - - Some(Witness::new( - rev_reg_info.curr_id, - r_reg_def.value.max_cred_num, - r_reg_def.value.issuance_type.to_bool(), - &rev_reg_delta, - rev_tails_accessor, - )?) - } else { - None - }; - - let cred_rev_reg_id = match (rev_reg_id.as_ref(), cred_offer.method_name.as_ref()) { - (Some(rev_reg_id), Some(ref _method_name)) => Some(rev_reg_id.to_unqualified()), - (rev_reg_id, _) => rev_reg_id.cloned(), - }; - - let credential = Credential { - schema_id: cred_offer.schema_id.clone(), - cred_def_id: cred_offer.cred_def_id.clone(), - rev_reg_id: cred_rev_reg_id, - values: cred_values.clone(), - signature: credential_signature, - signature_correctness_proof, - rev_reg: rev_reg.map(|r_reg| r_reg.value), - witness, - }; - - let cred_json = serde_json::to_string(&credential) - .to_indy(IndyErrorKind::InvalidState, "Cannot serialize Credential")?; - - let rev_reg_delta_json = rev_reg_delta - .map(|r_reg_delta| { - RevocationRegistryDelta::RevocationRegistryDeltaV1(RevocationRegistryDeltaV1 { - value: r_reg_delta, - }) - }) - .as_ref() - .map(serde_json::to_string) - .map_or(Ok(None), |v| v.map(Some)) - .to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize RevocationRegistryDelta", - )?; - - if let (Some(r_reg), Some(r_reg_id), Some(r_reg_info)) = - (credential.rev_reg, rev_reg_id, rev_reg_info.clone()) - { - let revoc_reg = - RevocationRegistry::RevocationRegistryV1(RevocationRegistryV1 { value: r_reg }); - - self.wallet_service - .update_indy_object(wallet_handle, &r_reg_id.0, &revoc_reg) - .await?; - self.wallet_service - .update_indy_object(wallet_handle, &r_reg_id.0, &r_reg_info) - .await?; - }; - - let cred_rev_id = rev_reg_info.map(|r_reg_info| r_reg_info.curr_id.to_string()); - - let res = Ok((cred_json, cred_rev_id, rev_reg_delta_json)); - trace!("new_credential < {:?}", secret!(&res)); - res - } - - /// Revoke a credential identified by a cred_revoc_id (returned by indy_issuer_create_credential). 
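To keep the removed issuance path readable in one place, the sketch below strings together `create_and_store_revocation_registry`, `create_credential_offer` and `new_credential` as documented above. It is illustrative only: the controller, DIDs, tails/blob-storage handles and the prover's `cred_request` are hypothetical, the offer round-trip to the prover is elided, and constructing the `RevocationRegistryId` newtype from the returned id string is an assumption.

// Sketch only: issuing a revocable credential with the removed IssuerController API.
async fn issue_revocable_credential(
    issuer: &IssuerController,
    wallet_handle: WalletHandle,
    issuer_did: DidValue,
    cred_def_id: CredentialDefinitionId,
    tails_writer_handle: i32,
    blob_storage_reader_handle: i32,
    cred_request: CredentialRequest,
    cred_values: CredentialValues,
    rev_reg_config: RevocationRegistryConfig,
) -> IndyResult<()> {
    // Create the revocation registry (None => default CL_ACCUM type) and keep
    // its id; the public definition and initial entry would be published as
    // REVOC_REG_DEF / REVOC_REG_ENTRY transactions.
    let (rev_reg_id, _rev_reg_def_json, _rev_reg_entry_json) = issuer
        .create_and_store_revocation_registry(
            wallet_handle,
            issuer_did,
            None,
            "tag1".to_string(),
            cred_def_id.clone(),
            rev_reg_config,
            tails_writer_handle,
        )
        .await?;

    // The offer goes to the prover, who answers with `cred_request`.
    let cred_offer_json = issuer
        .create_credential_offer(wallet_handle, cred_def_id)
        .await?;
    let cred_offer: CredentialOffer = serde_json::from_str(&cred_offer_json)
        .to_indy(IndyErrorKind::InvalidState, "Cannot deserialize CredentialOffer")?;

    // Sign the credential; the returned delta would be accumulated or published
    // as a REVOC_REG_ENTRY transaction.
    let (_cred_json, _cred_rev_id, _rev_reg_delta_json) = issuer
        .new_credential(
            wallet_handle,
            cred_offer,
            cred_request,
            cred_values,
            Some(RevocationRegistryId(rev_reg_id)), // newtype construction assumed
            Some(blob_storage_reader_handle),
        )
        .await?;
    Ok(())
}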
- /// - /// The corresponding credential definition and revocation registry must be already - /// created an stored into the wallet. - /// - /// This call returns revoc registry delta as json file intended to be shared as REVOC_REG_ENTRY transaction. - /// Note that it is possible to accumulate deltas to reduce ledger load. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). - /// blob_storage_reader_cfg_handle: configuration of blob storage reader handle that will allow to read revocation tails (returned by `indy_open_blob_storage_reader`). - /// rev_reg_id: id of revocation registry stored in wallet - /// cred_revoc_id: local id for revocation info related to issued credential - /// - /// #Returns - /// revoc_reg_delta_json: Revocation registry delta json with a revoked credential - /// { - /// value: { - /// prevAccum: string - previous accumulator value. - /// accum: string - current accumulator value. - /// revoked: array an array of revoked indices. - /// }, - /// ver: string - version revocation registry delta json - /// } - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn revoke_credential( - &self, - wallet_handle: WalletHandle, - blob_storage_reader_handle: i32, - rev_reg_id: RevocationRegistryId, - cred_revoc_id: String, - ) -> IndyResult { - trace!( - "revoke_credential > wallet_handle {:?} \ - blob_storage_reader_handle {:?} \ - rev_reg_id {:?} cred_revoc_id {:?}", - wallet_handle, - blob_storage_reader_handle, - rev_reg_id, - secret!(&cred_revoc_id) - ); - - let cred_revoc_id = AnoncredsHelpers::parse_cred_rev_id(&cred_revoc_id)?; - - let revocation_registry_definition: RevocationRegistryDefinitionV1 = - RevocationRegistryDefinitionV1::from( - self._wallet_get_rev_reg_def(wallet_handle, &rev_reg_id) - .await?, - ); - - let mut rev_reg: RevocationRegistryV1 = - RevocationRegistryV1::from(self._wallet_get_rev_reg(wallet_handle, &rev_reg_id).await?); - - let sdk_tails_accessor = SDKTailsAccessor::new( - self.blob_storage_service.clone(), - blob_storage_reader_handle, - &revocation_registry_definition, - ) - .await?; - - if cred_revoc_id > revocation_registry_definition.value.max_cred_num + 1 { - return Err(err_msg( - IndyErrorKind::InvalidUserRevocId, - format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_revoc_id - ), - )); - } - - let mut rev_reg_info = self - ._wallet_get_rev_reg_info(wallet_handle, &rev_reg_id) - .await?; - - match revocation_registry_definition.value.issuance_type { - IssuanceType::ISSUANCE_ON_DEMAND => { - if !rev_reg_info.used_ids.remove(&cred_revoc_id) { - return Err(err_msg( - IndyErrorKind::InvalidUserRevocId, - format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_revoc_id - ), - )); - }; - } - IssuanceType::ISSUANCE_BY_DEFAULT => { - if !rev_reg_info.used_ids.insert(cred_revoc_id) { - return Err(err_msg( - IndyErrorKind::InvalidUserRevocId, - format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_revoc_id - ), - )); - } - } - }; - - let rev_reg_delta = self.issuer_service.revoke( - &mut rev_reg.value, - revocation_registry_definition.value.max_cred_num, - cred_revoc_id, - &sdk_tails_accessor, - )?; - - let rev_reg_delta = - RevocationRegistryDelta::RevocationRegistryDeltaV1(RevocationRegistryDeltaV1 { - value: rev_reg_delta, - }); - - let rev_reg_delta_json = serde_json::to_string(&rev_reg_delta).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize RevocationRegistryDelta", - )?; - - let rev_reg = 
RevocationRegistry::RevocationRegistryV1(rev_reg); - - self.wallet_service - .update_indy_object(wallet_handle, &rev_reg_id.0, &rev_reg) - .await?; - - self.wallet_service - .update_indy_object(wallet_handle, &rev_reg_id.0, &rev_reg_info) - .await?; - - let res = Ok(rev_reg_delta_json); - trace!("revoke_credential < {:?}", res); - res - } - - async fn _recovery_credential( - &self, - wallet_handle: WalletHandle, - blob_storage_reader_handle: i32, - rev_reg_id: &RevocationRegistryId, - cred_revoc_id: &str, - ) -> IndyResult { - trace!("recovery_credential >>> wallet_handle: {:?}, blob_storage_reader_handle: {:?}, rev_reg_id: {:?}, cred_revoc_id: {:?}", - wallet_handle, blob_storage_reader_handle, rev_reg_id, secret!(cred_revoc_id)); - - let cred_revoc_id = AnoncredsHelpers::parse_cred_rev_id(cred_revoc_id)?; - - let revocation_registry_definition: RevocationRegistryDefinitionV1 = - RevocationRegistryDefinitionV1::from( - self._wallet_get_rev_reg_def(wallet_handle, &rev_reg_id) - .await?, - ); - - let mut rev_reg: RevocationRegistryV1 = - RevocationRegistryV1::from(self._wallet_get_rev_reg(wallet_handle, &rev_reg_id).await?); - - let sdk_tails_accessor = SDKTailsAccessor::new( - self.blob_storage_service.clone(), - blob_storage_reader_handle, - &revocation_registry_definition, - ) - .await?; - - if cred_revoc_id > revocation_registry_definition.value.max_cred_num + 1 { - return Err(err_msg( - IndyErrorKind::InvalidUserRevocId, - format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_revoc_id - ), - )); - } - - let mut rev_reg_info = self - ._wallet_get_rev_reg_info(wallet_handle, &rev_reg_id) - .await?; - - match revocation_registry_definition.value.issuance_type { - IssuanceType::ISSUANCE_ON_DEMAND => { - if !rev_reg_info.used_ids.insert(cred_revoc_id) { - return Err(err_msg( - IndyErrorKind::InvalidUserRevocId, - format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_revoc_id - ), - )); - } - } - IssuanceType::ISSUANCE_BY_DEFAULT => { - if !rev_reg_info.used_ids.remove(&cred_revoc_id) { - return Err(err_msg( - IndyErrorKind::InvalidUserRevocId, - format!( - "Revocation id: {:?} not found in RevocationRegistry", - cred_revoc_id - ), - )); - } - } - }; - - let revocation_registry_delta = self.issuer_service.recovery( - &mut rev_reg.value, - revocation_registry_definition.value.max_cred_num, - cred_revoc_id, - &sdk_tails_accessor, - )?; - - let rev_reg_delta = - RevocationRegistryDelta::RevocationRegistryDeltaV1(RevocationRegistryDeltaV1 { - value: revocation_registry_delta, - }); - - let rev_reg_delta_json = serde_json::to_string(&rev_reg_delta).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize RevocationRegistryDelta: {:?}", - )?; - - let rev_reg = RevocationRegistry::RevocationRegistryV1(rev_reg); - - self.wallet_service - .update_indy_object(wallet_handle, &rev_reg_id.0, &rev_reg) - .await?; - - self.wallet_service - .update_indy_object(wallet_handle, &rev_reg_id.0, &rev_reg_info) - .await?; - - let res = Ok(rev_reg_delta_json); - trace!("recovery_credential < {:?}", res); - res - } - - /// Merge two revocation registry deltas (returned by indy_issuer_create_credential or indy_issuer_revoke_credential) to accumulate common delta. - /// Send common delta to ledger to reduce the load. - /// - /// #Params - - /// rev_reg_delta_json: revocation registry delta. - /// { - /// value: { - /// prevAccum: string - previous accumulator value. - /// accum: string - current accumulator value. - /// issued: array an array of issued indices. 
- /// revoked: array an array of revoked indices. - /// }, - /// ver: string - version revocation registry delta json - /// } - /// - /// other_rev_reg_delta_json: revocation registry delta for which PrevAccum value is equal to value of accum field of rev_reg_delta_json parameter. - /// - /// #Returns - /// merged_rev_reg_delta: Merged revocation registry delta - /// { - /// value: { - /// prevAccum: string - previous accumulator value. - /// accum: string - current accumulator value. - /// issued: array an array of issued indices. - /// revoked: array an array of revoked indices. - /// }, - /// ver: string - version revocation registry delta json - /// } - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub fn merge_revocation_registry_deltas( - &self, - rev_reg_delta: RevocationRegistryDelta, - other_rev_reg_delta: RevocationRegistryDelta, - ) -> IndyResult { - trace!( - "merge_revocation_registry_deltas > rev_reg_delta {:?} other_rev_reg_delta {:?}", - rev_reg_delta, - other_rev_reg_delta - ); - - let mut rev_reg_delta = RevocationRegistryDeltaV1::from(rev_reg_delta); - let other_rev_reg_delta = RevocationRegistryDeltaV1::from(other_rev_reg_delta); - - rev_reg_delta.value.merge(&other_rev_reg_delta.value)?; - - let rev_reg_delta = - RevocationRegistryDelta::RevocationRegistryDeltaV1(rev_reg_delta.clone()); - - let merged_rev_reg_delta_json = serde_json::to_string(&rev_reg_delta).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize RevocationRegistryDelta", - )?; - - let res = Ok(merged_rev_reg_delta_json); - trace!("merge_revocation_registry_deltas < {:?}", res); - res - } - - // TODO: DELETE IT - async fn _wallet_set_schema_id( - &self, - wallet_handle: WalletHandle, - id: &str, - schema_id: &SchemaId, - ) -> IndyResult<()> { - self.wallet_service - .add_record( - wallet_handle, - &self.wallet_service.add_prefix("SchemaId"), - id, - &schema_id.0, - &Tags::new(), - ) - .await - } - - // TODO: DELETE IT - async fn _wallet_get_schema_id( - &self, - wallet_handle: WalletHandle, - key: &str, - ) -> IndyResult { - let schema_id_record = self - .wallet_service - .get_record( - wallet_handle, - &self.wallet_service.add_prefix("SchemaId"), - &key, - &RecordOptions::id_value(), - ) - .await?; - - schema_id_record - .get_value() - .map(|id| SchemaId(id.to_string())) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("SchemaId not found for id: {}", key), - ) - }) - } - - async fn _wallet_get_rev_reg_def( - &self, - wallet_handle: WalletHandle, - key: &RevocationRegistryId, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &key.0, &RecordOptions::id_value()) - .await - } - - async fn _wallet_get_rev_reg( - &self, - wallet_handle: WalletHandle, - key: &RevocationRegistryId, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &key.0, &RecordOptions::id_value()) - .await - } - - async fn _wallet_get_rev_reg_info( - &self, - wallet_handle: WalletHandle, - key: &RevocationRegistryId, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &key.0, &RecordOptions::id_value()) - .await - } -} diff --git a/libvdrtools/src/controllers/anoncreds/mod.rs b/libvdrtools/src/controllers/anoncreds/mod.rs deleted file mode 100644 index a7d2e092b2..0000000000 --- a/libvdrtools/src/controllers/anoncreds/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -mod issuer; -mod prover; -mod tails; -mod verifier; - -pub use issuer::{CredentialDefinitionId, IssuerController}; -pub use prover::ProverController; -pub use 
verifier::VerifierController; diff --git a/libvdrtools/src/controllers/anoncreds/prover.rs b/libvdrtools/src/controllers/anoncreds/prover.rs deleted file mode 100644 index 9c7537cc11..0000000000 --- a/libvdrtools/src/controllers/anoncreds/prover.rs +++ /dev/null @@ -1,1883 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - ops::DerefMut, - sync::Arc, -}; - -use futures::lock::Mutex; -use indy_api_types::{errors::prelude::*, SearchHandle, WalletHandle}; -use indy_utils::next_search_handle; -use indy_wallet::{RecordOptions, SearchOptions, WalletRecord, WalletSearch, WalletService}; -use log::trace; -use serde_json::Value; -use ursa::cl::{new_nonce, RevocationRegistry, Witness}; - -use crate::{ - domain::{ - anoncreds::{ - credential::{Credential, CredentialInfo}, - credential_attr_tag_policy::CredentialAttrTagPolicy, - credential_definition::{ - cred_defs_map_to_cred_defs_v1_map, CredentialDefinition, CredentialDefinitionId, - CredentialDefinitionV1, CredentialDefinitions, - }, - credential_for_proof_request::{CredentialsForProofRequest, RequestedCredential}, - credential_offer::CredentialOffer, - credential_request::{CredentialRequest, CredentialRequestMetadata}, - master_secret::MasterSecret, - proof_request::{ - NonRevocedInterval, PredicateInfo, ProofRequest, ProofRequestExtraQuery, - }, - requested_credential::RequestedCredentials, - revocation_registry_definition::{ - RevocationRegistryDefinition, RevocationRegistryDefinitionV1, - }, - revocation_registry_delta::{RevocationRegistryDelta, RevocationRegistryDeltaV1}, - revocation_state::{RevocationState, RevocationStates}, - schema::{schemas_map_to_schemas_v1_map, Schemas}, - }, - crypto::did::DidValue, - }, - services::{AnoncredsHelpers, BlobStorageService, CryptoService, ProverService}, - utils::wql::Query, -}; - -use super::tails::SDKTailsAccessor; - -struct SearchForProofRequest { - search: WalletSearch, - interval: Option, - predicate_info: Option, -} - -impl SearchForProofRequest { - fn new( - search: WalletSearch, - interval: Option, - predicate_info: Option, - ) -> Self { - Self { - search, - interval, - predicate_info, - } - } -} - -pub struct ProverController { - prover_service: Arc, - wallet_service: Arc, - crypto_service: Arc, - blob_storage_service: Arc, - searches: Mutex>>, - searches_for_proof_requests: - Mutex>>>>, -} - -impl ProverController { - pub(crate) fn new( - prover_service: Arc, - wallet_service: Arc, - crypto_service: Arc, - blob_storage_service: Arc, - ) -> ProverController { - ProverController { - prover_service, - wallet_service, - crypto_service, - blob_storage_service, - searches: Mutex::new(HashMap::new()), - searches_for_proof_requests: Mutex::new(HashMap::new()), - } - } - - /// Creates a master secret with a given id and stores it in the wallet. - /// The id must be unique. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). 
- /// master_secret_id: (optional, if not present random one will be generated) new master id - /// - /// #Returns - /// out_master_secret_id: Id of generated master secret - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn create_master_secret( - &self, - wallet_handle: WalletHandle, - master_secret_id: Option, - ) -> IndyResult { - trace!( - "create_master_secret > wallet_handle {:?} master_secret_id {:?}", - wallet_handle, - master_secret_id - ); - - let master_secret_id = master_secret_id.unwrap_or_else(|| uuid::Uuid::new_v4().to_string()); - - if self - .wallet_service - .record_exists::(wallet_handle, &master_secret_id) - .await? - { - return Err(err_msg( - IndyErrorKind::MasterSecretDuplicateName, - format!("MasterSecret already exists {}", master_secret_id), - )); - } - - let master_secret = self.prover_service.new_master_secret()?; - - let master_secret = MasterSecret { - value: master_secret, - }; - - self.wallet_service - .add_indy_object( - wallet_handle, - &master_secret_id, - &master_secret, - &HashMap::new(), - ) - .await?; - - let res = Ok(master_secret_id); - trace!("create_master_secret < {:?}", res); - res - } - - /// Creates a credential request for the given credential offer. - /// - /// The method creates a blinded master secret for a master secret identified by a provided name. - /// The master secret identified by the name must be already stored in the secure wallet (see prover_create_master_secret) - /// The blinded master secret is a part of the credential request. - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet) - /// prover_did: a DID of the prover - /// cred_offer_json: credential offer as a json containing information about the issuer and a credential - /// { - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// ... - /// Other fields that contains data structures internal to Ursa. - /// These fields should not be parsed and are likely to change in future versions. - /// } - /// cred_def_json: credential definition json related to in - /// master_secret_id: the id of the master secret stored in the wallet - /// - /// #Returns - /// cred_req_json: Credential request json for creation of credential by Issuer - /// { - /// "prover_did" : string, - /// "cred_def_id" : string, - /// // Fields below can depend on Cred Def type - /// "blinded_ms" : , - /// (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). - /// "blinded_ms_correctness_proof" : , - /// (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). - /// "nonce": string - /// } - /// cred_req_metadata_json: Credential request metadata json for further processing of received form Issuer credential. - /// Credential request metadata contains data structures internal to Ursa. - /// Credential request metadata mustn't be shared with Issuer. 
- /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn create_credential_request( - &self, - wallet_handle: WalletHandle, - prover_did: DidValue, - cred_offer: CredentialOffer, - cred_def: CredentialDefinition, - master_secret_id: String, - ) -> IndyResult<(String, String)> { - trace!( - "create_credential_request > wallet_handle {:?} \ - prover_did {:?} cred_offer {:?} cred_def {:?} \ - master_secret_id: {:?}", - wallet_handle, - prover_did, - cred_offer, - cred_def, - master_secret_id - ); - - let cred_def = CredentialDefinitionV1::from(cred_def); - - self.crypto_service.validate_did(&prover_did)?; - - let master_secret: MasterSecret = self - ._wallet_get_master_secret(wallet_handle, &master_secret_id) - .await?; - - let (blinded_ms, ms_blinding_data, blinded_ms_correctness_proof) = self - .prover_service - .new_credential_request(&cred_def, &master_secret.value, &cred_offer)?; - - let nonce = new_nonce()?; - - let credential_request = CredentialRequest { - prover_did, - cred_def_id: cred_offer.cred_def_id.clone(), - blinded_ms, - blinded_ms_correctness_proof, - nonce, - }; - - let credential_request_metadata = CredentialRequestMetadata { - master_secret_blinding_data: ms_blinding_data, - nonce: credential_request.nonce.try_clone()?, - master_secret_name: master_secret_id.to_string(), - }; - - let cred_req_json = serde_json::to_string(&credential_request).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize CredentialRequest", - )?; - - let cred_req_metadata_json = serde_json::to_string(&credential_request_metadata).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize CredentialRequestMetadata", - )?; - - let res = Ok((cred_req_json, cred_req_metadata_json)); - trace!("create_credential_request < {:?}", res); - res - } - - /// Set credential attribute tagging policy. - /// Writes a non-secret record marking attributes to tag, and optionally - /// updates tags on existing credentials on the credential definition to match. - /// - /// EXPERIMENTAL - /// - /// The following tags are always present on write: - /// { - /// "schema_id": , - /// "schema_issuer_did": , - /// "schema_name": , - /// "schema_version": , - /// "issuer_did": , - /// "cred_def_id": , - /// "rev_reg_id": , // "None" as string if not present - /// } - /// - /// The policy sets the following tags for each attribute it marks taggable, written to subsequent - /// credentials and (optionally) all existing credentials on the credential definition: - /// { - /// "attr::::marker": "1", - /// "attr::::value": , - /// } - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). 
- /// cred_def_id: credential definition id - /// tag_attrs_json: JSON array with names of attributes to tag by policy, or null for all - /// retroactive: boolean, whether to apply policy to existing credentials on credential definition identifier - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn set_credential_attr_tag_policy( - &self, - wallet_handle: WalletHandle, - cred_def_id: CredentialDefinitionId, - catpol: Option, - retroactive: bool, - ) -> IndyResult<()> { - trace!( - "set_credential_attr_tag_policy > wallet_handle {:?} \ - cred_def_id {:?} catpol {:?} retroactive {:?}", - wallet_handle, - cred_def_id, - catpol, - retroactive - ); - - match catpol { - Some(ref pol) => { - self.wallet_service - .upsert_indy_object(wallet_handle, &cred_def_id.0, pol) - .await?; - } - None => { - if self - .wallet_service - .record_exists::(wallet_handle, &cred_def_id.0) - .await? - { - self.wallet_service - .delete_indy_record::( - wallet_handle, - &cred_def_id.0, - ) - .await?; - } - } - }; - - // Cascade whether we updated policy or not: could be a retroactive cred attr tags reset to existing policy - if retroactive { - let query_json = format!(r#"{{"cred_def_id": "{}"}}"#, cred_def_id.0); - - let mut credentials_search = self - .wallet_service - .search_indy_records::( - wallet_handle, - query_json.as_str(), - &SearchOptions::id_value(), - ) - .await?; - - while let Some(credential_record) = credentials_search.fetch_next_record().await? { - let (_, credential) = self._get_credential(&credential_record)?; - - let cred_tags = self - .prover_service - .build_credential_tags(&credential, catpol.as_ref())?; - - self.wallet_service - .update_record_tags( - wallet_handle, - self.wallet_service.add_prefix("Credential").as_str(), - credential_record.get_id(), - &cred_tags, - ) - .await?; - } - } - - let res = Ok(()); - trace!("set_credential_attr_tag_policy < {:?}", res); - res - } - - /// Get credential attribute tagging policy by credential definition id. - /// - /// EXPERIMENTAL - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). - /// cred_def_id: credential definition id - /// - /// #Returns - /// JSON array with all attributes that current policy marks taggable; - /// null for default policy (tag all credential attributes). - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn get_credential_attr_tag_policy( - &self, - wallet_handle: WalletHandle, - cred_def_id: CredentialDefinitionId, - ) -> IndyResult { - trace!( - "get_credential_attr_tag_policy > wallet_handle {:?} \ - cred_def_id {:?}", - wallet_handle, - cred_def_id - ); - - let catpol = self - ._get_credential_attr_tag_policy(wallet_handle, &cred_def_id) - .await?; - - let res = Ok(catpol); - trace!("get_credential_attr_tag_policy < {:?}", res); - res - } - - /// Check credential provided by Issuer for the given credential request, - /// updates the credential by a master secret and stores in a secure wallet. - /// - /// To support efficient and flexible search the following tags will be created for stored credential: - /// { - /// "schema_id": , - /// "schema_issuer_did": , - /// "schema_name": , - /// "schema_version": , - /// "issuer_did": , - /// "cred_def_id": , - /// "rev_reg_id": , // "None" as string if not present - /// // for every attribute in that credential attribute tagging policy marks taggable - /// "attr::::marker": "1", - /// "attr::::value": , - /// } - /// - /// #Params - - /// wallet_handle: wallet handle (created by open_wallet). 
- /// cred_id: (optional, default is a random one) identifier by which credential will be stored in the wallet - /// cred_req_metadata_json: a credential request metadata created by indy_prover_create_credential_req - /// cred_json: credential json received from issuer - /// { - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// "rev_reg_def_id", Optional, - identifier of revocation registry - /// "values": - credential values - /// { - /// "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, - /// "attr2" : {"raw": "value1", "encoded": "value1_as_int" } - /// } - /// // Fields below can depend on Cred Def type - /// Other fields that contains data structures internal to Ursa. - /// These fields should not be parsed and are likely to change in future versions. - /// } - /// cred_def_json: credential definition json related to in - /// rev_reg_def_json: revocation registry definition json related to in - /// - /// #Returns - /// out_cred_id: identifier by which credential is stored in the wallet - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn store_credential( - &self, - wallet_handle: WalletHandle, - cred_id: Option, - cred_req_metadata: CredentialRequestMetadata, - mut credential: Credential, - cred_def: CredentialDefinition, - rev_reg_def: Option, - ) -> IndyResult { - trace!( - "store_credential > wallet_handle {:?} \ - cred_id {:?} cred_req_metadata {:?} \ - credential {:?} cred_def {:?} \ - rev_reg_def {:?}", - wallet_handle, - cred_id, - cred_req_metadata, - credential, - cred_def, - rev_reg_def - ); - - let cred_def = CredentialDefinitionV1::from(cred_def); - let rev_reg_def = rev_reg_def.map(RevocationRegistryDefinitionV1::from); - - let master_secret: MasterSecret = self - ._wallet_get_master_secret(wallet_handle, &cred_req_metadata.master_secret_name) - .await?; - - self.prover_service.process_credential( - &mut credential, - &cred_req_metadata, - &master_secret.value, - &cred_def, - rev_reg_def.as_ref(), - )?; - - credential.rev_reg = None; - credential.witness = None; - - let out_cred_id = cred_id.unwrap_or_else(|| uuid::Uuid::new_v4().to_string()); - - let catpol_json = self - ._get_credential_attr_tag_policy(wallet_handle, &credential.cred_def_id) - .await?; - - let catpol: Option = if catpol_json.ne("null") { - Some(serde_json::from_str(catpol_json.as_str()).to_indy( - IndyErrorKind::InvalidState, - "Cannot deserialize CredentialAttrTagPolicy", - )?) - } else { - None - }; - - let cred_tags = self - .prover_service - .build_credential_tags(&credential, catpol.as_ref())?; - - self.wallet_service - .add_indy_object(wallet_handle, &out_cred_id, &credential, &cred_tags) - .await?; - - let res = Ok(out_cred_id); - trace!("store_credential < {:?}", res); - res - } - - /// Gets human readable credentials according to the filter. - /// If filter is NULL, then all credentials are returned. - /// Credentials can be filtered by Issuer, credential_def and/or Schema. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). 
- /// filter_json: filter for credentials - /// { - /// "schema_id": string, (Optional) - /// "schema_issuer_did": string, (Optional) - /// "schema_name": string, (Optional) - /// "schema_version": string, (Optional) - /// "issuer_did": string, (Optional) - /// "cred_def_id": string, (Optional) - /// } - /// - /// #Returns - /// credentials json - /// [{ - /// "referent": string, - id of credential in the wallet - /// "attrs": {"key1":"raw_value1", "key2":"raw_value2"}, - credential attributes - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// "rev_reg_id": Optional, - identifier of revocation registry definition - /// "cred_rev_id": Optional - identifier of credential in the revocation registry definition - /// }] - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - #[no_mangle] - pub async fn get_credentials( - &self, - wallet_handle: WalletHandle, - filter_json: Option, - ) -> IndyResult { - trace!( - "get_credentials > wallet_handle {:?} filter_json {:?}", - wallet_handle, - filter_json - ); - - let filter_json = filter_json.as_deref().unwrap_or("{}"); - let mut credentials_info: Vec = Vec::new(); - - let mut credentials_search = self - .wallet_service - .search_indy_records::( - wallet_handle, - filter_json, - &SearchOptions::id_value(), - ) - .await?; - - while let Some(credential_record) = credentials_search.fetch_next_record().await? { - let (referent, credential) = self._get_credential(&credential_record)?; - credentials_info.push(self._get_credential_info(&referent, credential)) - } - - let credentials_info_json = serde_json::to_string(&credentials_info).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize list of CredentialInfo", - )?; - - let res = Ok(credentials_info_json); - trace!("get_credentials < {:?}", res); - res - } - - /// Gets human readable credential by the given id. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). - /// cred_id: Identifier by which requested credential is stored in the wallet - /// - /// #Returns - /// credential json: - /// { - /// "referent": string, - id of credential in the wallet - /// "attrs": {"key1":"raw_value1", "key2":"raw_value2"}, - credential attributes - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// "rev_reg_id": Optional, - identifier of revocation registry definition - /// "cred_rev_id": Optional - identifier of credential in the revocation registry definition - /// } - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn get_credential( - &self, - wallet_handle: WalletHandle, - cred_id: String, - ) -> IndyResult { - trace!( - "get_credentials > wallet_handle {:?} cred_id {:?}", - wallet_handle, - cred_id - ); - - let credential: Credential = self - .wallet_service - .get_indy_object(wallet_handle, &cred_id, &RecordOptions::id_value()) - .await?; - - let credential_info = self._get_credential_info(&cred_id, credential); - - let credential_info_json = serde_json::to_string(&credential_info).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize CredentialInfo", - )?; - - let res = Ok(credential_info_json); - trace!("get_credential < {:?}", res); - res - } - - /// Search for credentials stored in wallet. - /// Credentials can be filtered by tags created during saving of credential. 
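The prover-side counterpart removed in this file can be summarised with a sketch of the happy path: create a master secret, build a credential request from the issuer's offer, store the signed credential, then list wallet credentials. It assumes a `ProverController` instance, this file's own imports, and that `CredentialDefinition` is clonable; all inputs are hypothetical.

// Sketch only: prover-side round trip with the removed ProverController API.
async fn prover_roundtrip(
    prover: &ProverController,
    wallet_handle: WalletHandle,
    prover_did: DidValue,
    cred_offer: CredentialOffer,
    cred_def: CredentialDefinition,
    issued_credential: Credential,
    rev_reg_def: Option<RevocationRegistryDefinition>,
) -> IndyResult<String> {
    // None => a random master secret id is generated and returned.
    let master_secret_id = prover.create_master_secret(wallet_handle, None).await?;

    // Blinded master secret plus metadata; the metadata never leaves the prover.
    let (_cred_req_json, cred_req_metadata_json) = prover
        .create_credential_request(
            wallet_handle,
            prover_did,
            cred_offer,
            cred_def.clone(), // Clone assumed for brevity
            master_secret_id,
        )
        .await?;

    // Once the issuer returns the signed credential, post-process and store it.
    let cred_req_metadata: CredentialRequestMetadata =
        serde_json::from_str(&cred_req_metadata_json)
            .to_indy(IndyErrorKind::InvalidState, "Cannot deserialize metadata")?;
    let cred_id = prover
        .store_credential(
            wallet_handle,
            None, // None => wallet-local random credential id
            cred_req_metadata,
            issued_credential,
            cred_def,
            rev_reg_def,
        )
        .await?;

    // Human-readable listing; a filter such as {"cred_def_id": "..."} is optional.
    let _credentials_json = prover.get_credentials(wallet_handle, None).await?;
    Ok(cred_id)
}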
- /// - /// Instead of immediately returning of fetched credentials - /// this call returns search_handle that can be used later - /// to fetch records by small batches (with indy_prover_fetch_credentials). - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). - /// query_json: Wql query filter for credentials searching based on tags. - /// where query: indy-sdk/docs/design/011-wallet-query-language/README.md - /// - /// #Returns - /// search_handle: Search handle that can be used later to fetch records by small batches (with indy_prover_fetch_credentials) - /// total_count: Total count of records - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn search_credentials( - &self, - wallet_handle: WalletHandle, - query_json: Option, - ) -> IndyResult<(SearchHandle, usize)> { - trace!( - "search_credentials > wallet_handle {:?} query_json {:?}", - wallet_handle, - query_json - ); - - let credentials_search = self - .wallet_service - .search_indy_records::( - wallet_handle, - query_json.as_deref().unwrap_or("{}"), - &SearchOptions::id_value(), - ) - .await?; - - let total_count = credentials_search.get_total_count()?.unwrap_or(0); - - let handle: SearchHandle = next_search_handle(); - - self.searches - .lock() - .await - .insert(handle, Box::new(credentials_search)); - - let res = (handle, total_count); - trace!("search_credentials < {:?}", res); - Ok(res) - } - - /// Fetch next credentials for search. - /// - /// #Params - /// search_handle: Search handle (created by indy_prover_search_credentials) - /// count: Count of credentials to fetch - /// - /// #Returns - /// credentials_json: List of human readable credentials: - /// [{ - /// "referent": string, - id of credential in the wallet - /// "attrs": {"key1":"raw_value1", "key2":"raw_value2"}, - credential attributes - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// "rev_reg_id": Optional, - identifier of revocation registry definition - /// "cred_rev_id": Optional - identifier of credential in the revocation registry definition - /// }] - /// NOTE: The list of length less than the requested count means credentials search iterator is completed. - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn fetch_credentials( - &self, - search_handle: SearchHandle, - count: usize, - ) -> IndyResult { - trace!( - "fetch_credentials > search_handle {:?} count {:?}", - search_handle, - count - ); - - let mut searches = self.searches.lock().await; - - let search = searches.get_mut(&search_handle).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown CredentialsSearch handle", - ) - })?; - - let mut credentials_info: Vec = Vec::new(); - - for _ in 0..count { - match search.fetch_next_record().await? 
{ - Some(credential_record) => { - let (referent, credential) = self._get_credential(&credential_record)?; - credentials_info.push(self._get_credential_info(&referent, credential)) - } - None => break, - } - } - - let credentials_info_json = serde_json::to_string(&credentials_info).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize list of CredentialInfo", - )?; - - let res = Ok(credentials_info_json); - trace!("fetch_credentials < {:?}", res); - res - } - - /// Close credentials search (make search handle invalid) - /// - /// #Params - /// search_handle: Search handle (created by indy_prover_search_credentials) - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn close_credentials_search(&self, search_handle: SearchHandle) -> IndyResult<()> { - trace!( - "close_credentials_search > search_handle {:?}", - search_handle - ); - - self.searches - .lock() - .await - .remove(&search_handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown CredentialsSearch handle", - ) - })?; - - let res = Ok(()); - trace!("close_credentials_search < {:?}", res); - res - } - - /// Gets human readable credentials matching the given proof request. - /// - /// NOTE: This method is deprecated because immediately returns all fetched credentials. - /// Use to fetch records by small batches. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). - /// proof_request_json: proof request json - /// { - /// "name": string, - /// "version": string, - /// "nonce": string, - a decimal number represented as a string (use `indy_generate_nonce` function to generate 80-bit number) - /// "requested_attributes": { // set of requested attributes - /// "": , // see below - /// ..., - /// }, - /// "requested_predicates": { // set of requested predicates - /// "": , // see below - /// ..., - /// }, - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval for each attribute - /// // (applies to every attribute and predicate but can be overridden on attribute level), - /// "ver": Optional - proof request version: - /// - omit or "1.0" to use unqualified identifiers for restrictions - /// - "2.0" to use fully qualified identifiers for restrictions - /// } - /// - /// where - /// attr_referent: Proof-request local identifier of requested attribute - /// attr_info: Describes requested attribute - /// { - /// "name": Optional, // attribute name, (case insensitive and ignore spaces) - /// "names": Optional<[string, string]>, // attribute names, (case insensitive and ignore spaces) - /// // NOTE: should either be "name" or "names", not both and not none of them. - /// // Use "names" to specify several attributes that have to match a single credential. 
- /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// predicate_referent: Proof-request local identifier of requested attribute predicate - /// predicate_info: Describes requested attribute predicate - /// { - /// "name": attribute name, (case insensitive and ignore spaces) - /// "p_type": predicate type (">=", ">", "<=", "<") - /// "p_value": int predicate value - /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// non_revoc_interval: Defines non-revocation interval - /// { - /// "from": Optional, // timestamp of interval beginning - /// "to": Optional, // timestamp of interval ending - /// } - /// filter_json: - /// { - /// "schema_id": string, (Optional) - /// "schema_issuer_did": string, (Optional) - /// "schema_name": string, (Optional) - /// "schema_version": string, (Optional) - /// "issuer_did": string, (Optional) - /// "cred_def_id": string, (Optional) - /// } - /// - /// #Returns - /// credentials_json: json with credentials for the given proof request. - /// { - /// "attrs": { - /// "": [{ cred_info: , interval: Optional }], - /// ..., - /// }, - /// "predicates": { - /// "requested_predicates": [{ cred_info: , timestamp: Optional }, { cred_info: , timestamp: Optional }], - /// "requested_predicate_2_referent": [{ cred_info: , timestamp: Optional }] - /// } - /// }, where is - /// { - /// "referent": string, - id of credential in the wallet - /// "attrs": {"key1":"raw_value1", "key2":"raw_value2"}, - credential attributes - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// "rev_reg_id": Optional, - identifier of revocation registry definition - /// "cred_rev_id": Optional - identifier of credential in the revocation registry definition - /// } - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - #[deprecated( - since = "1.6.1", - note = "Please use indy_prover_search_credentials_for_proof_req instead!" 
- )] - #[no_mangle] - pub async fn get_credentials_for_proof_req( - &self, - wallet_handle: WalletHandle, - proof_request: ProofRequest, - ) -> IndyResult { - trace!( - "get_credentials_for_proof_req > wallet_handle {:?} proof_request {:?}", - wallet_handle, - proof_request - ); - - let proof_req = proof_request.value(); - let proof_req_version = proof_request.version(); - - let mut credentials_for_proof_request: CredentialsForProofRequest = - CredentialsForProofRequest::default(); - - for (attr_id, requested_attr) in proof_req.requested_attributes.iter() { - let query = self.prover_service.process_proof_request_restrictions( - &proof_req_version, - &requested_attr.name, - &requested_attr.names, - &attr_id, - &requested_attr.restrictions, - &None, - )?; - - let interval = AnoncredsHelpers::get_non_revoc_interval( - &proof_req.non_revoked, - &requested_attr.non_revoked, - ); - - let credentials_for_attribute = self - ._query_requested_credentials(wallet_handle, &query, None, &interval) - .await?; - - credentials_for_proof_request - .attrs - .insert(attr_id.to_string(), credentials_for_attribute); - } - - for (predicate_id, requested_predicate) in proof_req.requested_predicates.iter() { - let query = self.prover_service.process_proof_request_restrictions( - &proof_req_version, - &Some(requested_predicate.name.clone()), - &None, - &predicate_id, - &requested_predicate.restrictions, - &None, - )?; - - let interval = AnoncredsHelpers::get_non_revoc_interval( - &proof_req.non_revoked, - &requested_predicate.non_revoked, - ); - - let credentials_for_predicate = self - ._query_requested_credentials( - wallet_handle, - &query, - Some(&requested_predicate), - &interval, - ) - .await?; - - credentials_for_proof_request - .predicates - .insert(predicate_id.to_string(), credentials_for_predicate); - } - - let credentials_for_proof_request_json = - serde_json::to_string(&credentials_for_proof_request).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize CredentialsForProofRequest", - )?; - - let res = Ok(credentials_for_proof_request_json); - trace!("get_credentials_for_proof_req < {:?}", res); - res - } - - /// Search for credentials matching the given proof request. - /// - /// Instead of immediately returning of fetched credentials - /// this call returns search_handle that can be used later - /// to fetch records by small batches (with indy_prover_fetch_credentials_for_proof_req). - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). 
- /// proof_request_json: proof request json - /// { - /// "name": string, - /// "version": string, - /// "nonce": string, - a decimal number represented as a string (use `indy_generate_nonce` function to generate 80-bit number) - /// "requested_attributes": { // set of requested attributes - /// "": , // see below - /// ..., - /// }, - /// "requested_predicates": { // set of requested predicates - /// "": , // see below - /// ..., - /// }, - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval for each attribute - /// // (applies to every attribute and predicate but can be overridden on attribute level) - /// // (can be overridden on attribute level) - /// "ver": Optional - proof request version: - /// - omit or "1.0" to use unqualified identifiers for restrictions - /// - "2.0" to use fully qualified identifiers for restrictions - /// } - /// - /// where - /// attr_info: Describes requested attribute - /// { - /// "name": Optional, // attribute name, (case insensitive and ignore spaces) - /// "names": Optional<[string, string]>, // attribute names, (case insensitive and ignore spaces) - /// // NOTE: should either be "name" or "names", not both and not none of them. - /// // Use "names" to specify several attributes that have to match a single credential. - /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// predicate_referent: Proof-request local identifier of requested attribute predicate - /// predicate_info: Describes requested attribute predicate - /// { - /// "name": attribute name, (case insensitive and ignore spaces) - /// "p_type": predicate type (">=", ">", "<=", "<") - /// "p_value": predicate value - /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// non_revoc_interval: Defines non-revocation interval - /// { - /// "from": Optional, // timestamp of interval beginning - /// "to": Optional, // timestamp of interval ending - /// } - /// extra_query_json:(Optional) List of extra queries that will be applied to correspondent attribute/predicate: - /// { - /// "": , - /// "": , - /// } - /// where wql query: indy-sdk/docs/design/011-wallet-query-language/README.md - /// The list of allowed keys that can be combine into complex queries. - /// "schema_id": , - /// "schema_issuer_did": , - /// "schema_name": , - /// "schema_version": , - /// "issuer_did": , - /// "cred_def_id": , - /// "rev_reg_id": , // "None" as string if not present - /// // the following keys can be used for every `attribute name` in credential. 
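As a sketch of the `extra_query_json` parameter described above (the attribute-specific `attr::...` keys continue just below), each entry maps a referent from the proof request to a WQL filter built from the allowed keys. The referents and identifiers here are placeholders and the sketch assumes `serde_json` as a dependency.

```rust
use serde_json::json;

fn main() {
    // Hypothetical extra queries: one per referent, each a WQL filter over the
    // allowed keys listed in the documentation. All identifiers are placeholders.
    let extra_query = json!({
        "attr1_referent": {
            "$and": [
                { "issuer_did": "NcYxiDXkpYi6ov5FcYDi1e" },
                { "attr::first_name::value": "Alice" }
            ]
        },
        "predicate1_referent": {
            "schema_name": "employment",
            "rev_reg_id": "None"   // the literal string "None" when the credential is not revocable
        }
    });

    println!("{}", serde_json::to_string_pretty(&extra_query).unwrap());
}
```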
- /// "attr::::marker": "1", - to filter based on existence of a specific attribute - /// "attr::::value": , - to filter based on value of a specific attribute - /// - /// - /// #Returns - /// search_handle: Search handle that can be used later to fetch records by small batches (with indy_prover_fetch_credentials_for_proof_req) - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn search_credentials_for_proof_req( - &self, - wallet_handle: WalletHandle, - proof_request: ProofRequest, - extra_query: Option, - ) -> IndyResult { - trace!( - "search_credentials_for_proof_req > wallet_handle {:?} \ - proof_request {:?} extra_query {:?}", - wallet_handle, - proof_request, - extra_query - ); - - let proof_req = proof_request.value(); - let version = proof_request.version(); - - let mut credentials_for_proof_request_search = - HashMap::>>::new(); - - for (attr_id, requested_attr) in proof_req.requested_attributes.iter() { - let query = self.prover_service.process_proof_request_restrictions( - &version, - &requested_attr.name, - &requested_attr.names, - &attr_id, - &requested_attr.restrictions, - &extra_query.as_ref(), - )?; - - let credentials_search = self - .wallet_service - .search_indy_records::( - wallet_handle, - &query.to_string(), - &SearchOptions::id_value(), - ) - .await?; - - let interval = AnoncredsHelpers::get_non_revoc_interval( - &proof_req.non_revoked, - &requested_attr.non_revoked, - ); - - credentials_for_proof_request_search.insert( - attr_id.to_string(), - Arc::new(Mutex::new(SearchForProofRequest::new( - credentials_search, - interval, - None, - ))), - ); - } - - for (predicate_id, requested_predicate) in proof_req.requested_predicates.iter() { - let query = self.prover_service.process_proof_request_restrictions( - &version, - &Some(requested_predicate.name.clone()), - &None, - &predicate_id, - &requested_predicate.restrictions, - &extra_query.as_ref(), - )?; - - let credentials_search = self - .wallet_service - .search_indy_records::( - wallet_handle, - &query.to_string(), - &SearchOptions::id_value(), - ) - .await?; - - let interval = AnoncredsHelpers::get_non_revoc_interval( - &proof_req.non_revoked, - &requested_predicate.non_revoked, - ); - - credentials_for_proof_request_search.insert( - predicate_id.to_string(), - Arc::new(Mutex::new(SearchForProofRequest::new( - credentials_search, - interval, - Some(requested_predicate.clone()), - ))), - ); - } - - let search_handle = next_search_handle(); - - self.searches_for_proof_requests - .lock() - .await - .insert(search_handle, credentials_for_proof_request_search); - - let res = Ok(search_handle); - trace!("search_credentials_for_proof_req < {:?}", search_handle); - res - } - - /// Fetch next credentials for the requested item using proof request search - /// handle (created by indy_prover_search_credentials_for_proof_req). - /// - /// #Params - /// search_handle: Search handle (created by indy_prover_search_credentials_for_proof_req) - /// item_referent: Referent of attribute/predicate in the proof request - /// count: Count of credentials to fetch - /// - /// #Returns - /// credentials_json: List of credentials for the given proof request. 
- /// [{ - /// cred_info: , - /// interval: Optional - /// }] - /// where - /// credential_info: - /// { - /// "referent": string, - id of credential in the wallet - /// "attrs": {"key1":"raw_value1", "key2":"raw_value2"}, - credential attributes - /// "schema_id": string, - identifier of schema - /// "cred_def_id": string, - identifier of credential definition - /// "rev_reg_id": Optional, - identifier of revocation registry definition - /// "cred_rev_id": Optional - identifier of credential in the revocation registry definition - /// } - /// non_revoc_interval: - /// { - /// "from": Optional, // timestamp of interval beginning - /// "to": Optional, // timestamp of interval ending - /// } - /// NOTE: The list of length less than the requested count means that search iterator - /// correspondent to the requested is completed. - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn fetch_credential_for_proof_request( - &self, - search_handle: SearchHandle, - item_referent: String, - count: usize, - ) -> IndyResult { - trace!( - "fetch_credential_for_proof_request > search_handle {:?} \ - item_referent {:?} count {:?}", - search_handle, - item_referent, - count - ); - - let search_mut = { - let mut searches = self.searches_for_proof_requests.lock().await; - - searches - .get_mut(&search_handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown CredentialsSearch", - ) - })? - .get(&item_referent) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown item referent for CredentialsSearch handle", - ) - })? - .clone() - }; - - let mut search_lock = search_mut.lock().await; - let search: &mut SearchForProofRequest = search_lock.deref_mut(); - - let requested_credentials: Vec = self - ._get_requested_credentials( - &mut search.search, - search.predicate_info.as_ref(), - &search.interval, - Some(count), - ) - .await?; - - let requested_credentials_json = serde_json::to_string(&requested_credentials).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize list of RequestedCredential", - )?; - - let res = Ok(requested_credentials_json); - trace!("fetch_credential_for_proof_request < {:?}", res); - res - } - - /// Close credentials search for proof request (make search handle invalid) - /// - /// #Params - /// search_handle: Search handle (created by indy_prover_search_credentials_for_proof_req) - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn close_credentials_search_for_proof_req( - &self, - search_handle: SearchHandle, - ) -> IndyResult<()> { - trace!( - "close_credentials_search_for_proof_req > search_handle {:?}", - search_handle - ); - - self.searches_for_proof_requests - .lock() - .await - .remove(&search_handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidWalletHandle, - "Unknown CredentialsSearch handle", - ) - })?; - - let res = Ok(()); - trace!("close_credentials_search_for_proof_req < {:?}", res); - res - } - - /// Deletes credential by given id. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). - /// cred_id: Identifier by which requested credential is stored in the wallet - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn delete_credential( - &self, - wallet_handle: WalletHandle, - cred_id: String, - ) -> IndyResult<()> { - trace!( - "delete_credential > wallet_handle {:?} cred_id {:?}", - wallet_handle, - cred_id - ); - - if !self - .wallet_service - .record_exists::(wallet_handle, &cred_id) - .await? 
- { - return Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Credential not found", - )); - } - - self.wallet_service - .delete_indy_record::(wallet_handle, &cred_id) - .await?; - - let res = Ok(()); - trace!("delete_credential < {:?}", res); - res - } - - /// Creates a proof according to the given proof request - /// Either a corresponding credential with optionally revealed attributes or self-attested attribute must be provided - /// for each requested attribute (see indy_prover_get_credentials_for_pool_req). - /// A proof request may request multiple credentials from different schemas and different issuers. - /// All required schemas, public keys and revocation registries must be provided. - /// The proof request also contains nonce. - /// The proof contains either proof or self-attested attribute value for each requested attribute. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). - - /// proof_request_json: proof request json - /// { - /// "name": string, - /// "version": string, - /// "nonce": string, - a decimal number represented as a string (use `indy_generate_nonce` function to generate 80-bit number) - /// "requested_attributes": { // set of requested attributes - /// "": , // see below - /// ..., - /// }, - /// "requested_predicates": { // set of requested predicates - /// "": , // see below - /// ..., - /// }, - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval for each attribute - /// // (applies to every attribute and predicate but can be overridden on attribute level) - /// // (can be overridden on attribute level) - /// "ver": Optional - proof request version: - /// - omit or "1.0" to use unqualified identifiers for restrictions - /// - "2.0" to use fully qualified identifiers for restrictions - /// } - /// requested_credentials_json: either a credential or self-attested attribute for each requested attribute - /// { - /// "self_attested_attributes": { - /// "self_attested_attribute_referent": string - /// }, - /// "requested_attributes": { - /// "requested_attribute_referent_1": {"cred_id": string, "timestamp": Optional, revealed: }}, - /// "requested_attribute_referent_2": {"cred_id": string, "timestamp": Optional, revealed: }} - /// }, - /// "requested_predicates": { - /// "requested_predicates_referent_1": {"cred_id": string, "timestamp": Optional }}, - /// } - /// } - /// master_secret_id: the id of the master secret stored in the wallet - /// schemas_json: all schemas participating in the proof request - /// { - /// : , - /// : , - /// : , - /// } - /// credential_defs_json: all credential definitions participating in the proof request - /// { - /// "cred_def1_id": , - /// "cred_def2_id": , - /// "cred_def3_id": , - /// } - /// rev_states_json: all revocation states participating in the proof request - /// { - /// "rev_reg_def1_id or credential_1_id": { - /// "timestamp1": , - /// "timestamp2": , - /// }, - /// "rev_reg_def2_id or credential_1_id"": { - /// "timestamp3": - /// }, - /// "rev_reg_def3_id or credential_1_id"": { - /// "timestamp4": - /// }, - /// } - /// Note: use credential_id instead rev_reg_id in case proving several credentials from the same revocation registry. 
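To make the `requested_credentials_json` and `rev_states_json` parameters documented above more concrete, here is a minimal sketch of both payloads built with `serde_json`. The referents, credential ids and timestamps are placeholders, and the empty `rev_reg`/`witness` objects only stand in for the opaque structures a real revocation state carries.

```rust
use serde_json::json;

fn main() {
    // requested_credentials in the documented shape: one self-attested attribute,
    // one revealed attribute backed by a wallet credential, one predicate. The
    // "cred_id" values are wallet referents; the timestamp must match a key of the
    // corresponding entry in rev_states below.
    let requested_credentials = json!({
        "self_attested_attributes": {
            "attr2_referent": "self-attested phone number"
        },
        "requested_attributes": {
            "attr1_referent": {
                "cred_id": "cred-referent-1",
                "timestamp": 1700000000,
                "revealed": true
            }
        },
        "requested_predicates": {
            "predicate1_referent": {
                "cred_id": "cred-referent-1",
                "timestamp": 1700000000
            }
        }
    });

    // Per the note above, key the revocation states by credential id when several
    // credentials come from the same revocation registry. The inner objects are
    // placeholders for real revocation state data.
    let rev_states = json!({
        "cred-referent-1": {
            "1700000000": { "rev_reg": {}, "witness": {}, "timestamp": 1700000000 }
        }
    });

    println!("{}", requested_credentials);
    println!("{}", rev_states);
}
```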
- /// - /// where - /// attr_referent: Proof-request local identifier of requested attribute - /// attr_info: Describes requested attribute - /// { - /// "name": Optional, // attribute name, (case insensitive and ignore spaces) - /// "names": Optional<[string, string]>, // attribute names, (case insensitive and ignore spaces) - /// // NOTE: should either be "name" or "names", not both and not none of them. - /// // Use "names" to specify several attributes that have to match a single credential. - /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// predicate_referent: Proof-request local identifier of requested attribute predicate - /// predicate_info: Describes requested attribute predicate - /// { - /// "name": attribute name, (case insensitive and ignore spaces) - /// "p_type": predicate type (">=", ">", "<=", "<") - /// "p_value": predicate value - /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// non_revoc_interval: Defines non-revocation interval - /// { - /// "from": Optional, // timestamp of interval beginning - /// "to": Optional, // timestamp of interval ending - /// } - /// where wql query: indy-sdk/docs/design/011-wallet-query-language/README.md - /// The list of allowed keys that can be combine into complex queries. - /// "schema_id": , - /// "schema_issuer_did": , - /// "schema_name": , - /// "schema_version": , - /// "issuer_did": , - /// "cred_def_id": , - /// "rev_reg_id": , // "None" as string if not present - /// // the following keys can be used for every `attribute name` in credential. - /// "attr::::marker": "1", - to filter based on existence of a specific attribute - /// "attr::::value": , - to filter based on value of a specific attribute - /// - /// #Returns - /// Proof json - /// For each requested attribute either a proof (with optionally revealed attribute value) or - /// self-attested attribute value is provided. - /// Each proof is associated with a credential and corresponding schema_id, cred_def_id, rev_reg_id and timestamp. - /// There is also aggregated proof part common for all credential proofs. - /// { - /// "requested_proof": { - /// "revealed_attrs": { - /// "requested_attr1_id": {sub_proof_index: number, raw: string, encoded: string}, - /// "requested_attr4_id": {sub_proof_index: number: string, encoded: string}, - /// }, - /// "revealed_attr_groups": { - /// "requested_attr5_id": { - /// "sub_proof_index": number, - /// "values": { - /// "attribute_name": { - /// "raw": string, - /// "encoded": string - /// } - /// }, - /// } - /// }, - /// "unrevealed_attrs": { - /// "requested_attr3_id": {sub_proof_index: number} - /// }, - /// "self_attested_attrs": { - /// "requested_attr2_id": self_attested_value, - /// }, - /// "predicates": { - /// "requested_predicate_1_referent": {sub_proof_index: int}, - /// "requested_predicate_2_referent": {sub_proof_index: int}, - /// } - /// } - /// "proof": { - /// "proofs": [ , , ], - /// "aggregated_proof": - /// } (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). 
- /// "identifiers": [{schema_id, cred_def_id, Optional, Optional}] - /// } - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub async fn create_proof( - &self, - wallet_handle: WalletHandle, - proof_req: ProofRequest, - requested_credentials: RequestedCredentials, - master_secret_id: String, - schemas: Schemas, - cred_defs: CredentialDefinitions, - rev_states: RevocationStates, - ) -> IndyResult { - trace!( - "create_proof > wallet_handle {:?} \ - proof_req {:?} requested_credentials {:?} \ - master_secret_id {:?} schemas {:?} \ - cred_defs {:?} rev_states {:?}", - wallet_handle, - proof_req, - requested_credentials, - master_secret_id, - schemas, - cred_defs, - rev_states - ); - - let schemas = schemas_map_to_schemas_v1_map(schemas); - let cred_defs = cred_defs_map_to_cred_defs_v1_map(cred_defs); - - let master_secret = self - ._wallet_get_master_secret(wallet_handle, &master_secret_id) - .await?; - - let cred_refs_for_attrs = requested_credentials - .requested_attributes - .values() - .map(|requested_attr| requested_attr.cred_id.clone()) - .collect::>(); - - let cred_refs_for_predicates = requested_credentials - .requested_predicates - .values() - .map(|requested_predicate| requested_predicate.cred_id.clone()) - .collect::>(); - - let cred_referents = cred_refs_for_attrs - .union(&cred_refs_for_predicates) - .cloned() - .collect::>(); - - let mut credentials: HashMap = - HashMap::with_capacity(cred_referents.len()); - - for cred_referent in cred_referents.into_iter() { - let credential: Credential = self - .wallet_service - .get_indy_object(wallet_handle, &cred_referent, &RecordOptions::id_value()) - .await?; - credentials.insert(cred_referent, credential); - } - - let proof = self.prover_service.create_proof( - &credentials, - &proof_req, - &requested_credentials, - &master_secret.value, - &schemas, - &cred_defs, - &rev_states, - )?; - - let proof_json = serde_json::to_string(&proof) - .to_indy(IndyErrorKind::InvalidState, "Cannot serialize FullProof")?; - - let res = Ok(proof_json); - trace!("create_proof <{:?}", res); - res - } - - /// Create revocation state for a credential that corresponds to a particular time. - /// - /// Note that revocation delta must cover the whole registry existence time. - /// You can use `from`: `0` and `to`: `needed_time` as parameters for building request to get correct revocation delta. - /// - /// The resulting revocation state and provided timestamp can be saved and reused later with applying a new - /// revocation delta with `indy_update_revocation_state` function. - /// This new delta should be received with parameters: `from`: `timestamp` and `to`: `needed_time`. - /// - /// #Params - - /// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails (returned by `indy_open_blob_storage_reader`) - /// rev_reg_def_json: revocation registry definition json related to `rev_reg_id` in a credential - /// rev_reg_delta_json: revocation registry delta which covers the whole registry existence time - /// timestamp: time represented as a total number of seconds from Unix Epoch. - /// cred_rev_id: user credential revocation id in revocation registry (match to `cred_rev_id` in a credential) - /// - /// #Returns - /// revocation state json: - /// { - /// "rev_reg": , - /// "witness": , (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). 
- /// "timestamp" : integer - /// } - /// - /// #Errors - /// Common* - /// Wallet* - /// Anoncreds* - pub async fn create_revocation_state( - &self, - blob_storage_reader_handle: i32, - revoc_reg_def: RevocationRegistryDefinition, - rev_reg_delta: RevocationRegistryDelta, - timestamp: u64, - cred_rev_id: String, - ) -> IndyResult { - trace!( - "create_revocation_state > blob_storage_reader_handle {:?} \ - revoc_reg_def {:?} rev_reg_delta {:?} timestamp {:?} \ - cred_rev_id {:?}", - blob_storage_reader_handle, - revoc_reg_def, - rev_reg_delta, - timestamp, - cred_rev_id - ); - - let revoc_reg_def = RevocationRegistryDefinitionV1::from(revoc_reg_def); - let rev_idx = AnoncredsHelpers::parse_cred_rev_id(&cred_rev_id)?; - - let sdk_tails_accessor = SDKTailsAccessor::new( - self.blob_storage_service.clone(), - blob_storage_reader_handle, - &revoc_reg_def, - ) - .await?; - - let rev_reg_delta = RevocationRegistryDeltaV1::from(rev_reg_delta); - - let witness = Witness::new( - rev_idx, - revoc_reg_def.value.max_cred_num, - revoc_reg_def.value.issuance_type.to_bool(), - &rev_reg_delta.value, - &sdk_tails_accessor, - )?; - - let revocation_state = RevocationState { - witness, - rev_reg: RevocationRegistry::from(rev_reg_delta.value), - timestamp, - }; - - let revocation_state_json = serde_json::to_string(&revocation_state).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize RevocationState", - )?; - - let res = Ok(revocation_state_json); - trace!("create_revocation_state < {:?}", res); - res - } - - /// Create a new revocation state for a credential based on a revocation state created before. - /// Note that provided revocation delta must cover the registry gap from based state creation until the specified time - /// (this new delta should be received with parameters: `from`: `state_timestamp` and `to`: `needed_time`). - /// - /// This function reduces the calculation time. - /// - /// The resulting revocation state and provided timestamp can be saved and reused later by applying a new revocation delta again. - /// - /// #Params - - /// blob_storage_reader_handle: configuration of blob storage reader handle that will allow to read revocation tails (returned by `indy_open_blob_storage_reader`) - /// rev_state_json: revocation registry state json - /// rev_reg_def_json: revocation registry definition json related to `rev_reg_id` in a credential - /// rev_reg_delta_json: revocation registry definition delta which covers the gap form original `rev_state_json` creation till the requested timestamp - /// timestamp: time represented as a total number of seconds from Unix Epoch - /// cred_rev_id: user credential revocation id in revocation registry (match to `cred_rev_id` in a credential) - /// - /// #Returns - /// revocation state json: - /// { - /// "rev_reg": , - /// "witness": , (opaque type that contains data structures internal to Ursa. - /// It should not be parsed and are likely to change in future versions). 
- /// "timestamp" : integer - /// } - /// - /// #Errors - /// Common* - /// Wallet* - /// Anoncreds* - pub async fn update_revocation_state( - &self, - blob_storage_reader_handle: i32, - mut rev_state: RevocationState, - rev_reg_def: RevocationRegistryDefinition, - rev_reg_delta: RevocationRegistryDelta, - timestamp: u64, - cred_rev_id: String, - ) -> IndyResult { - trace!( - "update_revocation_state > blob_storage_reader_handle {:?} \ - rev_state {:?} rev_reg_def {:?} rev_reg_delta {:?} \ - timestamp {:?} cred_rev_id {:?}", - blob_storage_reader_handle, - rev_state, - rev_reg_def, - rev_reg_delta, - timestamp, - cred_rev_id - ); - - let revocation_registry_definition = RevocationRegistryDefinitionV1::from(rev_reg_def); - let rev_reg_delta = RevocationRegistryDeltaV1::from(rev_reg_delta); - let rev_idx = AnoncredsHelpers::parse_cred_rev_id(&cred_rev_id)?; - - let sdk_tails_accessor = SDKTailsAccessor::new( - self.blob_storage_service.clone(), - blob_storage_reader_handle, - &revocation_registry_definition, - ) - .await?; - - rev_state.witness.update( - rev_idx, - revocation_registry_definition.value.max_cred_num, - &rev_reg_delta.value, - &sdk_tails_accessor, - )?; - - rev_state.rev_reg = RevocationRegistry::from(rev_reg_delta.value); - rev_state.timestamp = timestamp; - - let rev_state_json = serde_json::to_string(&rev_state).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize RevocationState", - )?; - - let res = Ok(rev_state_json); - trace!("update_revocation_state < {:?}", res); - res - } - - fn _get_credential_info(&self, referent: &str, credential: Credential) -> CredentialInfo { - let credential_values: HashMap = credential - .values - .0 - .into_iter() - .map(|(attr, values)| (attr, values.raw)) - .collect(); - - CredentialInfo { - referent: referent.to_string(), - attrs: credential_values, - schema_id: credential.schema_id, - cred_def_id: credential.cred_def_id, - rev_reg_id: credential.rev_reg_id, - cred_rev_id: credential - .signature - .extract_index() - .map(|idx| idx.to_string()), - } - } - - fn _get_credential(&self, record: &WalletRecord) -> IndyResult<(String, Credential)> { - let referent = record.get_id(); - - let value = record.get_value().ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "Credential not found for id: {}", - ) - })?; - - let credential: Credential = serde_json::from_str(value) - .to_indy(IndyErrorKind::InvalidState, "Cannot deserialize Credential")?; - - Ok((referent.to_string(), credential)) - } - - async fn _query_requested_credentials( - &self, - wallet_handle: WalletHandle, - query_json: &Query, - predicate_info: Option<&PredicateInfo>, - interval: &Option, - ) -> IndyResult> { - trace!( - "_query_requested_credentials > wallet_handle {:?} \ - query_json {:?} predicate_info {:?}", - wallet_handle, - query_json, - predicate_info - ); - - let mut credentials_search = self - .wallet_service - .search_indy_records::( - wallet_handle, - &query_json.to_string(), - &SearchOptions::id_value(), - ) - .await?; - - let credentials = self - ._get_requested_credentials(&mut credentials_search, predicate_info, interval, None) - .await?; - - let res = Ok(credentials); - trace!("_query_requested_credentials < {:?}", res); - res - } - - async fn _get_requested_credentials( - &self, - credentials_search: &mut WalletSearch, - predicate_info: Option<&PredicateInfo>, - interval: &Option, - max_count: Option, - ) -> IndyResult> { - let mut credentials: Vec = Vec::new(); - - if let Some(0) = max_count { - return Ok(vec![]); - } - - while let 
Some(credential_record) = credentials_search.fetch_next_record().await? { - let (referent, credential) = self._get_credential(&credential_record)?; - - if let Some(predicate) = predicate_info { - let values = self - .prover_service - .get_credential_values_for_attribute(&credential.values.0, &predicate.name) - .ok_or_else(|| { - err_msg(IndyErrorKind::InvalidState, "Credential values not found") - })?; - - let satisfy = self - .prover_service - .attribute_satisfy_predicate(predicate, &values.encoded)?; - if !satisfy { - continue; - } - } - - credentials.push(RequestedCredential { - cred_info: self._get_credential_info(&referent, credential), - interval: interval.clone(), - }); - - if let Some(mut count) = max_count { - count -= 1; - if count == 0 { - break; - } - } - } - - Ok(credentials) - } - - async fn _wallet_get_master_secret( - &self, - wallet_handle: WalletHandle, - key: &str, - ) -> IndyResult { - self.wallet_service - .get_indy_object(wallet_handle, &key, &RecordOptions::id_value()) - .await - } - - async fn _get_credential_attr_tag_policy( - &self, - wallet_handle: WalletHandle, - cred_def_id: &CredentialDefinitionId, - ) -> IndyResult { - let catpol = self - .wallet_service - .get_indy_opt_object::( - wallet_handle, - &cred_def_id.0, - &RecordOptions::id_value(), - ) - .await? - .as_ref() - .map(serde_json::to_string) - .transpose() - .to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize CredentialAttrTagPolicy", - )? - .unwrap_or_else(|| Value::Null.to_string()); - - Ok(catpol) - } -} diff --git a/libvdrtools/src/controllers/anoncreds/tails.rs b/libvdrtools/src/controllers/anoncreds/tails.rs deleted file mode 100644 index 7f29a1eb87..0000000000 --- a/libvdrtools/src/controllers/anoncreds/tails.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::sync::Arc; - -use indy_api_types::errors::prelude::*; -use log::trace; - -use ursa::{ - cl::{RevocationTailsAccessor, RevocationTailsGenerator, Tail}, - errors::prelude::{UrsaCryptoError, UrsaCryptoErrorKind}, -}; - -use crate::utils::crypto::base58::{FromBase58, ToBase58}; - -use crate::{ - domain::anoncreds::revocation_registry_definition::RevocationRegistryDefinitionV1, - services::BlobStorageService, -}; - -const TAILS_BLOB_TAG_SZ: u8 = 2; -const TAIL_SIZE: usize = Tail::BYTES_REPR_SIZE; - -pub(crate) struct SDKTailsAccessor { - tails_service: Arc, - tails_reader_handle: i32, -} - -impl SDKTailsAccessor { - pub(crate) async fn new( - tails_service: Arc, - tails_reader_handle: i32, - rev_reg_def: &RevocationRegistryDefinitionV1, - ) -> IndyResult { - let tails_hash = - rev_reg_def.value.tails_hash.from_base58().map_err(|_| { - err_msg(IndyErrorKind::InvalidState, "Invalid base58 for Tails hash") - })?; - - let tails_reader_handle = tails_service - .open_blob( - tails_reader_handle, - &rev_reg_def.value.tails_location, - tails_hash.as_slice(), - ) - .await?; - - Ok(SDKTailsAccessor { - tails_service, - tails_reader_handle, - }) - } -} - -impl Drop for SDKTailsAccessor { - fn drop(&mut self) { - #[allow(unused_must_use)] //TODO - { - self.tails_service - .close(self.tails_reader_handle) - .map_err(map_err_err!()); - } - } -} - -impl RevocationTailsAccessor for SDKTailsAccessor { - fn access_tail( - &self, - tail_id: u32, - accessor: &mut dyn FnMut(&Tail), - ) -> Result<(), UrsaCryptoError> { - trace!("access_tail > tail_id {:?}", tail_id); - - // FIXME: Potentially it is significant lock - let tail_bytes = self - .tails_service - .read( - self.tails_reader_handle, - TAIL_SIZE, - TAIL_SIZE * tail_id as usize + TAILS_BLOB_TAG_SZ as usize, 
- ) - .map_err(|_| { - UrsaCryptoError::from_msg( - UrsaCryptoErrorKind::InvalidState, - "Can't read tail bytes from blob storage", - ) - })?; // FIXME: IO error should be returned - - let tail = Tail::from_bytes(tail_bytes.as_slice())?; - accessor(&tail); - - let res = Ok(()); - trace!("access_tail < {:?}", res); - res - } -} - -pub(crate) async fn store_tails_from_generator( - service: Arc, - writer_handle: i32, - rtg: &mut RevocationTailsGenerator, -) -> IndyResult<(String, String)> { - trace!( - "store_tails_from_generator > writer_handle {:?}", - writer_handle - ); - - let blob_handle = service.create_blob(writer_handle).await?; - - let version = vec![0u8, TAILS_BLOB_TAG_SZ]; - service.append(blob_handle, version.as_slice()).await?; - - while let Some(tail) = rtg.try_next()? { - let tail_bytes = tail.to_bytes()?; - service.append(blob_handle, tail_bytes.as_slice()).await?; - } - - let tails_info = service - .finalize(blob_handle) - .await - .map(|(location, hash)| (location, hash.to_base58()))?; - - let res = Ok(tails_info); - trace!("store_tails_from_generator < {:?}", res); - res -} diff --git a/libvdrtools/src/controllers/anoncreds/verifier.rs b/libvdrtools/src/controllers/anoncreds/verifier.rs deleted file mode 100644 index a3542ddec7..0000000000 --- a/libvdrtools/src/controllers/anoncreds/verifier.rs +++ /dev/null @@ -1,238 +0,0 @@ -use std::sync::Arc; - -use indy_api_types::errors::prelude::*; -use log::trace; - -use crate::{ - domain::anoncreds::{ - credential_definition::{cred_defs_map_to_cred_defs_v1_map, CredentialDefinitions}, - proof::Proof, - proof_request::ProofRequest, - revocation_registry::{rev_regs_map_to_rev_regs_local_map, RevocationRegistries}, - revocation_registry_definition::{ - rev_reg_defs_map_to_rev_reg_defs_v1_map, RevocationRegistryDefinitions, - }, - schema::{schemas_map_to_schemas_v1_map, Schemas}, - }, - services::VerifierService, -}; - -pub struct VerifierController { - verifier_service: Arc, -} - -impl VerifierController { - pub(crate) fn new(verifier_service: Arc) -> VerifierController { - VerifierController { verifier_service } - } - - /// Verifies a proof (of multiple credential). - /// All required schemas, public keys and revocation registries must be provided. - /// - /// IMPORTANT: You must use *_id's (`schema_id`, `cred_def_id`, `rev_reg_id`) listed in `proof[identifiers]` - /// as the keys for corresponding `schemas_json`, `credential_defs_json`, `rev_reg_defs_json`, `rev_regs_json` objects. - /// - /// #Params - /// wallet_handle: wallet handle (created by open_wallet). 
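The tails accessor deleted above reads each tail at a fixed offset inside the blob: a two-byte version tag followed by fixed-size tails laid out back to back. A standalone sketch of that offset arithmetic is below; `tail_size` only stands in for Ursa's `Tail::BYTES_REPR_SIZE`, whose concrete value is not assumed here.

```rust
const TAILS_BLOB_TAG_SZ: usize = 2; // two-byte version tag at the start of the blob

/// Byte offset of tail `tail_id` inside the tails blob, mirroring the arithmetic in
/// the deleted SDKTailsAccessor::access_tail: skip the version tag, then index into
/// fixed-size tails. `tail_size` stands in for Ursa's Tail::BYTES_REPR_SIZE.
fn tail_offset(tail_id: u32, tail_size: usize) -> usize {
    TAILS_BLOB_TAG_SZ + tail_size * tail_id as usize
}

fn main() {
    let tail_size = 128; // illustrative value only
    assert_eq!(tail_offset(0, tail_size), 2);
    assert_eq!(tail_offset(3, tail_size), 2 + 3 * 128);
    println!("tail 3 starts at byte {}", tail_offset(3, tail_size));
}
```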
- - /// proof_request_json: proof request json - /// { - /// "name": string, - /// "version": string, - /// "nonce": string, - a decimal number represented as a string (use `indy_generate_nonce` function to generate 80-bit number) - /// "requested_attributes": { // set of requested attributes - /// "": , // see below - /// ..., - /// }, - /// "requested_predicates": { // set of requested predicates - /// "": , // see below - /// ..., - /// }, - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval for each attribute - /// // (can be overridden on attribute level) - /// "ver": Optional - proof request version: - /// - omit or "1.0" to use unqualified identifiers for restrictions - /// - "2.0" to use fully qualified identifiers for restrictions - /// } - /// proof_json: created for request proof json - /// { - /// "requested_proof": { - /// "revealed_attrs": { - /// "requested_attr1_id": {sub_proof_index: number, raw: string, encoded: string}, // NOTE: check that `encoded` value match to `raw` value on application level - /// "requested_attr4_id": {sub_proof_index: number: string, encoded: string}, // NOTE: check that `encoded` value match to `raw` value on application level - /// }, - /// "revealed_attr_groups": { - /// "requested_attr5_id": { - /// "sub_proof_index": number, - /// "values": { - /// "attribute_name": { - /// "raw": string, - /// "encoded": string - /// } - /// }, // NOTE: check that `encoded` value match to `raw` value on application level - /// } - /// }, - /// "unrevealed_attrs": { - /// "requested_attr3_id": {sub_proof_index: number} - /// }, - /// "self_attested_attrs": { - /// "requested_attr2_id": self_attested_value, - /// }, - /// "requested_predicates": { - /// "requested_predicate_1_referent": {sub_proof_index: int}, - /// "requested_predicate_2_referent": {sub_proof_index: int}, - /// } - /// } - /// "proof": { - /// "proofs": [ , , ], - /// "aggregated_proof": - /// } - /// "identifiers": [{schema_id, cred_def_id, Optional, Optional}] - /// } - /// schemas_json: all schemas participating in the proof - /// { - /// : , - /// : , - /// : , - /// } - /// credential_defs_json: all credential definitions participating in the proof - /// { - /// "cred_def1_id": , - /// "cred_def2_id": , - /// "cred_def3_id": , - /// } - /// rev_reg_defs_json: all revocation registry definitions participating in the proof - /// { - /// "rev_reg_def1_id": , - /// "rev_reg_def2_id": , - /// "rev_reg_def3_id": , - /// } - /// rev_regs_json: all revocation registries participating in the proof - /// { - /// "rev_reg_def1_id": { - /// "timestamp1": , - /// "timestamp2": , - /// }, - /// "rev_reg_def2_id": { - /// "timestamp3": - /// }, - /// "rev_reg_def3_id": { - /// "timestamp4": - /// }, - /// } - /// where - /// attr_referent: Proof-request local identifier of requested attribute - /// attr_info: Describes requested attribute - /// { - /// "name": Optional, // attribute name, (case insensitive and ignore spaces) - /// "names": Optional<[string, string]>, // attribute names, (case insensitive and ignore spaces) - /// // NOTE: should either be "name" or "names", not both and not none of them. - /// // Use "names" to specify several attributes that have to match a single credential. 
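The IMPORTANT note above requires the schemas, credential definitions, revocation registry definitions and revocation registries passed to verification to be keyed by the ids listed under `proof["identifiers"]`. Below is a minimal sketch of collecting those keys from a proof JSON with `serde_json`; the helper name and the placeholder proof are illustrative, not part of this crate.

```rust
use serde_json::{json, Value};

/// Collects the schema_id / cred_def_id / rev_reg_id values listed in
/// proof["identifiers"], which must be used as the keys of the schemas_json,
/// credential_defs_json, rev_reg_defs_json and rev_regs_json maps.
fn identifier_keys(proof: &Value) -> (Vec<String>, Vec<String>, Vec<String>) {
    let mut schema_ids = Vec::new();
    let mut cred_def_ids = Vec::new();
    let mut rev_reg_ids = Vec::new();

    for id in proof["identifiers"].as_array().into_iter().flatten() {
        if let Some(s) = id["schema_id"].as_str() {
            schema_ids.push(s.to_owned());
        }
        if let Some(s) = id["cred_def_id"].as_str() {
            cred_def_ids.push(s.to_owned());
        }
        // rev_reg_id is optional and absent for non-revocable credentials.
        if let Some(s) = id["rev_reg_id"].as_str() {
            rev_reg_ids.push(s.to_owned());
        }
    }
    (schema_ids, cred_def_ids, rev_reg_ids)
}

fn main() {
    // Placeholder proof carrying only the identifiers section relevant here.
    let proof = json!({
        "identifiers": [
            { "schema_id": "NcYxiDXkpYi6ov5FcYDi1e:2:employment:1.0",
              "cred_def_id": "NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag",
              "rev_reg_id": null,
              "timestamp": null }
        ]
    });
    println!("{:?}", identifier_keys(&proof));
}
```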
- /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// predicate_referent: Proof-request local identifier of requested attribute predicate - /// predicate_info: Describes requested attribute predicate - /// { - /// "name": attribute name, (case insensitive and ignore spaces) - /// "p_type": predicate type (">=", ">", "<=", "<") - /// "p_value": predicate value - /// "restrictions": Optional, // see below - /// "non_revoked": Optional<>, // see below, - /// // If specified prover must proof non-revocation - /// // for date in this interval this attribute - /// // (overrides proof level interval) - /// } - /// non_revoc_interval: Defines non-revocation interval - /// { - /// "from": Optional, // timestamp of interval beginning - /// "to": Optional, // timestamp of interval ending - /// } - /// where wql query: indy-sdk/docs/design/011-wallet-query-language/README.md - /// The list of allowed keys that can be combine into complex queries. - /// "schema_id": , - /// "schema_issuer_did": , - /// "schema_name": , - /// "schema_version": , - /// "issuer_did": , - /// "cred_def_id": , - /// "rev_reg_id": , // "None" as string if not present - /// // the following keys can be used for every `attribute name` in credential. - /// "attr::::marker": "1", - to filter based on existence of a specific attribute - /// "attr::::value": , - to filter based on value of a specific attribute - /// - /// - /// #Returns - /// valid: true - if signature is valid, false - otherwise - /// - /// #Errors - /// Anoncreds* - /// Common* - /// Wallet* - pub fn verify_proof( - &self, - proof_req: ProofRequest, - proof: Proof, - schemas: Schemas, - cred_defs: CredentialDefinitions, - rev_reg_defs: RevocationRegistryDefinitions, - rev_regs: RevocationRegistries, - ) -> IndyResult { - trace!( - "verify_proof > proof_req {:?} \ - proof {:?} schemas {:?} cred_defs {:?} \ - rev_reg_defs {:?} rev_regs {:?}", - proof_req, - proof, - schemas, - cred_defs, - rev_reg_defs, - rev_regs - ); - - let schemas = schemas_map_to_schemas_v1_map(schemas); - let cred_defs = cred_defs_map_to_cred_defs_v1_map(cred_defs); - let rev_reg_defs = rev_reg_defs_map_to_rev_reg_defs_v1_map(rev_reg_defs); - let rev_regs = rev_regs_map_to_rev_regs_local_map(rev_regs); - - let valid = self.verifier_service.verify( - &proof, - &proof_req.value(), - &schemas, - &cred_defs, - &rev_reg_defs, - &rev_regs, - )?; - - let res = Ok(valid); - trace!("verify_proof < {:?}", res); - res - } - - /// Generates 80-bit numbers that can be used as a nonce for proof request. - /// - /// #Params - - /// - /// #Returns - /// nonce: generated number as a string - /// - pub fn generate_nonce(&self) -> IndyResult { - trace!("generate_nonce >"); - - let nonce = self - .verifier_service - .generate_nonce()? 
- .to_dec() - .to_indy(IndyErrorKind::InvalidState, "Cannot serialize Nonce")?; - - let res = Ok(nonce); - trace!("generate_nonce < {:?}", res); - res - } -} diff --git a/libvdrtools/src/controllers/blob_storage.rs b/libvdrtools/src/controllers/blob_storage.rs deleted file mode 100644 index 6dc2a5bbef..0000000000 --- a/libvdrtools/src/controllers/blob_storage.rs +++ /dev/null @@ -1,43 +0,0 @@ -use std::sync::Arc; - -use indy_api_types::errors::prelude::*; - -use crate::services::BlobStorageService; - -pub struct BlobStorageController { - blob_storage_service: Arc, -} - -impl BlobStorageController { - pub(crate) fn new(blob_storage_service: Arc) -> BlobStorageController { - BlobStorageController { - blob_storage_service, - } - } - - pub async fn open_reader(&self, type_: String, config: String) -> IndyResult { - trace!("open_reader > type_ {:?} config {:?}", type_, config); - - let handle = self - .blob_storage_service - .open_reader(&type_, &config) - .await?; - - let res = Ok(handle); - trace!("open_reader < {:?}", res); - res - } - - pub async fn open_writer(&self, type_: String, config: String) -> IndyResult { - trace!("open_writer > type_ {:?} config {:?}", type_, config); - - let handle = self - .blob_storage_service - .open_writer(&type_, &config) - .await?; - - let res = Ok(handle); - trace!("open_writer < {:?}", res); - res - } -} diff --git a/libvdrtools/src/controllers/config.rs b/libvdrtools/src/controllers/config.rs deleted file mode 100644 index 0112690172..0000000000 --- a/libvdrtools/src/controllers/config.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::env; - -use crate::domain::IndyConfig; - -pub struct ConfigController {} - -impl ConfigController { - pub(crate) fn new() -> ConfigController { - ConfigController {} - } - - /// Set libvdrtools runtime configuration. Can be optionally called to change current params. - /// - /// #Params - /// config: { - /// "crypto_thread_pool_size": Optional - size of thread pool for the most expensive crypto operations. (4 by default) - /// "collect_backtrace": Optional - whether errors backtrace should be collected. - /// Capturing of backtrace can affect library performance. - /// NOTE: must be set before invocation of any other API functions. - /// } - /// - /// #Errors - /// Common* - pub fn set_runtime_config(&self, config: IndyConfig) { - trace!("set_runtime_config > {:?}", config); - - // FIXME: Deprecate this param. - if let Some(_crypto_thread_pool_size) = config.crypto_thread_pool_size { - warn!("indy_set_runtime_config ! unsupported param used"); - } - - match config.collect_backtrace { - Some(true) => env::set_var("RUST_BACKTRACE", "1"), - Some(false) => env::set_var("RUST_BACKTRACE", "0"), - _ => {} - } - - trace!("set_runtime_config <"); - } -} diff --git a/libvdrtools/src/controllers/crypto.rs b/libvdrtools/src/controllers/crypto.rs index 326b19c345..2cd94972ae 100644 --- a/libvdrtools/src/controllers/crypto.rs +++ b/libvdrtools/src/controllers/crypto.rs @@ -40,10 +40,10 @@ impl CryptoController { /// wallet_handle: Wallet handle (created by open_wallet). /// key_json: Key information as json. Example: /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set random one will be created). - /// Can be UTF-8, base64 or hex string. - /// "crypto_type": string, // Optional (if not set then ed25519 curve is used); Currently only 'ed25519' value is supported for this field. 
- /// } + /// "seed": string, (optional) Seed that allows deterministic key creation (if not set + /// random one will be created). Can be UTF-8, base64 or hex + /// string. "crypto_type": string, // Optional (if not set then ed25519 curve is used); + /// Currently only 'ed25519' value is supported for this field. } /// /// #Returns /// verkey: Ver key of generated key pair, also used as key identifier @@ -82,9 +82,9 @@ impl CryptoController { /// #Params /// wallet_handle: wallet handler (created by open_wallet). - /// signer_vk: id (verkey) of message signer. The key must be created by calling indy_create_key or indy_create_and_store_my_did - /// message_raw: a pointer to first byte of message to be signed - /// message_len: a message length + /// signer_vk: id (verkey) of message signer. The key must be created by calling indy_create_key + /// or indy_create_and_store_my_did message_raw: a pointer to first byte of message to be + /// signed message_len: a message length /// /// #Returns /// a signature string @@ -110,7 +110,7 @@ impl CryptoController { let key: Key = self .wallet_service - .get_indy_object(wallet_handle, &my_vk, &RecordOptions::id_value()) + .get_indy_object(wallet_handle, my_vk, &RecordOptions::id_value()) .await?; let res = self.crypto_service.sign(&key, msg).await?; @@ -163,7 +163,8 @@ impl CryptoController { Ok(res) } - /// Packs a message by encrypting the message and serializes it in a JWE-like format (Experimental) + /// Packs a message by encrypting the message and serializes it in a JWE-like format + /// (Experimental) /// /// Note to use DID keys with this function you can call indy_key_for_did to get key id (verkey) /// for specific DID. @@ -173,11 +174,12 @@ impl CryptoController { /// wallet_handle: wallet handle (created by open_wallet). /// message: a pointer to the first byte of the message to be packed /// message_len: the length of the message - /// receivers: a string in the format of a json list which will contain the list of receiver's keys - /// the message is being encrypted for. + /// receivers: a string in the format of a json list which will contain the list of receiver's + /// keys the message is being encrypted for. 
/// Example: /// "[, ]" - /// sender: the sender's verkey as a string When null pointer is used in this parameter, anoncrypt is used + /// sender: the sender's verkey as a string When null pointer is used in this parameter, + /// anoncrypt is used /// /// #Returns /// a JWE using authcrypt alg is defined below: @@ -188,18 +190,18 @@ impl CryptoController { /// "alg": "Authcrypt", /// "recipients": [ /// { - /// "encrypted_key": base64URLencode(libsodium.crypto_box(my_key, their_vk, cek, cek_iv)) - /// "header": { + /// "encrypted_key": base64URLencode(libsodium.crypto_box(my_key, their_vk, cek, + /// cek_iv)) "header": { /// "kid": "base58encode(recipient_verkey)", - /// "sender" : base64URLencode(libsodium.crypto_box_seal(their_vk, base58encode(sender_vk)), - /// "iv" : base64URLencode(cek_iv) + /// "sender" : base64URLencode(libsodium.crypto_box_seal(their_vk, + /// base58encode(sender_vk)), "iv" : base64URLencode(cek_iv) /// } /// }, /// ], /// })", /// "iv": , - /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, cek), - /// "tag": + /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, + /// cek), "tag": /// } /// /// Alternative example in using anoncrypt alg is defined below: @@ -218,8 +220,8 @@ impl CryptoController { /// ], /// })", /// "iv": b64URLencode(iv), - /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, cek), - /// "tag": b64URLencode(tag) + /// "ciphertext": b64URLencode(encrypt_detached({'@type'...}, protected_value_encoded, iv, + /// cek), "tag": b64URLencode(tag) /// } /// /// @@ -294,7 +296,7 @@ impl CryptoController { }); } // end for-loop - Ok(self._base64_encode_protected(encrypted_recipients_struct, false)?) + self._base64_encode_protected(encrypted_recipients_struct, false) } async fn _prepare_protected_authcrypt( @@ -335,7 +337,7 @@ impl CryptoController { }); } // end for-loop - Ok(self._base64_encode_protected(encrypted_recipients_struct, true)?) 
+ self._base64_encode_protected(encrypted_recipients_struct, true) } fn _base64_encode_protected( diff --git a/libvdrtools/src/controllers/did.rs b/libvdrtools/src/controllers/did.rs index ebdbe7f4fb..95905df4fd 100644 --- a/libvdrtools/src/controllers/did.rs +++ b/libvdrtools/src/controllers/did.rs @@ -1,26 +1,18 @@ use std::{collections::HashMap, sync::Arc}; -use crate::utils::crypto::base58::{FromBase58, ToBase58}; use indy_api_types::{errors::prelude::*, WalletHandle}; use indy_wallet::{RecordOptions, SearchOptions, WalletService}; use crate::{ - domain::{ - crypto::{ - did::{ - Did, DidMetadata, DidMethod, DidValue, DidWithMeta, MyDidInfo, TemporaryDid, - TheirDid, TheirDidInfo, - }, - key::KeyInfo, + domain::crypto::{ + did::{ + Did, DidMetadata, DidValue, DidWithMeta, MyDidInfo, TemporaryDid, TheirDid, + TheirDidInfo, }, - ledger::{ - attrib::{AttribData, Endpoint, GetAttrReplyResult}, - did::{GetNymReplyResult, GetNymResultDataV0}, - response::Reply, - }, - pairwise::Pairwise, + key::KeyInfo, }, services::CryptoService, + utils::crypto::base58::{DecodeBase58, ToBase58}, }; pub struct DidController { @@ -52,14 +44,14 @@ impl DidController { /// Example: /// { /// "did": string, (optional; - /// if not provided and cid param is false then the first 16 bit of the verkey will be used as a new DID; - /// if not provided and cid is true then the full verkey will be used as a new DID; - /// if provided, then keys will be replaced - key rotation use case) - /// "seed": string, (optional) Seed that allows deterministic did creation (if not set random one will be created). - /// Can be UTF-8, base64 or hex string. - /// "crypto_type": string, (optional; if not set then ed25519 curve is used; - /// currently only 'ed25519' value is supported for this field) - /// "cid": bool, (optional; if not set then false is used;) + /// if not provided and cid param is false then the first 16 bit of the verkey will + /// be used as a new DID; if not provided and cid is true then the full verkey + /// will be used as a new DID; if provided, then keys will be replaced - key + /// rotation use case) "seed": string, (optional) Seed that allows deterministic did + /// creation (if not set random one will be created). Can be + /// UTF-8, base64 or hex string. "crypto_type": string, (optional; if not set then + /// ed25519 curve is used; currently only 'ed25519' value is supported for + /// this field) "cid": bool, (optional; if not set then false is used;) /// "ledger_type": string, (optional) type of the ledger to create fully qualified did. /// "method_name": string, (optional) method name to create fully qualified did. /// } @@ -94,8 +86,8 @@ impl DidController { Err(err_msg( IndyErrorKind::DIDAlreadyExists, format!( - "DID \"{}\" already exists but with different Verkey. \ - You should specify Seed used for initial generation", + "DID \"{}\" already exists but with different Verkey. You should specify \ + Seed used for initial generation", did.did.0 ), ))?; @@ -126,9 +118,9 @@ impl DidController { /// did: target did to rotate keys. /// key_info: key information as json. Example: /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set random one will be created). - /// Can be UTF-8, base64 or hex string. - /// "crypto_type": string, (optional; if not set then ed25519 curve is used; + /// "seed": string, (optional) Seed that allows deterministic key creation (if not set + /// random one will be created). Can be UTF-8, base64 or hex + /// string. 
"crypto_type": string, (optional; if not set then ed25519 curve is used; /// currently only 'ed25519' value is supported for this field) /// } /// @@ -292,9 +284,9 @@ impl DidController { /// did_with_meta: { /// "did": string - DID stored in the wallet, /// "verkey": string - The DIDs transport key (ver key, key id), - /// "tempVerkey": string - Temporary DIDs transport key (ver key, key id), exist only during the rotation of the keys. - /// After rotation is done, it becomes a new verkey. - /// "metadata": string - The meta information stored with the DID + /// "tempVerkey": string - Temporary DIDs transport key (ver key, key id), exist only during + /// the rotation of the keys. After rotation is done, it becomes a + /// new verkey. "metadata": string - The meta information stored with the DID /// } /// /// #Errors @@ -400,7 +392,7 @@ impl DidController { "No value for DID record", )) .and_then(|tags_json| { - serde_json::from_str(&tags_json).to_indy( + serde_json::from_str(tags_json).to_indy( IndyErrorKind::InvalidState, format!("Cannot deserialize Did {:?}", did_id), ) @@ -419,7 +411,7 @@ impl DidController { "No value for DID record", )) .and_then(|tags_json| { - serde_json::from_str(&tags_json).to_indy( + serde_json::from_str(tags_json).to_indy( IndyErrorKind::InvalidState, format!("Cannot deserialize Did {:?}", did_id), ) @@ -435,7 +427,7 @@ impl DidController { .get_value() .ok_or_else(|| err_msg(IndyErrorKind::InvalidState, "No value for DID record")) .and_then(|tags_json| { - serde_json::from_str(&tags_json).to_indy( + serde_json::from_str(tags_json).to_indy( IndyErrorKind::InvalidState, format!("Cannot deserialize Did {:?}", did_id), ) @@ -447,8 +439,8 @@ impl DidController { let did_with_meta = DidWithMeta { did: did.did, verkey: did.verkey, - temp_verkey: temp_verkey, - metadata: metadata, + temp_verkey, + metadata, }; dids.push(did_with_meta); @@ -519,49 +511,6 @@ impl DidController { res } - /// Set/replaces endpoint information for the given DID. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did - The DID to resolve endpoint. - /// address - The DIDs endpoint address. indy-node and indy-plenum restrict this to ip_address:port - /// transport_key - The DIDs transport key (ver key, key id). - /// - /// #Returns - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn set_endpoint_for_did( - &self, - wallet_handle: WalletHandle, - did: DidValue, - endpoint: Endpoint, - ) -> IndyResult<()> { - trace!( - "set_endpoint_for_did > wallet_handle {:?} did {:?} endpoint {:?}", - wallet_handle, - did, - endpoint - ); - - self.crypto_service.validate_did(&did)?; - - if let Some(ref transport_key) = endpoint.verkey { - self.crypto_service.validate_key(transport_key).await?; - } - - self.wallet_service - .upsert_indy_object(wallet_handle, &did.0, &endpoint) - .await?; - - let res = Ok(()); - trace!("set_endpoint_for_did < {:?}", res); - res - } - /// Saves/replaces the meta information for the giving DID in the wallet. /// /// #Params @@ -610,7 +559,8 @@ impl DidController { /// did - The DID to retrieve metadata. /// /// #Returns - /// metadata - The meta information stored with the DID; Can be null if no metadata was saved for this DID. + /// metadata - The meta information stored with the DID; Can be null if no metadata was saved + /// for this DID. 
/// /// #Errors /// Common* @@ -665,8 +615,8 @@ impl DidController { return res; } - let did = &did.to_unqualified().0.from_base58()?; - let dverkey = &verkey.from_base58()?; + let did = &did.to_unqualified().0.decode_base58()?; + let dverkey = &verkey.decode_base58()?; let (first_part, second_part) = dverkey.split_at(16); @@ -681,175 +631,6 @@ impl DidController { res } - /// Update DID stored in the wallet to make fully qualified, or to do other DID maintenance. - /// - If the DID has no method, a method will be appended (prepend did:peer to a legacy did) - /// - If the DID has a method, a method will be updated (migrate did:peer to did:peer-new) - /// - /// Update DID related entities stored in the wallet. - /// - /// #Params - - /// wallet_handle: Wallet handle (created by open_wallet). - /// did: target DID stored in the wallet. - /// method: method to apply to the DID. - /// - /// #Returns - /// did: fully qualified form of did - /// - /// #Errors - /// Common* - /// Wallet* - /// Crypto* - pub async fn qualify_did( - &self, - wallet_handle: WalletHandle, - did: DidValue, - method: DidMethod, - ) -> IndyResult { - trace!( - "qualify_did > wallet_handle {:?} curr_did {:?} method {:?}", - wallet_handle, - did, - method - ); - - self.crypto_service.validate_did(&did)?; - - let mut curr_did: Did = self - .wallet_service - .get_indy_object::(wallet_handle, &did.0, &RecordOptions::id_value()) - .await?; - - curr_did.did = DidValue::new(&did.to_short().0, None, Some(&method.0))?; - - self.wallet_service - .delete_indy_record::(wallet_handle, &did.0) - .await?; - - self.wallet_service - .add_indy_object(wallet_handle, &curr_did.did.0, &curr_did, &HashMap::new()) - .await?; - - // move temporary Did - if let Ok(mut temp_did) = self - .wallet_service - .get_indy_object::(wallet_handle, &did.0, &RecordOptions::id_value()) - .await - { - temp_did.did = curr_did.did.clone(); - - self.wallet_service - .delete_indy_record::(wallet_handle, &did.0) - .await?; - - self.wallet_service - .add_indy_object(wallet_handle, &curr_did.did.0, &temp_did, &HashMap::new()) - .await?; - } - - // move metadata - self._update_dependent_entity_reference::( - wallet_handle, - &did.0, - &curr_did.did.0, - ) - .await?; - - // move endpoint - self._update_dependent_entity_reference::(wallet_handle, &did.0, &curr_did.did.0) - .await?; - - // move all pairwise - let mut pairwise_search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &RecordOptions::id_value()) - .await?; - - while let Some(pairwise_record) = pairwise_search.fetch_next_record().await? 
{ - let mut pairwise: Pairwise = pairwise_record - .get_value() - .ok_or_else(|| err_msg(IndyErrorKind::InvalidState, "No value for Pairwise record")) - .and_then(|pairwise_json| { - serde_json::from_str(&pairwise_json).map_err(|err| { - IndyError::from_msg( - IndyErrorKind::InvalidState, - format!("Cannot deserialize Pairwise {:?}", err), - ) - }) - })?; - - if pairwise.my_did.eq(&did) { - pairwise.my_did = curr_did.did.clone(); - - self.wallet_service - .update_indy_object(wallet_handle, &pairwise.their_did.0, &pairwise) - .await?; - } - } - - let res = Ok(curr_did.did.0); - trace!("qualify_did < {:?}", res); - res - } - - pub async fn get_nym_ack_process_and_store_their_did( - &self, - wallet_handle: WalletHandle, - did: DidValue, - get_nym_reply_result: IndyResult, - ) -> IndyResult { - trace!( - "get_nym_ack_process_and_store_their_did > \ - wallet_handle {:?} get_nym_reply_result {:?}", - wallet_handle, - get_nym_reply_result - ); - - let get_nym_reply = get_nym_reply_result?; - - let get_nym_response: Reply = serde_json::from_str(&get_nym_reply) - .to_indy( - IndyErrorKind::InvalidState, - "Invalid GetNymReplyResult json", - )?; - - let their_did_info = match get_nym_response.result() { - GetNymReplyResult::GetNymReplyResultV0(res) => { - if let Some(data) = &res.data { - let gen_nym_result_data: GetNymResultDataV0 = serde_json::from_str(data) - .to_indy(IndyErrorKind::InvalidState, "Invalid GetNymResultData json")?; - - TheirDidInfo::new( - gen_nym_result_data.dest.qualify(did.get_method()), - gen_nym_result_data.verkey, - ) - } else { - return Err(err_msg( - IndyErrorKind::WalletItemNotFound, - "Their DID isn't found on the ledger", - )); //TODO FIXME use separate error - } - } - GetNymReplyResult::GetNymReplyResultV1(res) => TheirDidInfo::new( - res.txn.data.did.qualify(did.get_method()), - res.txn.data.verkey, - ), - }; - - let their_did = self - .crypto_service - .create_their_did(&their_did_info) - .await?; - - self.wallet_service - .add_indy_object(wallet_handle, &their_did.did.0, &their_did, &HashMap::new()) - .await?; - - trace!("get_nym_ack_process_and_store_their_did <<<"); - - Ok(their_did) - } - async fn _update_dependent_entity_reference( &self, wallet_handle: WalletHandle, @@ -875,50 +656,6 @@ impl DidController { Ok(()) } - async fn _get_attrib_ack_process_store_endpoint_to_wallet( - &self, - wallet_handle: WalletHandle, - get_attrib_reply_result: IndyResult, - ) -> IndyResult { - trace!( - "_get_attrib_ack_process_store_endpoint_to_wallet > \ - wallet_handle {:?} get_attrib_reply_result {:?}", - wallet_handle, - get_attrib_reply_result - ); - - let get_attrib_reply = get_attrib_reply_result?; - - let get_attrib_reply: Reply = serde_json::from_str(&get_attrib_reply) - .to_indy( - IndyErrorKind::InvalidState, - "Invalid GetAttrReplyResult json", - )?; - - let (raw, did) = match get_attrib_reply.result() { - GetAttrReplyResult::GetAttrReplyResultV0(res) => (res.data, res.dest), - GetAttrReplyResult::GetAttrReplyResultV1(res) => (res.txn.data.raw, res.txn.data.did), - }; - - let attrib_data: AttribData = serde_json::from_str(&raw) - .to_indy(IndyErrorKind::InvalidState, "Invalid GetAttReply json")?; - - let endpoint = Endpoint::new(attrib_data.endpoint.ha, attrib_data.endpoint.verkey); - - self.wallet_service - .add_indy_object(wallet_handle, &did.0, &endpoint, &HashMap::new()) - .await?; - - let res = Ok(endpoint); - - trace!( - "_get_attrib_ack_process_store_endpoint_to_wallet < {:?}", - res - ); - - res - } - async fn _wallet_get_my_did( &self, wallet_handle: 
WalletHandle, diff --git a/libvdrtools/src/controllers/mod.rs b/libvdrtools/src/controllers/mod.rs index d9cb936566..960f6d8d9d 100644 --- a/libvdrtools/src/controllers/mod.rs +++ b/libvdrtools/src/controllers/mod.rs @@ -1,21 +1,9 @@ -mod anoncreds; -mod blob_storage; -#[macro_use] -mod config; mod crypto; pub(crate) mod did; mod non_secrets; -mod pairwise; mod wallet; -pub use anoncreds::{ - CredentialDefinitionId, IssuerController, ProverController, VerifierController, -}; - -pub(crate) use blob_storage::BlobStorageController; -pub(crate) use config::ConfigController; pub(crate) use crypto::CryptoController; pub(crate) use did::DidController; pub(crate) use non_secrets::NonSecretsController; -pub(crate) use pairwise::PairwiseController; pub(crate) use wallet::WalletController; diff --git a/libvdrtools/src/controllers/non_secrets.rs b/libvdrtools/src/controllers/non_secrets.rs index 733b3d49db..156bdfadf9 100644 --- a/libvdrtools/src/controllers/non_secrets.rs +++ b/libvdrtools/src/controllers/non_secrets.rs @@ -46,8 +46,7 @@ impl NonSecretsController { tags: Option, ) -> IndyResult<()> { trace!( - "add_record > wallet_handle {:?} type_ {:?} \ - id {:?} value {:?} tags {:?}", + "add_record > wallet_handle {:?} type_ {:?} id {:?} value {:?} tags {:?}", wallet_handle, type_, id, @@ -63,7 +62,7 @@ impl NonSecretsController { &type_, &id, &value, - &tags.unwrap_or_else(|| Tags::new()), + &tags.unwrap_or_default(), ) .await?; @@ -88,8 +87,7 @@ impl NonSecretsController { value: String, ) -> IndyResult<()> { trace!( - "update_record_value > wallet_handle {:?} type_ {:?} \ - id {:?} value {:?}", + "update_record_value > wallet_handle {:?} type_ {:?} id {:?} value {:?}", wallet_handle, type_, id, @@ -132,8 +130,7 @@ impl NonSecretsController { tags: Tags, ) -> IndyResult<()> { trace!( - "update_record_tags > wallet_handle {:?} type_ {:?} \ - id {:?} tags {:?}", + "update_record_tags > wallet_handle {:?} type_ {:?} id {:?} tags {:?}", wallet_handle, type_, id, @@ -178,8 +175,7 @@ impl NonSecretsController { tags: Tags, ) -> IndyResult<()> { trace!( - "add_record_tags > wallet_handle {:?} type_ {:?} \ - id {:?} tags {:?}", + "add_record_tags > wallet_handle {:?} type_ {:?} id {:?} tags {:?}", wallet_handle, type_, id, @@ -214,8 +210,7 @@ impl NonSecretsController { tag_names_json: String, ) -> IndyResult<()> { trace!( - "delete_record_tags > wallet_handle {:?} type_ {:?} \ - id {:?} tag_names_json {:?}", + "delete_record_tags > wallet_handle {:?} type_ {:?} id {:?} tag_names_json {:?}", wallet_handle, type_, id, @@ -298,8 +293,7 @@ impl NonSecretsController { options_json: String, ) -> IndyResult { trace!( - "get_record > wallet_handle {:?} type_ {:?} \ - id {:?} options_json {:?}", + "get_record > wallet_handle {:?} type_ {:?} id {:?} options_json {:?}", wallet_handle, type_, id, @@ -364,8 +358,7 @@ impl NonSecretsController { options_json: String, ) -> IndyResult { trace!( - "open_search > wallet_handle {:?} type_ {:?} \ - query_json {:?} options_json {:?}", + "open_search > wallet_handle {:?} type_ {:?} query_json {:?} options_json {:?}", wallet_handle, type_, query_json, diff --git a/libvdrtools/src/controllers/pairwise.rs b/libvdrtools/src/controllers/pairwise.rs deleted file mode 100644 index 11a05bb207..0000000000 --- a/libvdrtools/src/controllers/pairwise.rs +++ /dev/null @@ -1,242 +0,0 @@ -use std::{collections::HashMap, sync::Arc}; - -use indy_api_types::{errors::prelude::*, WalletHandle}; -use indy_wallet::{RecordOptions, WalletService}; -use log::trace; - -use crate::domain::{ - 
crypto::did::{Did, DidValue, TheirDid}, - pairwise::{Pairwise, PairwiseInfo}, -}; - -pub struct PairwiseController { - wallet_service: Arc, -} - -impl PairwiseController { - pub(crate) fn new(wallet_service: Arc) -> PairwiseController { - PairwiseController { wallet_service } - } - - /// Check if pairwise is exists. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// their_did: encrypted DID - /// - /// #Returns - /// exists: true - if pairwise is exists, false - otherwise - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn pairwise_exists( - &self, - wallet_handle: WalletHandle, - their_did: DidValue, - ) -> IndyResult { - trace!( - "pairwise_exists > wallet_handle {:?} their_did {:?}", - wallet_handle, - their_did - ); - - let exists = self - .wallet_service - .record_exists::(wallet_handle, &their_did.0) - .await?; - - let res = Ok(exists); - trace!("pairwise_exists < {:?}", res); - res - } - - /// Creates pairwise. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// their_did: encrypted DID - /// my_did: encrypted DID - /// metadata Optional: extra information for pairwise - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn create_pairwise( - &self, - wallet_handle: WalletHandle, - their_did: DidValue, - my_did: DidValue, - metadata: Option, - ) -> IndyResult<()> { - trace!( - "create_pairwise > wallet_handle {:?} \ - their_did {:?} my_did {:?} metadata {:?}", - wallet_handle, - their_did, - my_did, - metadata - ); - - self.wallet_service - .get_indy_record::(wallet_handle, &my_did.0, &RecordOptions::id()) - .await?; - - self.wallet_service - .get_indy_record::(wallet_handle, &their_did.0, &RecordOptions::id()) - .await?; - - let pairwise = Pairwise { - my_did, - their_did, - metadata, - }; - - self.wallet_service - .add_indy_object( - wallet_handle, - &pairwise.their_did.0, - &pairwise, - &HashMap::new(), - ) - .await?; - - let res = Ok(()); - trace!("create_pairwise < {:?}", res); - res - } - - /// Get list of saved pairwise. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// - /// #Returns - /// list_pairwise: list of saved pairwise - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn list_pairwise(&self, wallet_handle: WalletHandle) -> IndyResult { - trace!("list_pairwise > wallet_handle {:?}", wallet_handle); - - let mut search = self - .wallet_service - .search_indy_records::(wallet_handle, "{}", &RecordOptions::id_value()) - .await?; - - let mut pairwises: Vec = Vec::new(); - - while let Some(pairwise) = search.fetch_next_record().await? { - let pairwise = pairwise - .get_value() - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - format!("No value for pairwise {}", pairwise.get_id()), - ) - })? - .to_string(); - - pairwises.push(pairwise); - } - - let pairwises = serde_json::to_string(&pairwises) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize pairwise list")?; - - let res = Ok(pairwises); - trace!("list_pairwise < {:?}", res); - res - } - - /// Gets pairwise information for specific their_did. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). 
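// Illustrative note on the removed list_pairwise above: it serializes a Vec of the raw
// record *values*, so callers receive a JSON array of strings, each string itself being
// one serialized pairwise object ({"my_did": ..., "their_did": ..., "metadata": ...}).
// A small sketch of unpacking that double-encoded shape; parse_pairwise_list is a
// hypothetical caller-side helper, and the exact representation of a missing metadata
// field depends on the serde attributes on Pairwise.
fn parse_pairwise_list(list_json: &str) -> serde_json::Result<Vec<serde_json::Value>> {
    let outer: Vec<String> = serde_json::from_str(list_json)?;
    outer.iter().map(|s| serde_json::from_str(s)).collect()
}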
- - /// their_did: encoded Did - /// - /// #Returns - /// pairwise_info_json: did info associated with their did - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn get_pairwise( - &self, - wallet_handle: WalletHandle, - their_did: DidValue, - ) -> IndyResult { - trace!( - "get_pairwise > wallet_handle {:?} their_did {:?}", - wallet_handle, - their_did - ); - - let pairwise_info = PairwiseInfo::from( - self.wallet_service - .get_indy_object::( - wallet_handle, - &their_did.0, - &RecordOptions::id_value(), - ) - .await?, - ); - - let res = serde_json::to_string(&pairwise_info) - .to_indy(IndyErrorKind::InvalidState, "Can't serialize PairwiseInfo")?; - - let res = Ok(res); - trace!("get_pairwise < {:?}", res); - res - } - - /// Save some data in the Wallet for pairwise associated with Did. - /// - /// #Params - /// wallet_handle: wallet handler (created by open_wallet). - - /// their_did: encoded Did - /// metadata: some extra information for pairwise - /// - /// #Returns - /// Error code - /// - /// #Errors - /// Common* - /// Wallet* - pub async fn set_pairwise_metadata( - &self, - wallet_handle: WalletHandle, - their_did: DidValue, - metadata: Option, - ) -> IndyResult<()> { - trace!( - "set_pairwise_metadata > wallet_handle {:?} their_did {:?} metadata {:?}", - wallet_handle, - their_did, - metadata - ); - - let mut pairwise: Pairwise = self - .wallet_service - .get_indy_object(wallet_handle, &their_did.0, &RecordOptions::id_value()) - .await?; - - pairwise.metadata = metadata; - - self.wallet_service - .update_indy_object(wallet_handle, &their_did.0, &pairwise) - .await?; - - let res = Ok(()); - trace!("set_pairwise_metadata <<<"); - res - } -} diff --git a/libvdrtools/src/controllers/wallet.rs b/libvdrtools/src/controllers/wallet.rs index a5e45dcea0..980507ad89 100644 --- a/libvdrtools/src/controllers/wallet.rs +++ b/libvdrtools/src/controllers/wallet.rs @@ -1,21 +1,17 @@ use std::sync::Arc; // use async_std::task::spawn_blocking; - use indy_api_types::{ domain::wallet::{Config, Credentials, ExportConfig, KeyConfig, Record}, errors::prelude::*, WalletHandle, }; - use indy_utils::crypto::{ chacha20poly1305_ietf, chacha20poly1305_ietf::Key as MasterKey, randombytes, }; +use indy_wallet::{KeyDerivationData, MigrationResult, WalletService}; -use crate::utils::crypto::base58::ToBase58; -use indy_wallet::{KeyDerivationData, WalletService}; - -use crate::services::CryptoService; +use crate::{services::CryptoService, utils::crypto::base58::ToBase58}; pub struct WalletController { wallet_service: Arc, @@ -42,10 +38,10 @@ impl WalletController { /// Configured storage uses this identifier to lookup exact wallet data placement. /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage call. - /// "storage_config": optional, Storage configuration json. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type configuration is: + /// Custom storage types can be registered with indy_register_wallet_storage + /// call. "storage_config": optional, Storage configuration json. Storage type + /// defines set of supported keys. Can be optional if storage supports + /// default configuration. For 'default' storage type configuration is: /// { /// "path": optional, Path to the directory with wallet files. 
/// Defaults to $HOME/.indy_client/wallet. @@ -55,16 +51,16 @@ impl WalletController { /// credentials: Wallet credentials json /// { /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key derivation methods. - /// "storage_credentials": optional Credentials for wallet storage. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type should be empty. - /// "key_derivation_method": optional Algorithm to use for wallet key derivation: - /// ARGON2I_MOD - derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call - /// } + /// Look to key_derivation_method param for information about supported key + /// derivation methods. "storage_credentials": optional Credentials for wallet + /// storage. Storage type defines set of supported keys. Can be + /// optional if storage supports default configuration. For + /// 'default' storage type should be empty. "key_derivation_method": optional + /// Algorithm to use for wallet key derivation: ARGON2I_MOD - + /// derive secured wallet master key (used by default) ARGON2I_INT + /// - derive secured wallet master key (less secured but faster) + /// RAW - raw wallet key master provided (skip derivation). + /// RAW keys can be generated with indy_generate_wallet_key call } /// /// #Returns /// err: Error code @@ -106,41 +102,42 @@ impl WalletController { /// Configured storage uses this identifier to lookup exact wallet data placement. /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage call. - /// "storage_config": optional, Storage configuration json. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type configuration is: - /// { + /// Custom storage types can be registered with + /// indy_register_wallet_storage call. "storage_config": optional, Storage + /// configuration json. Storage type defines set of supported keys. + /// Can be optional if storage supports default configuration. For + /// 'default' storage type configuration is: { /// "path": optional, Path to the directory with wallet files. /// Defaults to $HOME/.indy_client/wallet. /// Wallet will be stored in the file {path}/{id}/sqlite.db /// } - /// "cache": optional, Cache configuration json. If omitted the cache is disabled (default). - /// { + /// "cache": optional, Cache configuration json. If omitted the cache is disabled + /// (default). { /// "size": optional, Number of items in cache, - /// "entities": List, Types of items being cached. eg. ["vdrtools::Did", "vdrtools::Key"] - /// "algorithm" optional, cache algorithm, defaults to lru, which is the only one supported for now. - /// } + /// "entities": List, Types of items being cached. eg. ["vdrtools::Did", + /// "vdrtools::Key"] "algorithm" optional, cache algorithm, defaults to + /// lru, which is the only one supported for now. } /// } /// credentials: Wallet credentials json /// { /// "key": string, Key or passphrase used for wallet key derivation. 
- /// Look to key_derivation_method param for information about supported key derivation methods. - /// "rekey": optional, If present than wallet master key will be rotated to a new one. - /// "storage_credentials": optional Credentials for wallet storage. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type should be empty. - /// "key_derivation_method": optional Algorithm to use for wallet key derivation: - /// ARGON2I_MOD - derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call - /// "rekey_derivation_method": optional Algorithm to use for wallet rekey derivation: - /// ARGON2I_MOD - derive secured wallet master rekey (used by default) - /// ARGON2I_INT - derive secured wallet master rekey (less secured but faster) - /// RAW - raw wallet rekey master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call - /// } + /// Look to key_derivation_method param for information about supported key + /// derivation methods. "rekey": optional, If present than wallet master key + /// will be rotated to a new one. "storage_credentials": optional Credentials + /// for wallet storage. Storage type defines set of supported keys. + /// Can be optional if storage supports default configuration. + /// For 'default' storage type should be empty. "key_derivation_method": + /// optional Algorithm to use for wallet key derivation: + /// ARGON2I_MOD - derive secured wallet master key (used by default) + /// ARGON2I_INT - derive secured wallet master key (less secured but faster) + /// RAW - raw wallet key master provided (skip derivation). + /// RAW keys can be generated with indy_generate_wallet_key call + /// "rekey_derivation_method": optional Algorithm to use for wallet rekey + /// derivation: ARGON2I_MOD - derive secured wallet master rekey + /// (used by default) ARGON2I_INT - derive secured wallet master + /// rekey (less secured but faster) RAW - raw wallet rekey master + /// provided (skip derivation). RAW keys can be generated + /// with indy_generate_wallet_key call } /// /// #Returns /// err: Error code @@ -209,10 +206,10 @@ impl WalletController { /// Configured storage uses this identifier to lookup exact wallet data placement. /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage call. - /// "storage_config": optional, Storage configuration json. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type configuration is: + /// Custom storage types can be registered with indy_register_wallet_storage + /// call. "storage_config": optional, Storage configuration json. Storage type + /// defines set of supported keys. Can be optional if storage supports + /// default configuration. For 'default' storage type configuration is: /// { /// "path": optional, Path to the directory with wallet files. /// Defaults to $HOME/.indy_client/wallet. @@ -222,16 +219,16 @@ impl WalletController { /// credentials: Wallet credentials json /// { /// "key": string, Key or passphrase used for wallet key derivation. 
- /// Look to key_derivation_method param for information about supported key derivation methods. - /// "storage_credentials": optional Credentials for wallet storage. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type should be empty. - /// "key_derivation_method": optional Algorithm to use for wallet key derivation: - /// ARGON2I_MOD - derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call - /// } + /// Look to key_derivation_method param for information about supported key + /// derivation methods. "storage_credentials": optional Credentials for wallet + /// storage. Storage type defines set of supported keys. Can be + /// optional if storage supports default configuration. For + /// 'default' storage type should be empty. "key_derivation_method": optional + /// Algorithm to use for wallet key derivation: ARGON2I_MOD - + /// derive secured wallet master key (used by default) + /// ARGON2I_INT - derive secured wallet master key (less secured but faster) + /// RAW - raw wallet key master provided (skip derivation). + /// RAW keys can be generated with indy_generate_wallet_key call } /// /// #Returns /// Error code @@ -271,13 +268,13 @@ impl WalletController { /// { /// "path": , Path of the file that contains exported wallet content /// "key": , Key or passphrase used for wallet export key derivation. - /// Look to key_derivation_method param for information about supported key derivation methods. - /// "key_derivation_method": optional Algorithm to use for wallet export key derivation: - /// ARGON2I_MOD - derive secured export key (used by default) - /// ARGON2I_INT - derive secured export key (less secured but faster) - /// RAW - raw export key provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call - /// } + /// Look to key_derivation_method param for information about supported key + /// derivation methods. "key_derivation_method": optional Algorithm to use for + /// wallet export key derivation: ARGON2I_MOD - derive secured + /// export key (used by default) ARGON2I_INT - derive secured + /// export key (less secured but faster) RAW - raw export key + /// provided (skip derivation). RAW keys can be generated + /// with indy_generate_wallet_key call } /// /// #Returns /// Error code @@ -323,10 +320,10 @@ impl WalletController { /// Configured storage uses this identifier to lookup exact wallet data placement. /// "storage_type": optional, Type of the wallet storage. Defaults to 'default'. /// 'Default' storage type allows to store wallet data in the local file. - /// Custom storage types can be registered with indy_register_wallet_storage call. - /// "storage_config": optional, Storage configuration json. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type configuration is: + /// Custom storage types can be registered with indy_register_wallet_storage + /// call. "storage_config": optional, Storage configuration json. Storage type + /// defines set of supported keys. Can be optional if storage supports + /// default configuration. For 'default' storage type configuration is: /// { /// "path": optional, Path to the directory with wallet files. 
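// Putting the re-wrapped doc comments above back together, a config/credentials pair has
// roughly this shape. Values are made-up placeholders; only keys named in those comments
// (id, storage_type, storage_config.path, key, key_derivation_method) are used, and the
// RAW key string merely stands in for output of indy_generate_wallet_key.
fn example_wallet_config() -> (serde_json::Value, serde_json::Value) {
    let config = serde_json::json!({
        "id": "my_wallet",
        "storage_type": "default",
        "storage_config": { "path": "/home/indy/.indy_client/wallet" }
    });
    let credentials = serde_json::json!({
        "key": "8dvfYSt5d1taSd6yJdpjq4emkwsPDDLYxkNFysFD2cZY",
        "key_derivation_method": "RAW"
    });
    (config, credentials)
}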
/// Defaults to $HOME/.indy_client/wallet. @@ -336,16 +333,16 @@ impl WalletController { /// credentials: Wallet credentials json /// { /// "key": string, Key or passphrase used for wallet key derivation. - /// Look to key_derivation_method param for information about supported key derivation methods. - /// "storage_credentials": optional Credentials for wallet storage. Storage type defines set of supported keys. - /// Can be optional if storage supports default configuration. - /// For 'default' storage type should be empty. - /// "key_derivation_method": optional Algorithm to use for wallet key derivation: - /// ARGON2I_MOD - derive secured wallet master key (used by default) - /// ARGON2I_INT - derive secured wallet master key (less secured but faster) - /// RAW - raw wallet key master provided (skip derivation). - /// RAW keys can be generated with indy_generate_wallet_key call - /// } + /// Look to key_derivation_method param for information about supported key + /// derivation methods. "storage_credentials": optional Credentials for wallet + /// storage. Storage type defines set of supported keys. Can be + /// optional if storage supports default configuration. For + /// 'default' storage type should be empty. "key_derivation_method": optional + /// Algorithm to use for wallet key derivation: ARGON2I_MOD - + /// derive secured wallet master key (used by default) + /// ARGON2I_INT - derive secured wallet master key (less secured but faster) + /// RAW - raw wallet key master provided (skip derivation). + /// RAW keys can be generated with indy_generate_wallet_key call } /// import_config: Import settings json. /// { /// "path": , path of the file that contains exported wallet content @@ -395,7 +392,7 @@ impl WalletController { old_wh: WalletHandle, new_wh: WalletHandle, migrate_fn: impl FnMut(Record) -> Result, E>, - ) -> IndyResult<()> + ) -> IndyResult where E: std::fmt::Display, { @@ -406,13 +403,14 @@ impl WalletController { /// Generate wallet master key. /// Returned key is compatible with "RAW" key derivation method. - /// It allows to avoid expensive key derivation for use cases when wallet keys can be stored in a secure enclave. + /// It allows to avoid expensive key derivation for use cases when wallet keys can be stored in + /// a secure enclave. /// /// #Params /// config: (optional) key configuration json. /// { - /// "seed": string, (optional) Seed that allows deterministic key creation (if not set random one will be created). - /// Can be UTF-8, base64 or hex string. + /// "seed": string, (optional) Seed that allows deterministic key creation (if not set random + /// one will be created). Can be UTF-8, base64 or hex string. /// } /// /// #Returns @@ -424,9 +422,7 @@ impl WalletController { pub fn generate_key(&self, config: Option) -> IndyResult { trace!("generate_key > config: {:?}", secret!(&config)); - let seed = config - .as_ref() - .and_then(|config| config.seed.as_ref().map(String::as_str)); + let seed = config.as_ref().and_then(|config| config.seed.as_deref()); let key = match self.crypto_service.convert_seed(seed)? 
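// The generate_key hunk above replaces a manual as_ref().map(String::as_str) chain with
// Option::as_deref, which is equivalent for an Option<String> seed. A tiny sketch of that
// equivalence (seed_as_str is a hypothetical helper, not code from this crate):
fn seed_as_str(seed: &Option<String>) -> Option<&str> {
    // Both forms yield the same Option<&str>; as_deref is the idiomatic spelling.
    let _long_form: Option<&str> = seed.as_ref().map(String::as_str);
    seed.as_deref()
}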
{ Some(seed) => randombytes::randombytes_deterministic( diff --git a/libvdrtools/src/domain/anoncreds/credential.rs b/libvdrtools/src/domain/anoncreds/credential.rs index dd0a07905a..9080a82f24 100644 --- a/libvdrtools/src/domain/anoncreds/credential.rs +++ b/libvdrtools/src/domain/anoncreds/credential.rs @@ -2,8 +2,6 @@ use std::collections::HashMap; use ursa::cl::{CredentialSignature, RevocationRegistry, SignatureCorrectnessProof, Witness}; -use indy_api_types::validation::Validatable; - use super::{ credential_definition::CredentialDefinitionId, revocation_registry_definition::RevocationRegistryId, schema::SchemaId, @@ -56,35 +54,3 @@ pub struct AttributeValues { pub raw: String, pub encoded: String, } - -impl Validatable for CredentialValues { - fn validate(&self) -> Result<(), String> { - if self.0.is_empty() { - return Err(String::from( - "CredentialValues validation failed: empty list has been passed", - )); - } - - Ok(()) - } -} - -impl Validatable for Credential { - fn validate(&self) -> Result<(), String> { - self.schema_id.validate()?; - self.cred_def_id.validate()?; - self.values.validate()?; - - if self.rev_reg_id.is_some() && (self.witness.is_none() || self.rev_reg.is_none()) { - return Err(String::from("Credential validation failed: `witness` and `rev_reg` must be passed for revocable Credential")); - } - - if self.values.0.is_empty() { - return Err(String::from( - "Credential validation failed: `values` is empty", - )); - } - - Ok(()) - } -} diff --git a/libvdrtools/src/domain/anoncreds/credential_attr_tag_policy.rs b/libvdrtools/src/domain/anoncreds/credential_attr_tag_policy.rs deleted file mode 100644 index 837030dd6f..0000000000 --- a/libvdrtools/src/domain/anoncreds/credential_attr_tag_policy.rs +++ /dev/null @@ -1,49 +0,0 @@ -use std::collections::HashSet; - -use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer}; - -#[derive(Debug)] -pub struct CredentialAttrTagPolicy { - pub taggable: HashSet, -} - -impl CredentialAttrTagPolicy { - pub fn is_taggable(&self, attr_name: &str) -> bool { - self.taggable - .contains(&attr_name.to_string().replace(" ", "").to_lowercase()) - } -} - -impl From> for CredentialAttrTagPolicy { - fn from(taggables: Vec) -> Self { - CredentialAttrTagPolicy { - taggable: taggables - .into_iter() - .map(|a| a.replace(" ", "").to_lowercase()) - .collect(), - } - } -} - -impl Serialize for CredentialAttrTagPolicy { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let mut seq = serializer.serialize_seq(Some(self.taggable.len()))?; - for ref element in &self.taggable { - seq.serialize_element(&element)?; - } - seq.end() - } -} - -impl<'de> Deserialize<'de> for CredentialAttrTagPolicy { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let attr_names = Vec::deserialize(deserializer)?; - Ok(CredentialAttrTagPolicy::from(attr_names)) - } -} diff --git a/libvdrtools/src/domain/anoncreds/credential_definition.rs b/libvdrtools/src/domain/anoncreds/credential_definition.rs index e3221a4e01..8a765e782e 100644 --- a/libvdrtools/src/domain/anoncreds/credential_definition.rs +++ b/libvdrtools/src/domain/anoncreds/credential_definition.rs @@ -1,24 +1,22 @@ +use std::collections::HashMap; + use indy_api_types::{ errors::{IndyErrorKind, IndyResult}, IndyError, }; -use std::collections::HashMap; - -use indy_api_types::validation::Validatable; - -use super::indy_identifiers; use ursa::cl::{ CredentialKeyCorrectnessProof, CredentialPrimaryPublicKey, CredentialPrivateKey, 
CredentialRevocationPublicKey, }; -use crate::utils::qualifier; - -use super::super::{ - anoncreds::{schema::SchemaId, DELIMITER}, - crypto::did::DidValue, - ledger::request::ProtocolVersion, +use super::{ + super::{ + anoncreds::{schema::SchemaId, DELIMITER}, + crypto::did::DidValue, + }, + indy_identifiers, }; +use crate::utils::qualifier; pub const CL_SIGNATURE_TYPE: &str = "CL"; @@ -35,20 +33,12 @@ impl SignatureType { } } -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Serialize, Deserialize, Clone, Default)] pub struct CredentialDefinitionConfig { #[serde(default)] pub support_revocation: bool, } -impl Default for CredentialDefinitionConfig { - fn default() -> Self { - CredentialDefinitionConfig { - support_revocation: false, - } - } -} - #[derive(Debug, Serialize, Deserialize)] pub struct CredentialDefinitionData { pub primary: CredentialPrimaryPublicKey, @@ -126,18 +116,6 @@ pub struct CredentialDefinitionCorrectnessProof { pub value: CredentialKeyCorrectnessProof, } -impl Validatable for CredentialDefinition { - fn validate(&self) -> Result<(), String> { - match self { - CredentialDefinition::CredentialDefinitionV1(cred_def) => { - cred_def.id.validate()?; - cred_def.schema_id.validate()?; - Ok(()) - } - } - } -} - qualifiable_type!(CredentialDefinitionId); impl CredentialDefinitionId { @@ -163,35 +141,24 @@ impl CredentialDefinitionId { "Unsupported DID method", )), None => { - let id = if ProtocolVersion::is_node_1_3() { - CredentialDefinitionId(format!( - "{}{}{}{}{}{}{}", - did.0, - DELIMITER, - Self::MARKER, - DELIMITER, - signature_type, - DELIMITER, - schema_id.0 - )) + let tag = if tag.is_empty() { + "".to_owned() } else { - let tag = if tag.is_empty() { - "".to_owned() - } else { - format!("{}{}", DELIMITER, tag) - }; - CredentialDefinitionId(format!( - "{}{}{}{}{}{}{}{}", - did.0, - DELIMITER, - Self::MARKER, - DELIMITER, - signature_type, - DELIMITER, - schema_id.0, - tag - )) + format!("{}{}", DELIMITER, tag) }; + + let id = CredentialDefinitionId(format!( + "{}{}{}{}{}{}{}{}", + did.0, + DELIMITER, + Self::MARKER, + DELIMITER, + signature_type, + DELIMITER, + schema_id.0, + tag + )); + Ok(id) } } @@ -260,7 +227,8 @@ impl CredentialDefinitionId { } if parts.len() == 16 { - // creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag + // creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov: + // NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag warn!("Deprecated format of FQ CredDef ID is used (creddef: suffix)"); let did = parts[2..5].join(DELIMITER); let signature_type = parts[6].to_string(); @@ -302,18 +270,6 @@ impl CredentialDefinitionId { } } -impl Validatable for CredentialDefinitionId { - fn validate(&self) -> Result<(), String> { - self.parts().ok_or(format!( - "Credential Definition Id validation failed: {:?}, doesn't match pattern", - self.0 - ))?; - Ok(()) - } -} - -impl Validatable for CredentialDefinitionConfig {} - #[cfg(test)] mod tests { use super::*; @@ -478,39 +434,4 @@ mod tests { assert_eq!(_tag(), tag); } } - - mod validate { - use super::*; - - #[test] - fn test_validate_cred_def_id_as_unqualified() { - _cred_def_id_unqualified().validate().unwrap(); - } - - #[test] - fn test_validate_cred_def_id_as_unqualified_without_tag() { - _cred_def_id_unqualified_without_tag().validate().unwrap(); - } - - #[test] - fn test_validate_cred_def_id_as_unqualified_with_schema_as_seq_no() { - _cred_def_id_unqualified_with_schema_as_seq_no() - .validate() - .unwrap(); - } - - #[test] - fn 
test_validate_cred_def_id_as_unqualified_with_schema_as_seq_no_without_tag() { - _cred_def_id_unqualified_with_schema_as_seq_no_without_tag() - .validate() - .unwrap(); - } - - #[test] - fn test_validate_cred_def_id_as_fully_qualified_with_schema_as_seq_no() { - _cred_def_id_qualified_with_schema_as_seq_no() - .validate() - .unwrap(); - } - } } diff --git a/libvdrtools/src/domain/anoncreds/credential_for_proof_request.rs b/libvdrtools/src/domain/anoncreds/credential_for_proof_request.rs deleted file mode 100644 index 41f15442ae..0000000000 --- a/libvdrtools/src/domain/anoncreds/credential_for_proof_request.rs +++ /dev/null @@ -1,24 +0,0 @@ -use std::collections::HashMap; - -use super::{credential::CredentialInfo, proof_request::NonRevocedInterval}; - -#[derive(Debug, Deserialize, Serialize)] -pub struct CredentialsForProofRequest { - pub attrs: HashMap>, - pub predicates: HashMap>, -} - -impl Default for CredentialsForProofRequest { - fn default() -> Self { - CredentialsForProofRequest { - attrs: HashMap::new(), - predicates: HashMap::new(), - } - } -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct RequestedCredential { - pub cred_info: CredentialInfo, - pub interval: Option, -} diff --git a/libvdrtools/src/domain/anoncreds/credential_offer.rs b/libvdrtools/src/domain/anoncreds/credential_offer.rs index a4ff5d3365..d980b5d52c 100644 --- a/libvdrtools/src/domain/anoncreds/credential_offer.rs +++ b/libvdrtools/src/domain/anoncreds/credential_offer.rs @@ -2,8 +2,6 @@ use ursa::cl::{CredentialKeyCorrectnessProof, Nonce}; use super::{credential_definition::CredentialDefinitionId, schema::SchemaId}; -use indy_api_types::validation::Validatable; - #[derive(Debug, Deserialize, Serialize)] pub struct CredentialOffer { pub schema_id: SchemaId, @@ -30,11 +28,3 @@ impl CredentialOffer { } } } - -impl Validatable for CredentialOffer { - fn validate(&self) -> Result<(), String> { - self.schema_id.validate()?; - self.cred_def_id.validate()?; - Ok(()) - } -} diff --git a/libvdrtools/src/domain/anoncreds/credential_request.rs b/libvdrtools/src/domain/anoncreds/credential_request.rs index e410e62c61..a85416094d 100644 --- a/libvdrtools/src/domain/anoncreds/credential_request.rs +++ b/libvdrtools/src/domain/anoncreds/credential_request.rs @@ -1,12 +1,9 @@ -use super::super::crypto::did::DidValue; use ursa::cl::{ BlindedCredentialSecrets, BlindedCredentialSecretsCorrectnessProof, CredentialSecretsBlindingFactors, Nonce, }; -use super::credential_definition::CredentialDefinitionId; - -use indy_api_types::validation::Validatable; +use super::{super::crypto::did::DidValue, credential_definition::CredentialDefinitionId}; #[derive(Debug, Serialize, Deserialize)] pub struct CredentialRequest { @@ -35,13 +32,3 @@ impl CredentialRequest { } } } - -impl Validatable for CredentialRequest { - fn validate(&self) -> Result<(), String> { - self.cred_def_id.validate()?; - self.prover_did.validate()?; - Ok(()) - } -} - -impl Validatable for CredentialRequestMetadata {} diff --git a/libvdrtools/src/domain/anoncreds/indy_identifiers.rs b/libvdrtools/src/domain/anoncreds/indy_identifiers.rs index 455d160e2b..9b5fcba751 100644 --- a/libvdrtools/src/domain/anoncreds/indy_identifiers.rs +++ b/libvdrtools/src/domain/anoncreds/indy_identifiers.rs @@ -1,9 +1,10 @@ +use lazy_static::lazy_static; +use regex::Regex; + use super::{ super::crypto::did::DidValue, credential_definition::CredentialDefinitionId, revocation_registry_definition::CL_ACCUM, schema::SchemaId, }; -use lazy_static::lazy_static; -use regex::Regex; 
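// The NAMESPACE_RE/DID_RE constants that follow feed format! calls like the one in
// try_parse_indy_rev_reg below. A self-contained sketch of the same technique against the
// CLAIM_DEF form used in the tests; the `.+` schema_ref stand-in here is deliberately
// looser than the real SCHEMA_REF_RE.
fn demo_parse_creddef_id() {
    use regex::Regex;

    const NAMESPACE_RE: &str = r"[a-z][a-z0-9_:-]*";
    const DID_RE: &str = r"[1-9A-HJ-NP-Za-km-z]*"; // base58

    let id_re = format!(
        "^(did:indy(:{NAMESPACE_RE})?:{DID_RE})/anoncreds/v0/CLAIM_DEF/(?P<schema_ref>.+)/(?P<tag>[^/]*)$"
    );
    let re = Regex::new(&id_re).unwrap();

    let caps = re
        .captures("did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag")
        .unwrap();
    assert_eq!(&caps[1], "did:indy:NcYxiDXkpYi6ov5FcYDi1e");
    assert_eq!(&caps["schema_ref"], "1");
    assert_eq!(&caps["tag"], "tag");
}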
const NAMESPACE_RE: &str = r"[a-z][a-z0-9_:-]*"; const DID_RE: &str = r"[1-9A-HJ-NP-Za-km-z]*"; //base58 @@ -68,8 +69,10 @@ pub fn try_parse_indy_rev_reg( let creddef_name_re = r"[^/]*"; let tag_re = r"[^/]*"; let schema_ref_re = &*SCHEMA_REF_RE; - let id_re = format!("^(did:indy(:{NAMESPACE_RE})?:{DID_RE})\ - /anoncreds/v0/REV_REG_DEF/{schema_ref_re}/({creddef_name_re})/({tag_re})$"); + let id_re = format!( + "^(did:indy(:{NAMESPACE_RE})?:{DID_RE})/anoncreds/v0/REV_REG_DEF/{schema_ref_re}/\ + ({creddef_name_re})/({tag_re})$" + ); let id_re = Regex::new(id_re.as_str()).unwrap(); if let Some(captures) = id_re.captures(id) { @@ -110,9 +113,11 @@ fn test_try_parse_valid_indy_creddefid_works() { assert_eq!(schema_seq_no, "1".to_owned()); assert_eq!(tag, "tag".to_owned()); - let (did, schema_ref, tag) = - try_parse_indy_creddef_id( - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0/tag").unwrap(); + let (did, schema_ref, tag) = try_parse_indy_creddef_id( + "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ + anoncreds/v0/SCHEMA/gvt/1.0/tag", + ) + .unwrap(); assert_eq!(did, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); assert_eq!( schema_ref, @@ -134,10 +139,17 @@ fn test_try_parse_valid_indy_revreg_works() { ); assert_eq!(tag, "TAG1".to_owned()); - let (did, creddef, _, tag) = - try_parse_indy_rev_reg( - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0/creddef_name/TAG1").unwrap(); + let (did, creddef, _, tag) = try_parse_indy_rev_reg( + "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ + anoncreds/v0/SCHEMA/gvt/1.0/creddef_name/TAG1", + ) + .unwrap(); assert_eq!(did.0, "did:indy:NcYxiDXkpYi6ov5FcYDi1e".to_owned()); - assert_eq!(creddef.0, "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0/creddef_name".to_owned()); + assert_eq!( + creddef.0, + "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/\ + anoncreds/v0/SCHEMA/gvt/1.0/creddef_name" + .to_owned() + ); assert_eq!(tag, "TAG1".to_owned()); } diff --git a/libvdrtools/src/domain/anoncreds/master_secret.rs b/libvdrtools/src/domain/anoncreds/master_secret.rs index 965290e216..0b6b30c9c4 100644 --- a/libvdrtools/src/domain/anoncreds/master_secret.rs +++ b/libvdrtools/src/domain/anoncreds/master_secret.rs @@ -1,10 +1,6 @@ use ursa::cl::MasterSecret as CryptoMasterSecret; -use indy_api_types::validation::Validatable; - #[derive(Debug, Deserialize, Serialize)] pub struct MasterSecret { pub value: CryptoMasterSecret, } - -impl Validatable for MasterSecret {} diff --git a/libvdrtools/src/domain/anoncreds/mod.rs b/libvdrtools/src/domain/anoncreds/mod.rs index 9260ad8e7b..77fd6297b3 100644 --- a/libvdrtools/src/domain/anoncreds/mod.rs +++ b/libvdrtools/src/domain/anoncreds/mod.rs @@ -1,7 +1,5 @@ pub mod credential; -pub mod credential_attr_tag_policy; pub mod credential_definition; -pub mod credential_for_proof_request; pub mod credential_offer; pub mod credential_request; pub mod indy_identifiers; @@ -12,7 +10,6 @@ pub mod requested_credential; pub mod revocation_registry; pub mod revocation_registry_definition; pub mod revocation_registry_delta; -pub mod revocation_state; pub mod schema; pub const DELIMITER: &str = ":"; diff --git a/libvdrtools/src/domain/anoncreds/proof.rs b/libvdrtools/src/domain/anoncreds/proof.rs index 
7879788641..f66e0cce0a 100644 --- a/libvdrtools/src/domain/anoncreds/proof.rs +++ b/libvdrtools/src/domain/anoncreds/proof.rs @@ -6,7 +6,6 @@ use super::{ credential_definition::CredentialDefinitionId, revocation_registry_definition::RevocationRegistryId, schema::SchemaId, }; -use indy_api_types::validation::Validatable; #[derive(Debug, Serialize, Deserialize)] pub struct Proof { @@ -15,7 +14,7 @@ pub struct Proof { pub identifiers: Vec, } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, Default)] pub struct RequestedProof { pub revealed_attrs: HashMap, #[serde(skip_serializing_if = "HashMap::is_empty")] @@ -29,18 +28,6 @@ pub struct RequestedProof { pub predicates: HashMap, } -impl Default for RequestedProof { - fn default() -> Self { - RequestedProof { - revealed_attrs: HashMap::new(), - revealed_attr_groups: HashMap::new(), - self_attested_attrs: HashMap::new(), - unrevealed_attrs: HashMap::new(), - predicates: HashMap::new(), - } - } -} - #[derive(Clone, Debug, Deserialize, Serialize)] pub struct SubProofReferent { pub sub_proof_index: u32, @@ -73,10 +60,10 @@ pub struct Identifier { pub timestamp: Option, } -impl Validatable for Proof {} - #[cfg(test)] mod tests { + use serde_json::json; + use super::*; #[test] diff --git a/libvdrtools/src/domain/anoncreds/proof_request.rs b/libvdrtools/src/domain/anoncreds/proof_request.rs index cad13d1a9e..f3b28ad616 100644 --- a/libvdrtools/src/domain/anoncreds/proof_request.rs +++ b/libvdrtools/src/domain/anoncreds/proof_request.rs @@ -1,18 +1,15 @@ use std::{collections::HashMap, fmt}; -use ursa::cl::Nonce; - -use indy_api_types::validation::Validatable; -use crate::utils::wql::Query; use serde::{de, ser, Deserialize, Deserializer, Serialize, Serializer}; -use serde_json::Value; +use serde_json::{json, Value}; +use ursa::cl::Nonce; use super::{ super::crypto::did::DidValue, credential::Credential, credential_definition::CredentialDefinitionId, revocation_registry_definition::RevocationRegistryId, schema::SchemaId, }; -use crate::utils::qualifier; +use crate::utils::{qualifier, wql::Query}; #[derive(Debug, Deserialize, Serialize)] pub struct ProofRequestPayload { @@ -39,7 +36,7 @@ pub enum ProofRequestsVersion { } impl ProofRequest { - pub fn value<'a>(&'a self) -> &'a ProofRequestPayload { + pub fn value(&self) -> &ProofRequestPayload { match self { ProofRequest::ProofRequestV1(proof_req) => proof_req, ProofRequest::ProofRequestV2(proof_req) => proof_req, @@ -68,7 +65,7 @@ impl<'de> Deserialize<'de> for ProofRequest { let v = Value::deserialize(deserializer)?; let helper = Helper::deserialize(&v).map_err(de::Error::custom)?; - let nonce_cleaned = helper.nonce.replace(" ", "").replace("_", ""); + let nonce_cleaned = helper.nonce.replace([' ', '_'], ""); let proof_req = match helper.ver { Some(version) => match version.as_ref() { @@ -200,58 +197,6 @@ pub struct RequestedPredicateInfo { pub predicate_info: PredicateInfo, } -impl Validatable for ProofRequest { - fn validate(&self) -> Result<(), String> { - let value = self.value(); - let version = self.version(); - - if value.requested_attributes.is_empty() && value.requested_predicates.is_empty() { - return Err(String::from("Proof Request validation failed: both `requested_attributes` and `requested_predicates` are empty")); - } - - for (_, requested_attribute) in value.requested_attributes.iter() { - let has_name = !requested_attribute - .name - .as_ref() - .map(String::is_empty) - .unwrap_or(true); - let has_names = !requested_attribute - .names - .as_ref() - 
.map(Vec::is_empty) - .unwrap_or(true); - if !has_name && !has_names { - return Err(format!( - "Proof Request validation failed: there is empty requested attribute: {:?}", - requested_attribute - )); - } - - if has_name && has_names { - return Err(format!("Proof request validation failed: there is a requested attribute with both name and names: {:?}", requested_attribute)); - } - - if let Some(ref restrictions) = requested_attribute.restrictions { - _process_operator(&restrictions, &version)?; - } - } - - for (_, requested_predicate) in value.requested_predicates.iter() { - if requested_predicate.name.is_empty() { - return Err(format!( - "Proof Request validation failed: there is empty requested attribute: {:?}", - requested_predicate - )); - } - if let Some(ref restrictions) = requested_predicate.restrictions { - _process_operator(&restrictions, &version)?; - } - } - - Ok(()) - } -} - impl ProofRequest { pub fn to_unqualified(self) -> ProofRequest { let convert = |proof_request: &mut ProofRequestPayload| { @@ -259,13 +204,13 @@ impl ProofRequest { requested_attribute.restrictions = requested_attribute .restrictions .as_mut() - .map(|ref mut restrictions| _convert_query_to_unqualified(&restrictions)); + .map(|ref mut restrictions| _convert_query_to_unqualified(restrictions)); } for (_, requested_predicate) in proof_request.requested_predicates.iter_mut() { requested_predicate.restrictions = requested_predicate .restrictions .as_mut() - .map(|ref mut restrictions| _convert_query_to_unqualified(&restrictions)); + .map(|ref mut restrictions| _convert_query_to_unqualified(restrictions)); } }; @@ -302,13 +247,13 @@ fn _convert_query_to_unqualified(query: &Query) -> Query { Query::And(ref queries) => Query::And( queries .iter() - .map(|query| _convert_query_to_unqualified(query)) + .map(_convert_query_to_unqualified) .collect::>(), ), Query::Or(ref queries) => Query::Or( queries .iter() - .map(|query| _convert_query_to_unqualified(query)) + .map(_convert_query_to_unqualified) .collect::>(), ), Query::Not(ref query) => _convert_query_to_unqualified(query), @@ -372,8 +317,12 @@ fn _check_restriction( && Credential::QUALIFIABLE_TAGS.contains(&tag_name) && qualifier::is_fully_qualified(tag_value) { - return Err("Proof Request validation failed: fully qualified identifiers can not be used for Proof Request of the first version. \ - Please, set \"ver\":\"2.0\" to use fully qualified identifiers.".to_string()); + return Err( + "Proof Request validation failed: fully qualified identifiers can not be used for \ + Proof Request of the first version. Please, set \"ver\":\"2.0\" to use fully \ + qualified identifiers." 
+ .to_string(), + ); } Ok(()) } @@ -431,8 +380,12 @@ mod tests { const CRED_DEF_ID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag"; const CRED_DEF_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag"; - const REV_REG_ID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0/tag/TAG_1"; - const REV_REG_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:TAG_1"; + const REV_REG_ID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/\ + REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/\ + v0/SCHEMA/gvt/1.0/tag/TAG_1"; + const REV_REG_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:\ + CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:\ + TAG_1"; #[test] fn proof_request_to_unqualified() { diff --git a/libvdrtools/src/domain/anoncreds/requested_credential.rs b/libvdrtools/src/domain/anoncreds/requested_credential.rs index 103f88e6c1..2f2ee0df53 100644 --- a/libvdrtools/src/domain/anoncreds/requested_credential.rs +++ b/libvdrtools/src/domain/anoncreds/requested_credential.rs @@ -1,7 +1,5 @@ use std::collections::HashMap; -use indy_api_types::validation::Validatable; - #[derive(Debug, Deserialize, Serialize)] pub struct RequestedCredentials { pub self_attested_attributes: HashMap, @@ -21,15 +19,3 @@ pub struct ProvingCredentialKey { pub cred_id: String, pub timestamp: Option, } - -impl Validatable for RequestedCredentials { - fn validate(&self) -> Result<(), String> { - if self.self_attested_attributes.is_empty() - && self.requested_attributes.is_empty() - && self.requested_predicates.is_empty() - { - return Err(String::from("Requested Credentials validation failed: `self_attested_attributes` and `requested_attributes` and `requested_predicates` are empty")); - } - Ok(()) - } -} diff --git a/libvdrtools/src/domain/anoncreds/revocation_registry.rs b/libvdrtools/src/domain/anoncreds/revocation_registry.rs index 549f81b988..f9a1042dd9 100644 --- a/libvdrtools/src/domain/anoncreds/revocation_registry.rs +++ b/libvdrtools/src/domain/anoncreds/revocation_registry.rs @@ -1,8 +1,6 @@ -use ursa::cl::RevocationRegistry as CryptoRevocationRegistry; - use std::collections::HashMap; -use indy_api_types::validation::Validatable; +use ursa::cl::RevocationRegistry as CryptoRevocationRegistry; use super::revocation_registry_definition::RevocationRegistryId; @@ -42,5 +40,3 @@ pub fn rev_regs_map_to_rev_regs_local_map( }) .collect() } - -impl Validatable for RevocationRegistry {} diff --git a/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs b/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs index 7548cb00d6..003d0af9f6 100644 --- a/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs +++ b/libvdrtools/src/domain/anoncreds/revocation_registry_definition.rs @@ -1,7 +1,6 @@ -use indy_api_types::errors::{err_msg, IndyErrorKind, IndyResult}; use std::collections::{HashMap, HashSet}; -use indy_api_types::validation::Validatable; +use indy_api_types::errors::{err_msg, IndyErrorKind, IndyResult}; use lazy_static::lazy_static; use regex::Regex; use ursa::cl::{RevocationKeyPrivate, RevocationKeyPublic}; @@ -10,14 +9,17 @@ use super::{ super::crypto::did::DidValue, credential_definition::CredentialDefinitionId, indy_identifiers, DELIMITER, }; - use crate::utils::qualifier; pub const CL_ACCUM: &str = "CL_ACCUM"; pub const REV_REG_DEG_MARKER: &str 
= "4"; lazy_static! { - static ref QUALIFIED_REV_REG_ID: Regex = Regex::new("(^revreg:(?P[a-z0-9]+):)?(?P.+):4:(?P.+):(?P.+):(?P.+)$").unwrap(); + static ref QUALIFIED_REV_REG_ID: Regex = Regex::new( + "(^revreg:(?P[a-z0-9]+):)?(?P.+):4:(?P.+):(?P.+):\ + (?P.+)$" + ) + .unwrap(); } #[derive(Deserialize, Debug, Serialize)] @@ -202,15 +204,14 @@ impl RevocationRegistryId { return Some(parts); } - match QUALIFIED_REV_REG_ID.captures(&self.0) { - Some(caps) => Some(( + QUALIFIED_REV_REG_ID.captures(&self.0).map(|caps| { + ( DidValue(caps["did"].to_string()), CredentialDefinitionId(caps["cred_def_id"].to_string()), caps["rev_reg_type"].to_string(), caps["tag"].to_string(), - )), - None => None, - } + ) + }) } pub fn to_unqualified(&self) -> RevocationRegistryId { @@ -227,38 +228,6 @@ impl RevocationRegistryId { } } -impl Validatable for RevocationRegistryConfig { - fn validate(&self) -> Result<(), String> { - if let Some(num_) = self.max_cred_num { - if num_ == 0 { - return Err(String::from("RevocationRegistryConfig validation failed: `max_cred_num` must be greater than 0")); - } - } - Ok(()) - } -} - -impl Validatable for RevocationRegistryId { - fn validate(&self) -> Result<(), String> { - self.parts().ok_or(format!( - "Revocation Registry Id validation failed: {:?}, doesn't match pattern", - self.0 - ))?; - Ok(()) - } -} - -impl Validatable for RevocationRegistryDefinition { - fn validate(&self) -> Result<(), String> { - match self { - RevocationRegistryDefinition::RevocationRegistryDefinitionV1(revoc_reg_def) => { - revoc_reg_def.id.validate()?; - } - } - Ok(()) - } -} - #[cfg(test)] mod tests { use super::*; @@ -286,15 +255,28 @@ mod tests { } fn _cred_def_id_qualified() -> CredentialDefinitionId { - CredentialDefinitionId("creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag".to_string()) + CredentialDefinitionId( + "creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:\ + NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag" + .to_string(), + ) } fn _rev_reg_id_unqualified() -> RevocationRegistryId { - RevocationRegistryId("NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:TAG_1".to_string()) + RevocationRegistryId( + "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.\ + 0:tag:CL_ACCUM:TAG_1" + .to_string(), + ) } fn _rev_reg_id_qualified() -> RevocationRegistryId { - RevocationRegistryId("revreg:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:4:creddef:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:TAG_1".to_string()) + RevocationRegistryId( + "revreg:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:4:creddef:sov:did:sov:\ + NcYxiDXkpYi6ov5FcYDi1e:3:CL:schema:sov:did:sov:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:\ + CL_ACCUM:TAG_1" + .to_string(), + ) } mod to_unqualified { @@ -338,18 +320,4 @@ mod tests { assert_eq!(_tag(), tag); } } - - mod validate { - use super::*; - - #[test] - fn test_validate_rev_reg_id_as_unqualified() { - _rev_reg_id_unqualified().validate().unwrap(); - } - - #[test] - fn test_validate_rev_reg_id_as_fully_qualified() { - _rev_reg_id_qualified().validate().unwrap(); - } - } } diff --git a/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs b/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs index 3687ede8d5..f316d2cc6c 100644 --- a/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs +++ b/libvdrtools/src/domain/anoncreds/revocation_registry_delta.rs @@ -1,4 +1,3 @@ -use 
indy_api_types::validation::Validatable; use ursa::cl::RevocationRegistryDelta as RegistryDelta; #[derive(Debug, Clone, Serialize, Deserialize)] @@ -21,5 +20,3 @@ impl From for RevocationRegistryDeltaV1 { } } } - -impl Validatable for RevocationRegistryDelta {} diff --git a/libvdrtools/src/domain/anoncreds/revocation_state.rs b/libvdrtools/src/domain/anoncreds/revocation_state.rs deleted file mode 100644 index 3d159cec73..0000000000 --- a/libvdrtools/src/domain/anoncreds/revocation_state.rs +++ /dev/null @@ -1,24 +0,0 @@ -use std::collections::HashMap; -use ursa::cl::{RevocationRegistry, Witness}; - -use indy_api_types::validation::Validatable; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct RevocationState { - pub witness: Witness, - pub rev_reg: RevocationRegistry, - pub timestamp: u64, -} - -impl Validatable for RevocationState { - fn validate(&self) -> Result<(), String> { - if self.timestamp == 0 { - return Err(String::from( - "RevocationState validation failed: `timestamp` must be greater than 0", - )); - } - Ok(()) - } -} - -pub type RevocationStates = HashMap>; diff --git a/libvdrtools/src/domain/anoncreds/schema.rs b/libvdrtools/src/domain/anoncreds/schema.rs index b5035cf677..d2823b1f0d 100644 --- a/libvdrtools/src/domain/anoncreds/schema.rs +++ b/libvdrtools/src/domain/anoncreds/schema.rs @@ -1,16 +1,12 @@ -use super::DELIMITER; - -use super::super::crypto::did::DidValue; +use std::collections::{HashMap, HashSet}; use indy_api_types::{ errors::{IndyErrorKind, IndyResult}, IndyError, }; -use std::collections::{HashMap, HashSet}; -use super::indy_identifiers; +use super::{super::crypto::did::DidValue, indy_identifiers, DELIMITER}; use crate::utils::qualifier; -use indy_api_types::validation::Validatable; pub const MAX_ATTRIBUTES_COUNT: usize = 125; @@ -63,10 +59,9 @@ pub fn schemas_map_to_schemas_v1_map(schemas: Schemas) -> HashMap); -#[allow(dead_code)] impl AttributeNames { pub fn new() -> Self { AttributeNames(HashSet::new()) @@ -79,54 +74,9 @@ impl From> for AttributeNames { } } -impl Into> for AttributeNames { - fn into(self) -> HashSet { - self.0 - } -} - -impl Validatable for Schema { - fn validate(&self) -> Result<(), String> { - match self { - Schema::SchemaV1(schema) => { - schema.attr_names.validate()?; - schema.id.validate()?; - if let Some((_, name, version)) = schema.id.parts() { - if name != schema.name { - return Err(format!( - "Inconsistent Schema Id and Schema Name: {:?} and {}", - schema.id, schema.name - )); - } - if version != schema.version { - return Err(format!( - "Inconsistent Schema Id and Schema Version: {:?} and {}", - schema.id, schema.version - )); - } - } - Ok(()) - } - } - } -} - -impl Validatable for AttributeNames { - fn validate(&self) -> Result<(), String> { - if self.0.is_empty() { - return Err(String::from( - "Empty list of Schema attributes has been passed", - )); - } - - if self.0.len() > MAX_ATTRIBUTES_COUNT { - return Err(format!( - "The number of Schema attributes {} cannot be greater than {}", - self.0.len(), - MAX_ATTRIBUTES_COUNT - )); - } - Ok(()) +impl From for HashSet { + fn from(value: AttributeNames) -> HashSet { + value.0 } } @@ -216,21 +166,6 @@ impl SchemaId { } } -impl Validatable for SchemaId { - fn validate(&self) -> Result<(), String> { - if self.0.parse::().is_ok() { - return Ok(()); - } - - self.parts().ok_or(format!( - "SchemaId validation failed: {:?}, doesn't match pattern", - self.0 - ))?; - - Ok(()) - } -} - #[cfg(test)] mod tests { use super::*; @@ -317,89 +252,4 @@ mod tests { 
assert!(_schema_id_invalid().parts().is_none()); } } - - mod validate { - use super::*; - - #[test] - fn test_validate_schema_id_as_seq_no() { - _schema_id_seq_no().validate().unwrap(); - } - - #[test] - fn test_validate_schema_id_as_unqualified() { - _schema_id_unqualified().validate().unwrap(); - } - - #[test] - fn test_validate_schema_id_as_fully_qualified() { - _schema_id_qualified().validate().unwrap(); - } - - #[test] - fn test_validate_schema_id_for_invalid_unqualified() { - _schema_id_invalid().validate().unwrap_err(); - } - - #[test] - fn test_validate_schema_id_for_invalid_fully_qualified() { - let id = SchemaId("schema:sov:NcYxiDXkpYi6ov5FcYDi1e:2:1.0".to_string()); - id.validate().unwrap_err(); - } - } - - mod test_schema_validation { - use super::*; - - #[test] - fn test_valid_schema() { - let schema_json = json!({ - "id": _schema_id_qualified(), - "name": "gvt", - "ver": "1.0", - "version": "1.0", - "attrNames": ["aaa", "bbb", "ccc"], - }) - .to_string(); - - let schema: Schema = serde_json::from_str(&schema_json).unwrap(); - schema.validate().unwrap(); - match schema { - Schema::SchemaV1(schema) => { - assert_eq!(schema.name, "gvt"); - assert_eq!(schema.version, "1.0"); - } - } - } - - #[test] - fn test_invalid_name_schema() { - let schema_json = json!({ - "id": _schema_id_qualified(), - "name": "gvt1", - "ver": "1.0", - "version": "1.0", - "attrNames": ["aaa", "bbb", "ccc"], - }) - .to_string(); - - let schema: Schema = serde_json::from_str(&schema_json).unwrap(); - schema.validate().unwrap_err(); - } - - #[test] - fn test_invalid_version_schema() { - let schema_json = json!({ - "id": _schema_id_qualified(), - "name": "gvt", - "ver": "1.0", - "version": "1.1", - "attrNames": ["aaa", "bbb", "ccc"], - }) - .to_string(); - - let schema: Schema = serde_json::from_str(&schema_json).unwrap(); - schema.validate().unwrap_err(); - } - } } diff --git a/libvdrtools/src/domain/cache.rs b/libvdrtools/src/domain/cache.rs index cc26bf08a1..deea3c64be 100644 --- a/libvdrtools/src/domain/cache.rs +++ b/libvdrtools/src/domain/cache.rs @@ -10,5 +10,6 @@ pub struct GetCacheOptions { pub no_cache: Option, // Skip usage of cache, pub no_update: Option, // Use only cached data, do not try to update. pub no_store: Option, // Skip storing fresh data if updated - pub min_fresh: Option, // Return cached data if not older than this many seconds. -1 means do not check age. + pub min_fresh: Option, /* Return cached data if not older than this many seconds. -1 + * means do not check age. */ } diff --git a/libvdrtools/src/domain/crypto/did.rs b/libvdrtools/src/domain/crypto/did.rs index 2167acfb1a..9c48c7cf08 100644 --- a/libvdrtools/src/domain/crypto/did.rs +++ b/libvdrtools/src/domain/crypto/did.rs @@ -1,29 +1,10 @@ +use indy_api_types::errors::{IndyError, IndyErrorKind, IndyResult}; + use crate::utils::qualifier; -use indy_api_types::{ - errors::{IndyError, IndyErrorKind, IndyResult}, - validation::Validatable, -}; -use lazy_static::lazy_static; -use regex::Regex; #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct DidMethod(pub String); -impl Validatable for DidMethod { - fn validate(&self) -> Result<(), String> { - lazy_static! { - static ref REGEX_METHOD_NAME: Regex = Regex::new("^[a-z0-9:]+$").unwrap(); - } - if !REGEX_METHOD_NAME.is_match(&self.0) { - return Err(format!( - "Invalid default name: {}. 
It does not match the DID method name format.", - self.0 - )); - } - Ok(()) - } -} - #[derive(Serialize, Deserialize, Clone, Debug, Default)] pub struct MyDidInfo { pub did: Option, @@ -34,18 +15,6 @@ pub struct MyDidInfo { pub ledger_type: Option, } -impl Validatable for MyDidInfo { - fn validate(&self) -> Result<(), String> { - if let Some(ref did) = self.did { - did.validate()?; - } - if let Some(ref name) = self.method_name { - name.validate()? - } - Ok(()) - } -} - #[derive(Debug, Serialize, Deserialize, Clone)] pub struct TheirDidInfo { pub did: DidValue, @@ -58,13 +27,6 @@ impl TheirDidInfo { } } -impl Validatable for TheirDidInfo { - fn validate(&self) -> Result<(), String> { - self.did.validate()?; - Ok(()) - } -} - #[derive(Serialize, Deserialize, Clone, Debug)] pub struct Did { pub did: DidValue, @@ -87,7 +49,7 @@ impl DidValue { (Some(ledger_type_), Some(method_)) => { Ok(DidValue(did.to_string()).set_ledger_and_method(ledger_type_, method_)) } - (None, Some(method_)) => Ok(DidValue(did.to_string()).set_method(&method_)), + (None, Some(method_)) => Ok(DidValue(did.to_string()).set_method(method_)), (None, None) => Ok(DidValue(did.to_string())), (Some(_), None) => Err(IndyError::from_msg( IndyErrorKind::InvalidStructure, @@ -101,7 +63,7 @@ impl DidValue { } pub fn qualify(&self, method: &str) -> DidValue { - self.set_method(&method) + self.set_method(method) } pub fn to_unqualified(&self) -> DidValue { @@ -117,24 +79,6 @@ impl DidValue { } } -impl Validatable for DidValue { - fn validate(&self) -> Result<(), String> { - if self.is_fully_qualified() { - // pass - } else { - let did = bs58::decode(&self.0) - .into_vec() - .map_err(|err| err.to_string())?; - - if did.len() != 16 && did.len() != 32 { - return Err(format!("Trying to use DID with unexpected length: {}. \ - The 16- or 32-byte number upon which a DID is based should be 22/23 or 44/45 bytes when encoded as base58.", did.len())); - } - } - Ok(()) - } -} - qualifiable_type!(ShortDidValue); impl ShortDidValue { @@ -148,20 +92,6 @@ impl ShortDidValue { } } -impl Validatable for ShortDidValue { - fn validate(&self) -> Result<(), String> { - let did = bs58::decode(&self.0) - .into_vec() - .map_err(|err| err.to_string())?; - - if did.len() != 16 && did.len() != 32 { - return Err(format!("Trying to use DID with unexpected length: {}. 
\ - The 16- or 32-byte number upon which a DID is based should be 22/23 or 44/45 bytes when encoded as base58.", did.len())); - } - Ok(()) - } -} - #[derive(Serialize, Deserialize, Debug)] pub struct DidMetadata { pub value: String, diff --git a/libvdrtools/src/domain/ledger/attrib.rs b/libvdrtools/src/domain/ledger/attrib.rs deleted file mode 100644 index b95906d8c0..0000000000 --- a/libvdrtools/src/domain/ledger/attrib.rs +++ /dev/null @@ -1,106 +0,0 @@ -use super::{ - super::crypto::did::ShortDidValue, - constants::{ATTRIB, GET_ATTR}, - response::GetReplyResultV1, -}; - -#[derive(Serialize, PartialEq, Debug)] -pub struct AttribOperation { - #[serde(rename = "type")] - pub _type: String, - pub dest: ShortDidValue, - #[serde(skip_serializing_if = "Option::is_none")] - pub hash: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub raw: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub enc: Option, -} - -impl AttribOperation { - pub fn new( - dest: ShortDidValue, - hash: Option, - raw: Option, - enc: Option, - ) -> AttribOperation { - AttribOperation { - _type: ATTRIB.to_string(), - dest, - hash, - raw, - enc, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetAttribOperation { - #[serde(rename = "type")] - pub _type: String, - pub dest: ShortDidValue, - #[serde(skip_serializing_if = "Option::is_none")] - pub raw: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub hash: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub enc: Option, -} - -impl GetAttribOperation { - pub fn new( - dest: ShortDidValue, - raw: Option<&str>, - hash: Option<&str>, - enc: Option<&str>, - ) -> GetAttribOperation { - GetAttribOperation { - _type: GET_ATTR.to_string(), - dest, - raw: raw.map(String::from), - hash: hash.map(String::from), - enc: enc.map(String::from), - } - } -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum GetAttrReplyResult { - GetAttrReplyResultV0(GetAttResultV0), - GetAttrReplyResultV1(GetReplyResultV1), -} - -#[derive(Deserialize, Eq, PartialEq, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetAttResultV0 { - pub identifier: ShortDidValue, - pub data: String, - pub dest: ShortDidValue, - pub raw: String, -} - -#[derive(Deserialize, Eq, PartialEq, Debug)] -pub struct GetAttResultDataV1 { - pub ver: String, - pub id: String, - pub did: ShortDidValue, - pub raw: String, -} - -#[derive(Deserialize, Debug)] -pub struct AttribData { - pub endpoint: Endpoint, -} - -#[derive(Serialize, Deserialize, Clone, Debug)] -pub struct Endpoint { - pub ha: String, // indy-node and indy-plenum restrict this to ip-address:port - pub verkey: Option, -} - -impl Endpoint { - pub fn new(ha: String, verkey: Option) -> Endpoint { - Endpoint { ha, verkey } - } -} diff --git a/libvdrtools/src/domain/ledger/auth_rule.rs b/libvdrtools/src/domain/ledger/auth_rule.rs deleted file mode 100644 index 64634e6223..0000000000 --- a/libvdrtools/src/domain/ledger/auth_rule.rs +++ /dev/null @@ -1,260 +0,0 @@ -use serde_json::Value; -use std::ops::Not; - -use super::constants::{AUTH_RULE, AUTH_RULES, GET_AUTH_RULE}; - -#[allow(non_camel_case_types)] -#[derive(Deserialize, Debug, Serialize, PartialEq)] -pub enum AuthAction { - ADD, - EDIT, -} - -/** - Enum of the constraint type within the GAT_AUTH_RULE result data - # parameters - Role - The final constraint - And - Combine multiple constraints all of them must be met - Or - Combine multiple constraints any of them must be met - Forbidden - action is forbidden -*/ -#[derive(Serialize, 
Deserialize, PartialEq, Debug, Clone)] -#[serde(tag = "constraint_id")] -pub enum Constraint { - #[serde(rename = "OR")] - OrConstraint(CombinationConstraint), - #[serde(rename = "AND")] - AndConstraint(CombinationConstraint), - #[serde(rename = "ROLE")] - RoleConstraint(RoleConstraint), - #[serde(rename = "FORBIDDEN")] - ForbiddenConstraint(ForbiddenConstraint), -} - -/** - The final constraint - # parameters - sig_count - The number of signatures required to execution action - role - The role which the user must have to execute the action. - metadata - An additional parameters of the constraint (contains transaction FEE cost). - need_to_be_owner - The flag specifying if a user must be an owner of the transaction (false by default) . - off_ledger_signature - allow signature of unknow for ledger did (false by default). -*/ -#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] -pub struct RoleConstraint { - pub sig_count: u32, - pub role: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub metadata: Option, - #[serde(default)] - pub need_to_be_owner: bool, - #[serde(default)] - #[serde(skip_serializing_if = "Not::not")] - pub off_ledger_signature: bool, -} - -/** - Combine multiple constraints - # parameters - auth_constraints - The type of the combination -*/ -#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] -pub struct CombinationConstraint { - pub auth_constraints: Vec, -} - -/** - The forbidden constraint means that action is forbidden -*/ -#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] -#[serde(deny_unknown_fields)] -pub struct ForbiddenConstraint {} - -#[derive(Serialize, PartialEq, Debug)] -#[serde(untagged)] -pub enum AuthRuleOperation { - Add(AddAuthRuleOperation), - Edit(EditAuthRuleOperation), -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct AddAuthRuleOperation { - #[serde(rename = "type")] - pub _type: String, - pub auth_type: String, - pub field: String, - pub auth_action: AuthAction, - pub new_value: Option, - pub constraint: Constraint, -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct EditAuthRuleOperation { - #[serde(rename = "type")] - pub _type: String, - pub auth_type: String, - pub field: String, - pub auth_action: AuthAction, - pub old_value: Option, - pub new_value: Option, - pub constraint: Constraint, -} - -impl AuthRuleOperation { - pub fn new( - auth_type: String, - field: String, - auth_action: AuthAction, - old_value: Option, - new_value: Option, - constraint: Constraint, - ) -> AuthRuleOperation { - match auth_action { - AuthAction::ADD => AuthRuleOperation::Add(AddAuthRuleOperation { - _type: AUTH_RULE.to_string(), - auth_type, - field, - auth_action, - new_value, - constraint, - }), - AuthAction::EDIT => AuthRuleOperation::Edit(EditAuthRuleOperation { - _type: AUTH_RULE.to_string(), - auth_type, - field, - auth_action, - old_value, - new_value, - constraint, - }), - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -#[serde(untagged)] -pub enum GetAuthRuleOperation { - All(GetAllAuthRuleOperation), - Add(GetAddAuthRuleOperation), - Edit(GetEditAuthRuleOperation), -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetAllAuthRuleOperation { - #[serde(rename = "type")] - pub _type: String, -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetAddAuthRuleOperation { - #[serde(rename = "type")] - pub _type: String, - pub auth_type: String, - pub field: String, - pub auth_action: AuthAction, - pub new_value: Option, -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct 
GetEditAuthRuleOperation { - #[serde(rename = "type")] - pub _type: String, - pub auth_type: String, - pub field: String, - pub auth_action: AuthAction, - pub old_value: Option, - pub new_value: Option, -} - -impl GetAuthRuleOperation { - pub fn get_all() -> GetAuthRuleOperation { - GetAuthRuleOperation::All(GetAllAuthRuleOperation { - _type: GET_AUTH_RULE.to_string(), - }) - } - - pub fn get_one( - auth_type: String, - field: String, - auth_action: AuthAction, - old_value: Option, - new_value: Option, - ) -> GetAuthRuleOperation { - match auth_action { - AuthAction::ADD => GetAuthRuleOperation::Add(GetAddAuthRuleOperation { - _type: GET_AUTH_RULE.to_string(), - auth_type, - field, - auth_action, - new_value, - }), - AuthAction::EDIT => GetAuthRuleOperation::Edit(GetEditAuthRuleOperation { - _type: GET_AUTH_RULE.to_string(), - auth_type, - field, - auth_action, - old_value, - new_value, - }), - } - } -} - -pub type AuthRules = Vec; - -#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] -#[serde(tag = "auth_action")] -pub enum AuthRuleData { - #[serde(rename = "ADD")] - Add(AddAuthRuleData), - #[serde(rename = "EDIT")] - Edit(EditAuthRuleData), -} - -#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] -pub struct AddAuthRuleData { - pub auth_type: String, - pub field: String, - pub new_value: Option, - pub constraint: Constraint, -} - -#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] -pub struct EditAuthRuleData { - pub auth_type: String, - pub field: String, - pub old_value: Option, - pub new_value: Option, - pub constraint: Constraint, -} - -#[derive(Serialize, Deserialize, PartialEq, Debug)] -pub struct GetAuthRuleResult { - pub data: Vec, -} - -#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] -pub struct AuthRule { - pub auth_type: String, - pub auth_action: String, - pub field: String, - pub old_value: Option, - pub new_value: Option, - pub constraint: Constraint, -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct AuthRulesOperation { - #[serde(rename = "type")] - pub _type: String, - pub rules: AuthRules, -} - -impl AuthRulesOperation { - pub fn new(rules: AuthRules) -> AuthRulesOperation { - AuthRulesOperation { - _type: AUTH_RULES.to_string(), - rules, - } - } -} diff --git a/libvdrtools/src/domain/ledger/author_agreement.rs b/libvdrtools/src/domain/ledger/author_agreement.rs deleted file mode 100644 index 1a986f9784..0000000000 --- a/libvdrtools/src/domain/ledger/author_agreement.rs +++ /dev/null @@ -1,168 +0,0 @@ -use std::collections::HashMap; - -use indy_api_types::validation::Validatable; - -use super::constants::{ - DISABLE_ALL_TXN_AUTHR_AGRMTS, GET_TXN_AUTHR_AGRMT, GET_TXN_AUTHR_AGRMT_AML, TXN_AUTHR_AGRMT, - TXN_AUTHR_AGRMT_AML, -}; - -#[derive(Serialize, PartialEq, Debug)] -pub struct TxnAuthorAgreementOperation { - #[serde(rename = "type")] - _type: String, - #[serde(skip_serializing_if = "Option::is_none")] - text: Option, - version: String, - #[serde(skip_serializing_if = "Option::is_none")] - ratification_ts: Option, - #[serde(skip_serializing_if = "Option::is_none")] - retirement_ts: Option, -} - -impl TxnAuthorAgreementOperation { - pub fn new( - text: Option, - version: String, - ratification_ts: Option, - retirement_ts: Option, - ) -> TxnAuthorAgreementOperation { - TxnAuthorAgreementOperation { - _type: TXN_AUTHR_AGRMT.to_string(), - text, - version, - ratification_ts, - retirement_ts, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct DisableAllTxnAuthorAgreementsOperation { - #[serde(rename = "type")] - _type: 
String, -} - -impl DisableAllTxnAuthorAgreementsOperation { - pub fn new() -> DisableAllTxnAuthorAgreementsOperation { - DisableAllTxnAuthorAgreementsOperation { - _type: DISABLE_ALL_TXN_AUTHR_AGRMTS.to_string(), - } - } -} - -#[derive(Deserialize, PartialEq, Debug)] -pub struct GetTxnAuthorAgreementData { - pub digest: Option, - pub version: Option, - pub timestamp: Option, -} - -impl Validatable for GetTxnAuthorAgreementData { - fn validate(&self) -> Result<(), String> { - match ( - self.digest.as_ref(), - self.version.as_ref(), - self.timestamp.as_ref(), - ) { - (Some(_), None, None) => Ok(()), - (None, Some(_), None) => Ok(()), - (None, None, Some(_)) => Ok(()), - (None, None, None) => Ok(()), - (digest, version, timestamp) => Err(format!( - "Only one of field can be specified: digest: {:?}, version: {:?}, timestamp: {:?}", - digest, version, timestamp - )), - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetTxnAuthorAgreementOperation { - #[serde(rename = "type")] - _type: String, - #[serde(skip_serializing_if = "Option::is_none")] - digest: Option, - #[serde(skip_serializing_if = "Option::is_none")] - version: Option, - #[serde(skip_serializing_if = "Option::is_none")] - timestamp: Option, -} - -impl GetTxnAuthorAgreementOperation { - pub fn new(data: Option<&GetTxnAuthorAgreementData>) -> GetTxnAuthorAgreementOperation { - GetTxnAuthorAgreementOperation { - _type: GET_TXN_AUTHR_AGRMT.to_string(), - digest: data.as_ref().and_then(|d| d.digest.clone()), - version: data.as_ref().and_then(|d| d.version.clone()), - timestamp: data.as_ref().and_then(|d| d.timestamp), - } - } -} - -#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] -pub struct AcceptanceMechanisms(pub HashMap); - -impl AcceptanceMechanisms { - #[allow(dead_code)] - pub fn new() -> Self { - AcceptanceMechanisms(HashMap::new()) - } -} - -impl Validatable for AcceptanceMechanisms { - fn validate(&self) -> Result<(), String> { - if self.0.is_empty() { - return Err(String::from( - "Empty list of Acceptance Mechanisms has been passed", - )); - } - Ok(()) - } -} - -#[derive(Serialize, PartialEq, Debug)] -#[serde(rename_all = "camelCase")] -pub struct SetAcceptanceMechanismOperation { - #[serde(rename = "type")] - _type: String, - aml: AcceptanceMechanisms, - version: String, - #[serde(skip_serializing_if = "Option::is_none")] - aml_context: Option, -} - -impl SetAcceptanceMechanismOperation { - pub fn new( - aml: AcceptanceMechanisms, - version: String, - aml_context: Option, - ) -> SetAcceptanceMechanismOperation { - SetAcceptanceMechanismOperation { - _type: TXN_AUTHR_AGRMT_AML.to_string(), - aml, - version, - aml_context, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetAcceptanceMechanismOperation { - #[serde(rename = "type")] - _type: String, - #[serde(skip_serializing_if = "Option::is_none")] - timestamp: Option, - #[serde(skip_serializing_if = "Option::is_none")] - version: Option, -} - -impl GetAcceptanceMechanismOperation { - pub fn new(timestamp: Option, version: Option) -> GetAcceptanceMechanismOperation { - GetAcceptanceMechanismOperation { - _type: GET_TXN_AUTHR_AGRMT_AML.to_string(), - timestamp, - version, - } - } -} diff --git a/libvdrtools/src/domain/ledger/constants.rs b/libvdrtools/src/domain/ledger/constants.rs deleted file mode 100644 index ec1b38b65e..0000000000 --- a/libvdrtools/src/domain/ledger/constants.rs +++ /dev/null @@ -1,99 +0,0 @@ -pub const NODE: &str = "0"; -pub const NYM: &str = "1"; -pub const GET_TXN: &str = "3"; -pub const TXN_AUTHR_AGRMT: &str = 
"4"; // TODO Use nonabbreviated names as in updated design -pub const TXN_AUTHR_AGRMT_AML: &str = "5"; -pub const GET_TXN_AUTHR_AGRMT: &str = "6"; -pub const GET_TXN_AUTHR_AGRMT_AML: &str = "7"; -pub const DISABLE_ALL_TXN_AUTHR_AGRMTS: &str = "8"; -pub const ATTRIB: &str = "100"; -pub const SCHEMA: &str = "101"; -pub const CRED_DEF: &str = "102"; -pub const GET_ATTR: &str = "104"; -pub const GET_NYM: &str = "105"; -pub const GET_SCHEMA: &str = "107"; -pub const GET_CRED_DEF: &str = "108"; -pub const POOL_UPGRADE: &str = "109"; -pub const POOL_RESTART: &str = "118"; -pub const POOL_CONFIG: &str = "111"; -pub const REVOC_REG_DEF: &str = "113"; -pub const REVOC_REG_ENTRY: &str = "114"; -pub const GET_REVOC_REG_DEF: &str = "115"; -pub const GET_REVOC_REG: &str = "116"; -pub const GET_REVOC_REG_DELTA: &str = "117"; -pub const GET_VALIDATOR_INFO: &str = "119"; -pub const AUTH_RULE: &str = "120"; -pub const GET_AUTH_RULE: &str = "121"; -pub const AUTH_RULES: &str = "122"; -pub const GET_DDO: &str = "120"; //TODO change number - -pub const REQUESTS: [&str; 25] = [ - NODE, - NYM, - GET_TXN, - ATTRIB, - SCHEMA, - CRED_DEF, - GET_ATTR, - GET_NYM, - GET_SCHEMA, - GET_CRED_DEF, - POOL_UPGRADE, - POOL_RESTART, - POOL_CONFIG, - REVOC_REG_DEF, - REVOC_REG_ENTRY, - GET_REVOC_REG_DEF, - GET_REVOC_REG, - GET_REVOC_REG_DELTA, - GET_VALIDATOR_INFO, - AUTH_RULE, - GET_DDO, - TXN_AUTHR_AGRMT, - TXN_AUTHR_AGRMT_AML, - GET_TXN_AUTHR_AGRMT, - GET_TXN_AUTHR_AGRMT_AML, -]; - -pub const TRUSTEE: &str = "0"; -pub const STEWARD: &str = "2"; -pub const ENDORSER: &str = "101"; -pub const NETWORK_MONITOR: &str = "201"; -pub const ROLE_REMOVE: &str = ""; - -pub const ROLES: [&str; 4] = [TRUSTEE, STEWARD, ENDORSER, NETWORK_MONITOR]; - -pub fn txn_name_to_code(txn: &str) -> Option<&str> { - if REQUESTS.contains(&txn) { - return Some(txn); - } - - match txn { - "NODE" => Some(NODE), - "NYM" => Some(NYM), - "GET_TXN" => Some(GET_TXN), - "ATTRIB" => Some(ATTRIB), - "SCHEMA" => Some(SCHEMA), - "CRED_DEF" | "CLAIM_DEF" => Some(CRED_DEF), - "GET_ATTR" => Some(GET_ATTR), - "GET_NYM" => Some(GET_NYM), - "GET_SCHEMA" => Some(GET_SCHEMA), - "GET_CRED_DEF" => Some(GET_CRED_DEF), - "POOL_UPGRADE" => Some(POOL_UPGRADE), - "POOL_RESTART" => Some(POOL_RESTART), - "POOL_CONFIG" => Some(POOL_CONFIG), - "REVOC_REG_DEF" => Some(REVOC_REG_DEF), - "REVOC_REG_ENTRY" => Some(REVOC_REG_ENTRY), - "GET_REVOC_REG_DEF" => Some(GET_REVOC_REG_DEF), - "GET_REVOC_REG" => Some(GET_REVOC_REG), - "GET_REVOC_REG_DELTA" => Some(GET_REVOC_REG_DELTA), - "GET_VALIDATOR_INFO" => Some(GET_VALIDATOR_INFO), - "AUTH_RULE" => Some(AUTH_RULE), - "GET_DDO" => Some(GET_DDO), - "TXN_AUTHR_AGRMT" => Some(TXN_AUTHR_AGRMT), - "TXN_AUTHR_AGRMT_AML" => Some(TXN_AUTHR_AGRMT_AML), - "GET_TXN_AUTHR_AGRMT" => Some(GET_TXN_AUTHR_AGRMT), - "GET_TXN_AUTHR_AGRMT_AML" => Some(GET_TXN_AUTHR_AGRMT_AML), - val => Some(val), - } -} diff --git a/libvdrtools/src/domain/ledger/cred_def.rs b/libvdrtools/src/domain/ledger/cred_def.rs deleted file mode 100644 index 34bf36c1c5..0000000000 --- a/libvdrtools/src/domain/ledger/cred_def.rs +++ /dev/null @@ -1,110 +0,0 @@ -use super::{ - super::{ - anoncreds::{ - credential_definition::{ - CredentialDefinitionData, CredentialDefinitionId, CredentialDefinitionV1, - SignatureType, - }, - schema::SchemaId, - }, - crypto::did::ShortDidValue, - ledger::request::ProtocolVersion, - }, - constants::{CRED_DEF, GET_CRED_DEF}, - response::{GetReplyResultV1, ReplyType}, -}; - -#[derive(Serialize, Debug)] -pub struct CredDefOperation { - #[serde(rename = "ref")] - 
pub _ref: i32, - pub data: CredentialDefinitionData, - #[serde(rename = "type")] - pub _type: String, - pub signature_type: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub tag: Option, -} - -impl CredDefOperation { - pub fn new(data: CredentialDefinitionV1) -> CredDefOperation { - CredDefOperation { - _ref: data.schema_id.0.parse::().unwrap_or(0), - signature_type: data.signature_type.to_str().to_string(), - data: data.value, - tag: if ProtocolVersion::is_node_1_3() { - None - } else { - Some(data.tag.clone()) - }, - _type: CRED_DEF.to_string(), - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetCredDefOperation { - #[serde(rename = "type")] - pub _type: String, - #[serde(rename = "ref")] - pub _ref: i32, - pub signature_type: String, - pub origin: ShortDidValue, - #[serde(skip_serializing_if = "Option::is_none")] - pub tag: Option, -} - -impl GetCredDefOperation { - pub fn new( - _ref: i32, - signature_type: String, - origin: ShortDidValue, - tag: Option, - ) -> GetCredDefOperation { - GetCredDefOperation { - _type: GET_CRED_DEF.to_string(), - _ref, - signature_type, - origin, - tag, - } - } -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum GetCredDefReplyResult { - GetCredDefReplyResultV0(GetCredDefResultV0), - GetCredDefReplyResultV1(GetReplyResultV1), -} - -impl ReplyType for GetCredDefReplyResult { - fn get_type<'a>() -> &'a str { - GET_CRED_DEF - } -} - -#[derive(Deserialize, Serialize, Debug)] -pub struct GetCredDefResultV0 { - pub identifier: ShortDidValue, - #[serde(rename = "ref")] - pub ref_: u64, - #[serde(rename = "seqNo")] - pub seq_no: i32, - pub signature_type: SignatureType, - pub origin: ShortDidValue, - pub tag: Option, - pub data: CredentialDefinitionData, -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetCredDefResultDataV1 { - pub ver: String, - pub id: CredentialDefinitionId, - #[serde(rename = "type")] - pub type_: SignatureType, - pub tag: String, - pub schema_ref: SchemaId, - pub public_keys: CredentialDefinitionData, -} diff --git a/libvdrtools/src/domain/ledger/ddo.rs b/libvdrtools/src/domain/ledger/ddo.rs deleted file mode 100644 index 827b3e74ea..0000000000 --- a/libvdrtools/src/domain/ledger/ddo.rs +++ /dev/null @@ -1,17 +0,0 @@ -use super::{super::crypto::did::ShortDidValue, constants::GET_DDO}; - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetDdoOperation { - #[serde(rename = "type")] - pub _type: String, - pub dest: ShortDidValue, -} - -impl GetDdoOperation { - pub fn new(dest: ShortDidValue) -> GetDdoOperation { - GetDdoOperation { - _type: GET_DDO.to_string(), - dest, - } - } -} diff --git a/libvdrtools/src/domain/ledger/did.rs b/libvdrtools/src/domain/ledger/did.rs deleted file mode 100644 index a2d175df38..0000000000 --- a/libvdrtools/src/domain/ledger/did.rs +++ /dev/null @@ -1,99 +0,0 @@ -use super::{ - super::crypto::did::{DidValue, ShortDidValue}, - constants::{GET_NYM, NYM}, - response::{GetReplyResultV0, GetReplyResultV1, ReplyType}, -}; - -#[derive(Serialize, PartialEq, Debug)] -pub struct NymOperation { - #[serde(rename = "type")] - pub _type: String, - pub dest: ShortDidValue, - #[serde(skip_serializing_if = "Option::is_none")] - pub verkey: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub alias: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub role: Option<::serde_json::Value>, -} - -impl NymOperation { - pub fn new( - dest: ShortDidValue, - verkey: Option, - alias: Option, - role: Option<::serde_json::Value>, - ) 
-> NymOperation { - NymOperation { - _type: NYM.to_string(), - dest, - verkey, - alias, - role, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetNymOperation { - #[serde(rename = "type")] - pub _type: String, - pub dest: ShortDidValue, -} - -impl GetNymOperation { - pub fn new(dest: ShortDidValue) -> GetNymOperation { - GetNymOperation { - _type: GET_NYM.to_string(), - dest, - } - } -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum GetNymReplyResult { - GetNymReplyResultV0(GetReplyResultV0), - GetNymReplyResultV1(GetReplyResultV1), -} - -impl ReplyType for GetNymReplyResult { - fn get_type<'a>() -> &'a str { - GET_NYM - } -} - -#[derive(Deserialize, Eq, PartialEq, Debug)] -pub struct GetNymResultDataV0 { - pub identifier: Option, - pub dest: ShortDidValue, - pub role: Option, - pub verkey: Option, -} - -#[derive(Deserialize, Eq, PartialEq, Debug)] -pub struct GetNymResultDataV1 { - pub ver: String, - pub id: String, - pub did: ShortDidValue, - pub verkey: Option, - pub role: Option, -} - -#[derive(Serialize, Deserialize, Eq, PartialEq, Debug)] -pub struct NymData { - pub did: ShortDidValue, - pub verkey: Option, - pub role: Option, -} - -#[derive(Serialize, Deserialize, Eq, PartialEq, Debug)] -pub struct NymTxnParams { - pub dest: DidValue, - #[serde(skip_serializing_if = "Option::is_none")] - pub verkey: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub alias: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub role: Option, -} diff --git a/libvdrtools/src/domain/ledger/mod.rs b/libvdrtools/src/domain/ledger/mod.rs deleted file mode 100644 index ca6b1e1160..0000000000 --- a/libvdrtools/src/domain/ledger/mod.rs +++ /dev/null @@ -1,16 +0,0 @@ -pub mod attrib; -pub mod auth_rule; -pub mod author_agreement; -pub mod constants; -pub mod cred_def; -pub mod ddo; -pub mod did; -pub mod node; -pub mod pool; -pub mod request; -pub mod response; -pub mod rev_reg; -pub mod rev_reg_def; -pub mod schema; -pub mod txn; -pub mod validator_info; diff --git a/libvdrtools/src/domain/ledger/node.rs b/libvdrtools/src/domain/ledger/node.rs deleted file mode 100644 index c819019573..0000000000 --- a/libvdrtools/src/domain/ledger/node.rs +++ /dev/null @@ -1,76 +0,0 @@ -use super::constants::NODE; - -use super::super::crypto::did::ShortDidValue; -use indy_api_types::validation::Validatable; - -#[derive(Serialize, PartialEq, Debug)] -pub struct NodeOperation { - #[serde(rename = "type")] - pub _type: String, - pub dest: ShortDidValue, - pub data: NodeOperationData, -} - -impl NodeOperation { - pub fn new(dest: ShortDidValue, data: NodeOperationData) -> NodeOperation { - NodeOperation { - _type: NODE.to_string(), - dest, - data, - } - } -} - -#[derive(Serialize, PartialEq, Debug, Deserialize)] -pub enum Services { - VALIDATOR, - OBSERVER, -} - -#[derive(Serialize, PartialEq, Debug, Deserialize)] -pub struct NodeOperationData { - #[serde(skip_serializing_if = "Option::is_none")] - pub node_ip: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub node_port: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub client_ip: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub client_port: Option, - pub alias: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub services: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub blskey: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub blskey_pop: Option, -} - -impl Validatable for NodeOperationData { - fn validate(&self) -> 
Result<(), String> { - if self.node_ip.is_none() - && self.node_port.is_none() - && self.client_ip.is_none() - && self.client_port.is_none() - && self.services.is_none() - && self.blskey.is_none() - && self.blskey_pop.is_none() - { - return Err(String::from("Invalid data json: all fields missed at once")); - } - - if (self.node_ip.is_some() - || self.node_port.is_some() - || self.client_ip.is_some() - || self.client_port.is_some()) - && (self.node_ip.is_none() - || self.node_port.is_none() - || self.client_ip.is_none() - || self.client_port.is_none()) - { - return Err(String::from("Invalid data json: Fields node_ip, node_port, client_ip, client_port must be specified together")); - } - - Ok(()) - } -} diff --git a/libvdrtools/src/domain/ledger/pool.rs b/libvdrtools/src/domain/ledger/pool.rs deleted file mode 100644 index cf25932369..0000000000 --- a/libvdrtools/src/domain/ledger/pool.rs +++ /dev/null @@ -1,93 +0,0 @@ -use super::constants::{POOL_CONFIG, POOL_RESTART, POOL_UPGRADE}; - -use std::collections::HashMap; - -#[derive(Serialize, PartialEq, Debug)] -pub struct PoolConfigOperation { - #[serde(rename = "type")] - pub _type: String, - pub writes: bool, - pub force: bool, -} - -impl PoolConfigOperation { - pub fn new(writes: bool, force: bool) -> PoolConfigOperation { - PoolConfigOperation { - _type: POOL_CONFIG.to_string(), - writes, - force, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct PoolRestartOperation { - #[serde(rename = "type")] - pub _type: String, - pub action: String, - //start, cancel - #[serde(skip_serializing_if = "Option::is_none")] - pub datetime: Option, -} - -impl PoolRestartOperation { - pub fn new(action: &str, datetime: Option) -> PoolRestartOperation { - PoolRestartOperation { - _type: POOL_RESTART.to_string(), - action: action.to_string(), - datetime, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct PoolUpgradeOperation { - #[serde(rename = "type")] - pub _type: String, - pub name: String, - pub version: String, - pub action: String, - //start, cancel - pub sha256: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub timeout: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub schedule: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub justification: Option, - pub reinstall: bool, - pub force: bool, - #[serde(skip_serializing_if = "Option::is_none")] - pub package: Option, -} - -impl PoolUpgradeOperation { - pub fn new( - name: &str, - version: &str, - action: &str, - sha256: &str, - timeout: Option, - schedule: Option>, - justification: Option<&str>, - reinstall: bool, - force: bool, - package: Option<&str>, - ) -> PoolUpgradeOperation { - PoolUpgradeOperation { - _type: POOL_UPGRADE.to_string(), - name: name.to_string(), - version: version.to_string(), - action: action.to_string(), - sha256: sha256.to_string(), - timeout, - schedule, - justification: justification.map(String::from), - reinstall, - force, - package: package.map(String::from), - } - } -} - -pub type Schedule = HashMap; diff --git a/libvdrtools/src/domain/ledger/request.rs b/libvdrtools/src/domain/ledger/request.rs deleted file mode 100644 index 58b0c073c9..0000000000 --- a/libvdrtools/src/domain/ledger/request.rs +++ /dev/null @@ -1,102 +0,0 @@ -use serde; -use serde_json; -use time; - -use std::{ - collections::HashMap, - sync::atomic::{AtomicUsize, Ordering}, -}; - -use lazy_static::lazy_static; - -use super::super::crypto::did::{DidValue, ShortDidValue}; - -pub const DEFAULT_LIBIDY_DID: &str = 
"LibindyDid111111111111"; - -pub struct ProtocolVersion {} - -lazy_static! { - pub static ref PROTOCOL_VERSION: AtomicUsize = AtomicUsize::new(2); -} - -impl ProtocolVersion { - pub fn set(version: usize) { - PROTOCOL_VERSION.store(version, Ordering::Relaxed); - } - - pub fn get() -> usize { - PROTOCOL_VERSION.load(Ordering::Relaxed) - } - - pub fn is_node_1_3() -> bool { - ProtocolVersion::get() == 1 - } -} - -#[derive(Serialize, Deserialize, PartialEq, Debug)] -#[serde(rename_all = "camelCase")] -pub struct TxnAuthrAgrmtAcceptanceData { - pub mechanism: String, - pub taa_digest: String, - pub time: u64, -} - -fn get_req_id() -> u64 { - time::OffsetDateTime::now_utc().unix_timestamp() as u64 * (1e9 as u64) - + time::OffsetDateTime::now_utc().unix_timestamp_nanos() as u64 -} - -#[derive(Serialize, Deserialize, PartialEq, Debug)] -#[serde(rename_all = "camelCase")] -pub struct Request { - pub req_id: u64, - #[serde(skip_serializing_if = "Option::is_none")] - pub identifier: Option, - pub operation: T, - pub protocol_version: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub signature: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub signatures: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub taa_acceptance: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub endorser: Option, -} - -impl Request { - pub fn new( - req_id: u64, - identifier: ShortDidValue, - operation: T, - protocol_version: usize, - ) -> Request { - Request { - req_id, - identifier: Some(identifier), - operation, - protocol_version: Some(protocol_version), - signature: None, - signatures: None, - taa_acceptance: None, - endorser: None, - } - } - - pub fn build_request(identifier: Option<&DidValue>, operation: T) -> Result { - let req_id = get_req_id(); - - let identifier = match identifier { - Some(identifier_) => identifier_.clone().to_short(), - None => ShortDidValue(DEFAULT_LIBIDY_DID.to_string()), - }; - - serde_json::to_string(&Request::new( - req_id, - identifier, - operation, - ProtocolVersion::get(), - )) - .map_err(|err| format!("Cannot serialize Request: {:?}", err)) - } -} diff --git a/libvdrtools/src/domain/ledger/response.rs b/libvdrtools/src/domain/ledger/response.rs deleted file mode 100644 index 797d8b7340..0000000000 --- a/libvdrtools/src/domain/ledger/response.rs +++ /dev/null @@ -1,89 +0,0 @@ -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct Response { - pub req_id: u64, - pub reason: String, -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum Reply { - ReplyV0(ReplyV0), - ReplyV1(ReplyV1), -} - -impl Reply { - pub fn result(self) -> T { - match self { - Reply::ReplyV0(reply) => reply.result, - Reply::ReplyV1(mut reply) => reply.data.result.remove(0).result, - } - } -} - -#[derive(Debug, Deserialize)] -pub struct ReplyV0 { - pub result: T, -} - -#[derive(Debug, Deserialize)] -pub struct ReplyV1 { - pub data: ReplyDataV1, -} - -#[derive(Debug, Deserialize)] -pub struct ReplyDataV1 { - pub result: Vec>, -} - -#[derive(Debug, Deserialize)] -pub struct GetReplyResultV0 { - pub data: Option, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GetReplyResultV1 { - pub txn: GetReplyTxnV1, - pub txn_metadata: TxnMetadata, -} - -#[derive(Debug, Deserialize)] -pub struct GetReplyTxnV1 { - pub data: T, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct TxnMetadata { - pub seq_no: u32, - pub creation_time: u64, -} - 
-#[derive(Deserialize, Debug)] -#[serde(tag = "op")] -pub enum Message { - #[serde(rename = "REQNACK")] - ReqNACK(Response), - #[serde(rename = "REPLY")] - Reply(Reply), - #[serde(rename = "REJECT")] - Reject(Response), -} - -pub trait ReplyType { - fn get_type<'a>() -> &'a str; -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct ResponseMetadata { - #[serde(skip_serializing_if = "Option::is_none")] - pub seq_no: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub txn_time: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub last_txn_time: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub last_seq_no: Option, -} diff --git a/libvdrtools/src/domain/ledger/rev_reg.rs b/libvdrtools/src/domain/ledger/rev_reg.rs deleted file mode 100644 index 14ebd679f5..0000000000 --- a/libvdrtools/src/domain/ledger/rev_reg.rs +++ /dev/null @@ -1,162 +0,0 @@ -use super::constants::{GET_REVOC_REG, GET_REVOC_REG_DELTA, REVOC_REG_ENTRY}; - -use ursa::cl::{RevocationRegistry, RevocationRegistryDelta}; - -use super::{ - super::anoncreds::{ - revocation_registry::RevocationRegistryV1, - revocation_registry_definition::RevocationRegistryId, - revocation_registry_delta::RevocationRegistryDeltaV1, - }, - response::{GetReplyResultV1, ReplyType}, -}; - -use std::collections::HashSet; - -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct RevRegEntryOperation { - #[serde(rename = "type")] - pub _type: String, - pub revoc_reg_def_id: RevocationRegistryId, - pub revoc_def_type: String, - pub value: RevocationRegistryDelta, -} - -impl RevRegEntryOperation { - pub fn new( - rev_def_type: &str, - revoc_reg_def_id: &RevocationRegistryId, - value: RevocationRegistryDeltaV1, - ) -> RevRegEntryOperation { - RevRegEntryOperation { - _type: REVOC_REG_ENTRY.to_string(), - revoc_def_type: rev_def_type.to_string(), - revoc_reg_def_id: revoc_reg_def_id.clone(), - value: value.value, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetRevRegOperation { - #[serde(rename = "type")] - pub _type: String, - pub revoc_reg_def_id: RevocationRegistryId, - pub timestamp: i64, -} - -impl GetRevRegOperation { - pub fn new(revoc_reg_def_id: &RevocationRegistryId, timestamp: i64) -> GetRevRegOperation { - GetRevRegOperation { - _type: GET_REVOC_REG.to_string(), - revoc_reg_def_id: revoc_reg_def_id.clone(), - timestamp, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetRevRegDeltaOperation { - #[serde(rename = "type")] - pub _type: String, - pub revoc_reg_def_id: RevocationRegistryId, - #[serde(skip_serializing_if = "Option::is_none")] - pub from: Option, - pub to: i64, -} - -impl GetRevRegDeltaOperation { - pub fn new( - revoc_reg_def_id: &RevocationRegistryId, - from: Option, - to: i64, - ) -> GetRevRegDeltaOperation { - GetRevRegDeltaOperation { - _type: GET_REVOC_REG_DELTA.to_string(), - revoc_reg_def_id: revoc_reg_def_id.clone(), - from, - to, - } - } -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum GetRevocRegReplyResult { - GetRevocRegReplyResultV0(GetRevocRegResultV0), - GetRevocRegReplyResultV1(GetReplyResultV1), -} - -impl ReplyType for GetRevocRegReplyResult { - fn get_type<'a>() -> &'a str { - GET_REVOC_REG - } -} - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetRevocRegResultV0 { - pub seq_no: i32, - pub revoc_reg_def_id: RevocationRegistryId, - pub data: 
RevocationRegistryV1, - pub txn_time: u64, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GetRevocRegDataV1 { - pub revoc_reg_def_id: RevocationRegistryId, - pub value: RevocationRegistryV1, -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct RevocationRegistryDeltaData { - pub value: RevocationRegistryDeltaValue, -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -pub struct RevocationRegistryDeltaValue { - pub accum_from: Option, - pub accum_to: AccumulatorState, - pub issued: HashSet, - pub revoked: HashSet, -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct AccumulatorState { - pub value: RevocationRegistry, - pub txn_time: u64, -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum GetRevocRegDeltaReplyResult { - GetRevocRegDeltaReplyResultV0(GetRevocRegDeltaResultV0), - GetRevocRegDeltaReplyResultV1(GetReplyResultV1), -} - -impl ReplyType for GetRevocRegDeltaReplyResult { - fn get_type<'a>() -> &'a str { - GET_REVOC_REG_DELTA - } -} - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetRevocRegDeltaResultV0 { - pub seq_no: i32, - pub revoc_reg_def_id: RevocationRegistryId, - pub data: RevocationRegistryDeltaData, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GetRevocRegDeltaDataV1 { - pub revoc_reg_def_id: RevocationRegistryId, - pub value: RevocationRegistryDeltaData, -} diff --git a/libvdrtools/src/domain/ledger/rev_reg_def.rs b/libvdrtools/src/domain/ledger/rev_reg_def.rs deleted file mode 100644 index 56ac00f159..0000000000 --- a/libvdrtools/src/domain/ledger/rev_reg_def.rs +++ /dev/null @@ -1,73 +0,0 @@ -use super::{ - super::anoncreds::{ - credential_definition::CredentialDefinitionId, - revocation_registry_definition::{ - RevocationRegistryDefinitionV1, RevocationRegistryDefinitionValue, RevocationRegistryId, - }, - }, - constants::{GET_REVOC_REG_DEF, REVOC_REG_DEF}, - response::{GetReplyResultV1, ReplyType}, -}; - -#[derive(Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct RevRegDefOperation { - #[serde(rename = "type")] - pub _type: String, - pub id: RevocationRegistryId, - #[serde(rename = "revocDefType")] - pub type_: String, - pub tag: String, - pub cred_def_id: CredentialDefinitionId, - pub value: RevocationRegistryDefinitionValue, -} - -impl RevRegDefOperation { - pub fn new(rev_reg_def: RevocationRegistryDefinitionV1) -> RevRegDefOperation { - RevRegDefOperation { - _type: REVOC_REG_DEF.to_string(), - id: rev_reg_def.id, - type_: rev_reg_def.revoc_def_type.to_str().to_string(), - tag: rev_reg_def.tag, - cred_def_id: rev_reg_def.cred_def_id, - value: rev_reg_def.value, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetRevRegDefOperation { - #[serde(rename = "type")] - pub _type: String, - pub id: RevocationRegistryId, -} - -impl GetRevRegDefOperation { - pub fn new(id: &RevocationRegistryId) -> GetRevRegDefOperation { - GetRevRegDefOperation { - _type: GET_REVOC_REG_DEF.to_string(), - id: id.clone(), - } - } -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum GetRevocRegDefReplyResult { - GetRevocRegDefReplyResultV0(GetRevocRegDefResultV0), - GetRevocRegDefReplyResultV1(GetReplyResultV1), -} - -impl ReplyType for GetRevocRegDefReplyResult { - fn get_type<'a>() -> &'a str { - GET_REVOC_REG_DEF - } -} - -#[derive(Deserialize, Serialize, 
Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetRevocRegDefResultV0 { - pub seq_no: i32, - pub data: RevocationRegistryDefinitionV1, -} diff --git a/libvdrtools/src/domain/ledger/schema.rs b/libvdrtools/src/domain/ledger/schema.rs deleted file mode 100644 index f2a286f583..0000000000 --- a/libvdrtools/src/domain/ledger/schema.rs +++ /dev/null @@ -1,107 +0,0 @@ -use super::{ - super::{anoncreds::schema::SchemaId, crypto::did::ShortDidValue}, - constants::{GET_SCHEMA, SCHEMA}, - response::{GetReplyResultV1, ReplyType}, -}; - -use std::collections::HashSet; - -#[derive(Serialize, PartialEq, Debug)] -pub struct SchemaOperation { - #[serde(rename = "type")] - pub _type: String, - pub data: SchemaOperationData, -} - -impl SchemaOperation { - pub fn new(data: SchemaOperationData) -> SchemaOperation { - SchemaOperation { - data, - _type: SCHEMA.to_string(), - } - } -} - -#[derive(Serialize, PartialEq, Debug, Deserialize)] -pub struct SchemaOperationData { - pub name: String, - pub version: String, - pub attr_names: HashSet, -} - -impl SchemaOperationData { - pub fn new(name: String, version: String, attr_names: HashSet) -> SchemaOperationData { - SchemaOperationData { - name, - version, - attr_names, - } - } -} - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetSchemaOperation { - #[serde(rename = "type")] - pub _type: String, - pub dest: ShortDidValue, - pub data: GetSchemaOperationData, -} - -impl GetSchemaOperation { - pub fn new(dest: ShortDidValue, data: GetSchemaOperationData) -> GetSchemaOperation { - GetSchemaOperation { - _type: GET_SCHEMA.to_string(), - dest, - data, - } - } -} - -#[derive(Serialize, PartialEq, Debug, Deserialize)] -pub struct GetSchemaOperationData { - pub name: String, - pub version: String, -} - -impl GetSchemaOperationData { - pub fn new(name: String, version: String) -> GetSchemaOperationData { - GetSchemaOperationData { name, version } - } -} - -#[derive(Debug, Deserialize)] -#[serde(untagged)] -pub enum GetSchemaReplyResult { - GetSchemaReplyResultV0(GetSchemaResultV0), - GetSchemaReplyResultV1(GetReplyResultV1), -} - -impl ReplyType for GetSchemaReplyResult { - fn get_type<'a>() -> &'a str { - GET_SCHEMA - } -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetSchemaResultV0 { - pub seq_no: u32, - pub data: SchemaOperationData, - pub dest: ShortDidValue, -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetSchemaResultDataV1 { - pub ver: String, - pub id: SchemaId, - pub schema_name: String, - pub schema_version: String, - pub value: GetSchemaResultDataValueV1, -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetSchemaResultDataValueV1 { - pub attr_names: HashSet, -} diff --git a/libvdrtools/src/domain/ledger/txn.rs b/libvdrtools/src/domain/ledger/txn.rs deleted file mode 100644 index bdc69bdf5a..0000000000 --- a/libvdrtools/src/domain/ledger/txn.rs +++ /dev/null @@ -1,37 +0,0 @@ -use super::constants::GET_TXN; - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetTxnOperation { - #[serde(rename = "type")] - pub _type: String, - pub data: i32, - #[serde(rename = "ledgerId")] - pub ledger_id: i32, -} - -impl GetTxnOperation { - pub fn new(data: i32, ledger_id: i32) -> GetTxnOperation { - GetTxnOperation { - _type: GET_TXN.to_string(), - data, - ledger_id, - } - } -} - -#[derive(Deserialize, Debug)] -pub enum LedgerType { - POOL = 0, - DOMAIN = 1, - CONFIG = 2, -} - -impl LedgerType { - pub fn to_id(&self) -> i32 { - match *self { - 
LedgerType::POOL => LedgerType::POOL as i32, - LedgerType::DOMAIN => LedgerType::DOMAIN as i32, - LedgerType::CONFIG => LedgerType::CONFIG as i32, - } - } -} diff --git a/libvdrtools/src/domain/ledger/validator_info.rs b/libvdrtools/src/domain/ledger/validator_info.rs deleted file mode 100644 index d5e302af8a..0000000000 --- a/libvdrtools/src/domain/ledger/validator_info.rs +++ /dev/null @@ -1,15 +0,0 @@ -use super::constants::GET_VALIDATOR_INFO; - -#[derive(Serialize, PartialEq, Debug)] -pub struct GetValidatorInfoOperation { - #[serde(rename = "type")] - pub _type: String, -} - -impl GetValidatorInfoOperation { - pub fn new() -> GetValidatorInfoOperation { - GetValidatorInfoOperation { - _type: GET_VALIDATOR_INFO.to_string(), - } - } -} diff --git a/libvdrtools/src/domain/mod.rs b/libvdrtools/src/domain/mod.rs index 9eed3ac863..1386661e11 100644 --- a/libvdrtools/src/domain/mod.rs +++ b/libvdrtools/src/domain/mod.rs @@ -1,15 +1,9 @@ pub mod anoncreds; pub mod cache; pub mod crypto; -pub mod ledger; -pub mod pairwise; - -use indy_api_types::validation::Validatable; #[derive(Debug, Serialize, Deserialize)] pub struct IndyConfig { pub crypto_thread_pool_size: Option, pub collect_backtrace: Option, } - -impl Validatable for IndyConfig {} diff --git a/libvdrtools/src/domain/pairwise/mod.rs b/libvdrtools/src/domain/pairwise/mod.rs deleted file mode 100644 index 4da97bebe7..0000000000 --- a/libvdrtools/src/domain/pairwise/mod.rs +++ /dev/null @@ -1,25 +0,0 @@ -use super::crypto::did::DidValue; - -#[derive(Serialize, Deserialize)] -pub struct Pairwise { - pub my_did: DidValue, - pub their_did: DidValue, - #[serde(skip_serializing_if = "Option::is_none")] - pub metadata: Option, -} - -#[derive(Serialize, Deserialize)] -pub struct PairwiseInfo { - pub my_did: DidValue, - #[serde(skip_serializing_if = "Option::is_none")] - pub metadata: Option, -} - -impl From for PairwiseInfo { - fn from(pairwise: Pairwise) -> Self { - PairwiseInfo { - my_did: pairwise.my_did, - metadata: pairwise.metadata, - } - } -} diff --git a/libvdrtools/src/lib.rs b/libvdrtools/src/lib.rs index 2a181bbcc4..3221fee2a3 100644 --- a/libvdrtools/src/lib.rs +++ b/libvdrtools/src/lib.rs @@ -1,18 +1,9 @@ -#![cfg_attr(feature = "fatal_warnings", deny(warnings))] -#![allow(clippy::all)] - #[macro_use] extern crate log; -extern crate num_traits; - #[macro_use] extern crate serde_derive; -#[macro_use] -extern crate serde_json; - -#[allow(unused_imports)] #[macro_use] extern crate indy_utils; @@ -28,28 +19,13 @@ mod services; use std::sync::Arc; -use lazy_static::lazy_static; - -use crate::{ - controllers::{ - BlobStorageController, ConfigController, CryptoController, DidController, IssuerController, - NonSecretsController, PairwiseController, ProverController, VerifierController, - WalletController, - }, - services::{ - BlobStorageService, CryptoService, IssuerService, ProverService, VerifierService, - WalletService, - }, -}; - -pub use controllers::CredentialDefinitionId; - pub use domain::{ anoncreds::{ credential::{AttributeValues, Credential, CredentialValues}, credential_definition::{ CredentialDefinition, CredentialDefinitionCorrectnessProof, CredentialDefinitionData, - CredentialDefinitionPrivateKey, CredentialDefinitionV1, SignatureType, + CredentialDefinitionId, CredentialDefinitionPrivateKey, CredentialDefinitionV1, + SignatureType, }, credential_offer::CredentialOffer, credential_request::{CredentialRequest, CredentialRequestMetadata}, @@ -62,7 +38,6 @@ pub use domain::{ RevocationRegistryId, RevocationRegistryInfo, }, 
revocation_registry_delta::{RevocationRegistryDelta, RevocationRegistryDeltaV1}, - revocation_state::RevocationStates, schema::{AttributeNames, Schema, SchemaId, SchemaV1}, }, crypto::{ @@ -71,14 +46,17 @@ pub use domain::{ pack::JWE, }, }; - pub use indy_api_types::{ CommandHandle, IndyError, SearchHandle, WalletHandle, INVALID_COMMAND_HANDLE, INVALID_SEARCH_HANDLE, INVALID_WALLET_HANDLE, }; - pub use indy_wallet::WalletRecord; -pub use services::AnoncredsHelpers; +use lazy_static::lazy_static; + +use crate::{ + controllers::{CryptoController, DidController, NonSecretsController, WalletController}, + services::{CryptoService, WalletService}, +}; // Global (lazy inited) instance of Locator lazy_static! { @@ -86,15 +64,9 @@ lazy_static! { } pub struct Locator { - pub issuer_controller: IssuerController, - pub prover_controller: ProverController, - pub verifier_controller: VerifierController, pub crypto_controller: CryptoController, - pub config_controller: ConfigController, pub did_controller: DidController, pub wallet_controller: WalletController, - pub pairwise_controller: PairwiseController, - pub blob_storage_controller: BlobStorageController, pub non_secret_controller: NonSecretsController, } @@ -106,53 +78,22 @@ impl Locator { fn new() -> Locator { info!("new >"); - let issuer_service = Arc::new(IssuerService::new()); - let prover_service = Arc::new(ProverService::new()); - let verifier_service = Arc::new(VerifierService::new()); - let blob_storage_service = Arc::new(BlobStorageService::new()); let crypto_service = Arc::new(CryptoService::new()); let wallet_service = Arc::new(WalletService::new()); - let issuer_controller = IssuerController::new( - issuer_service, - blob_storage_service.clone(), - wallet_service.clone(), - crypto_service.clone(), - ); - - let prover_controller = ProverController::new( - prover_service, - wallet_service.clone(), - crypto_service.clone(), - blob_storage_service.clone(), - ); - - let verifier_controller = VerifierController::new(verifier_service); - let crypto_controller = CryptoController::new(wallet_service.clone(), crypto_service.clone()); - let config_controller = ConfigController::new(); - let did_controller = DidController::new(wallet_service.clone(), crypto_service.clone()); let wallet_controller = WalletController::new(wallet_service.clone(), crypto_service.clone()); - - let pairwise_controller = PairwiseController::new(wallet_service.clone()); - let blob_storage_controller = BlobStorageController::new(blob_storage_service.clone()); let non_secret_controller = NonSecretsController::new(wallet_service.clone()); let res = Locator { - issuer_controller, - prover_controller, - verifier_controller, crypto_controller, - config_controller, did_controller, wallet_controller, - pairwise_controller, - blob_storage_controller, non_secret_controller, }; @@ -166,30 +107,3 @@ impl Drop for Locator { info!(target: "Locator", "drop <>"); } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn locator_new_works() { - let _locator = Locator::new(); - assert!(true); - } - - #[test] - fn locator_drop_works() { - { - let _locator = Locator::new(); - } - - assert!(true); - } - - #[test] - fn locator_get_instance_works() { - let locator = Locator::instance(); - let locator2 = Locator::instance(); - assert!(std::ptr::eq(locator, locator2)); - } -} diff --git a/libvdrtools/src/services/anoncreds/helpers.rs b/libvdrtools/src/services/anoncreds/helpers.rs deleted file mode 100644 index 479826d4d2..0000000000 --- a/libvdrtools/src/services/anoncreds/helpers.rs +++ 
/dev/null @@ -1,317 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use indy_api_types::errors::prelude::*; - -use ursa::cl::{ - issuer::Issuer as UrsaIssuer, verifier::Verifier as UrsaVerifier, CredentialSchema, - CredentialValues, MasterSecret, NonCredentialSchema, SubProofRequest, -}; - -use crate::domain::{ - anoncreds::{ - credential::AttributeValues, - credential_definition::{CredentialDefinition, CredentialDefinitionId}, - credential_offer::CredentialOffer, - credential_request::CredentialRequest, - proof_request::{AttributeInfo, NonRevocedInterval, PredicateInfo, ProofRequest}, - revocation_registry_definition::{RevocationRegistryDefinition, RevocationRegistryId}, - schema::{Schema, SchemaId}, - }, - crypto::did::DidValue, -}; - -macro_rules! _id_to_unqualified { - ($entity:expr, $type_:ident) => {{ - if $entity.contains($type_::PREFIX) { - return Ok($type_($entity.to_string()).to_unqualified().0); - } - }}; -} - -macro_rules! _object_to_unqualified { - ($entity:expr, $type_:ident) => {{ - if let Ok(object) = ::serde_json::from_str::<$type_>(&$entity) { - return Ok(json!(object.to_unqualified()).to_string()); - } - }}; -} - -pub struct AnoncredsHelpers {} - -impl AnoncredsHelpers { - pub(crate) fn attr_common_view(attr: &str) -> String { - attr.replace(" ", "").to_lowercase() - } - - pub(crate) fn build_credential_schema(attrs: &HashSet) -> IndyResult { - trace!("build_credential_schema > attrs {:?}", attrs); - - let credential_schema = { - let mut builder = UrsaIssuer::new_credential_schema_builder()?; - - for attr in attrs { - builder.add_attr(&Self::attr_common_view(attr))?; - } - - builder.finalize()? - }; - - let res = Ok(credential_schema); - trace!("build_credential_schema < {:?}", res); - res - } - - pub(crate) fn build_non_credential_schema() -> IndyResult { - trace!("build_non_credential_schema >"); - - let schema = { - let mut builder = UrsaIssuer::new_non_credential_schema_builder()?; - builder.add_attr("master_secret")?; - builder.finalize()? - }; - - let res = Ok(schema); - trace!("build_non_credential_schema < {:?}", res); - res - } - - pub(crate) fn build_credential_values( - credential_values: &HashMap, - master_secret: Option<&MasterSecret>, - ) -> IndyResult { - trace!( - "build_credential_values > credential_values {:?} master_secret {:?}", - credential_values, - secret!(master_secret), - ); - - let credential_values = { - let mut builder = UrsaIssuer::new_credential_values_builder()?; - - for (attr, values) in credential_values { - builder.add_dec_known(&Self::attr_common_view(attr), &values.encoded)?; - } - - if let Some(master_secret) = master_secret { - builder.add_value_hidden("master_secret", &master_secret.value()?)?; - } - - builder.finalize()? - }; - - let res = Ok(credential_values); - trace!("build_credential_values < {:?}", res); - res - } - - pub(crate) fn build_sub_proof_request( - attrs_for_credential: &[AttributeInfo], - predicates_for_credential: &[PredicateInfo], - ) -> IndyResult { - trace!( - "build_sub_proof_request > attrs_for_credential {:?} \ - predicates_for_credential {:?}", - attrs_for_credential, - predicates_for_credential - ); - - let sub_proof_request = { - let mut builder = UrsaVerifier::new_sub_proof_request_builder()?; - - for ref attr in attrs_for_credential { - if let Some(ref name) = attr.name { - builder.add_revealed_attr(&Self::attr_common_view(name))? - } else if let Some(ref names) = attr.names { - for ref name in names { - builder.add_revealed_attr(&Self::attr_common_view(name))? 
- } - } else { - Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - r#"Attr for credential restriction should contain "name" or "names" param."#, - ))? - }; - } - - for ref predicate in predicates_for_credential { - builder.add_predicate( - &Self::attr_common_view(&predicate.name), - &predicate.p_type.to_string(), - predicate.p_value, - )?; - } - - builder.finalize()? - }; - - let res = Ok(sub_proof_request); - trace!("build_sub_proof_request < {:?}", res); - res - } - - pub(crate) fn parse_cred_rev_id(cred_rev_id: &str) -> IndyResult { - trace!("parse_cred_rev_id > cred_rev_id {:?}", cred_rev_id); - - let cred_rev_id = cred_rev_id.parse::().to_indy( - IndyErrorKind::InvalidStructure, - "Cannot parse CredentialRevocationId", - )?; - - let res = Ok(cred_rev_id); - trace!("parse_cred_rev_id < {:?}", res); - res - } - - pub(crate) fn get_non_revoc_interval( - global_interval: &Option, - local_interval: &Option, - ) -> Option { - trace!( - "get_non_revoc_interval > global_interval {:?} local_interval {:?}", - global_interval, - local_interval - ); - - let res = local_interval - .clone() - .or_else(|| global_interval.clone().or(None)) - .filter(|x| x.to.is_some() || x.from.is_some()); - - trace!("get_non_revoc_interval < {:?}", res); - res - } - - pub fn to_unqualified(entity: &str) -> IndyResult { - trace!("to_unqualified > entity {:?}", entity); - - _object_to_unqualified!(entity, CredentialDefinition); - _object_to_unqualified!(entity, Schema); - _object_to_unqualified!(entity, RevocationRegistryDefinition); - _object_to_unqualified!(entity, CredentialOffer); - _object_to_unqualified!(entity, CredentialRequest); - _object_to_unqualified!(entity, ProofRequest); - - _id_to_unqualified!(entity, RevocationRegistryId); - _id_to_unqualified!(entity, CredentialDefinitionId); - _id_to_unqualified!(entity, SchemaId); - _id_to_unqualified!(entity, DidValue); - - let res = Ok(entity.to_string()); - trace!("to_unqualified < {:?}", res); - res - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn _interval() -> NonRevocedInterval { - NonRevocedInterval { - from: None, - to: Some(123), - } - } - - #[test] - fn get_non_revoc_interval_for_global() { - let res = AnoncredsHelpers::get_non_revoc_interval(&Some(_interval()), &None).unwrap(); - assert_eq!(_interval(), res); - } - - #[test] - fn get_non_revoc_interval_for_local() { - let res = AnoncredsHelpers::get_non_revoc_interval(&None, &Some(_interval())).unwrap(); - assert_eq!(_interval(), res); - } - - #[test] - fn get_non_revoc_interval_for_none() { - let res = AnoncredsHelpers::get_non_revoc_interval(&None, &None); - assert_eq!(None, res); - } - - #[test] - fn get_non_revoc_interval_for_empty_interval() { - let res = AnoncredsHelpers::get_non_revoc_interval( - &Some(NonRevocedInterval { - from: None, - to: None, - }), - &None, - ); - assert_eq!(None, res); - } - - mod to_unqualified { - use super::*; - - const DID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e"; - const DID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e"; - const SCHEMA_ID_QUALIFIED: &str = - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0"; - const SCHEMA_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0"; - const CRED_DEF_ID_QUALIFIED: &str = - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag"; - const CRED_DEF_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag"; - const REV_REG_ID_QUALIFIED: &str = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0/tag/TAG_1"; - 
const REV_REG_ID_UNQUALIFIED: &str = "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:TAG_1"; - const SCHEMA_ID_WITH_SPACES_QUALIFIED: &str = - "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/Passport Schema/1.0"; - const SCHEMA_ID_WITH_SPACES_UNQUALIFIED: &str = - "NcYxiDXkpYi6ov5FcYDi1e:2:Passport Schema:1.0"; - - #[test] - fn test_to_unqualified() { - // DID - assert_eq!( - DID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(DID_QUALIFIED).unwrap() - ); - assert_eq!( - DID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(DID_UNQUALIFIED).unwrap() - ); - - // SchemaId - assert_eq!( - SCHEMA_ID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(SCHEMA_ID_QUALIFIED).unwrap() - ); - assert_eq!( - SCHEMA_ID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(SCHEMA_ID_UNQUALIFIED).unwrap() - ); - - // SchemaId - assert_eq!( - SCHEMA_ID_WITH_SPACES_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(SCHEMA_ID_WITH_SPACES_QUALIFIED).unwrap() - ); - assert_eq!( - SCHEMA_ID_WITH_SPACES_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(SCHEMA_ID_WITH_SPACES_UNQUALIFIED).unwrap() - ); - - // Credential Definition Id - assert_eq!( - CRED_DEF_ID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(CRED_DEF_ID_QUALIFIED).unwrap() - ); - assert_eq!( - CRED_DEF_ID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(CRED_DEF_ID_UNQUALIFIED).unwrap() - ); - - // Revocation Registry Id - assert_eq!( - REV_REG_ID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(REV_REG_ID_QUALIFIED).unwrap() - ); - assert_eq!( - REV_REG_ID_UNQUALIFIED, - AnoncredsHelpers::to_unqualified(REV_REG_ID_UNQUALIFIED).unwrap() - ); - } - } -} diff --git a/libvdrtools/src/services/anoncreds/issuer.rs b/libvdrtools/src/services/anoncreds/issuer.rs deleted file mode 100644 index 16348f7e48..0000000000 --- a/libvdrtools/src/services/anoncreds/issuer.rs +++ /dev/null @@ -1,284 +0,0 @@ -use indy_api_types::errors::prelude::*; - -use ursa::cl::{ - issuer::Issuer as UrsaIssuer, CredentialKeyCorrectnessProof, CredentialPrivateKey, - CredentialPublicKey, CredentialSignature, Nonce, RevocationKeyPrivate, RevocationRegistry, - RevocationRegistryDelta, RevocationTailsAccessor, RevocationTailsGenerator, - SignatureCorrectnessProof, -}; - -use crate::{ - domain::{ - anoncreds::{ - credential::CredentialValues, - credential_definition::{ - CredentialDefinitionData, CredentialDefinitionV1 as CredentialDefinition, - }, - credential_request::CredentialRequest, - revocation_registry_definition::{ - RevocationRegistryDefinitionV1, RevocationRegistryDefinitionValuePublicKeys, - }, - schema::AttributeNames, - }, - crypto::did::DidValue, - }, - services::AnoncredsHelpers, -}; - -pub struct IssuerService {} - -impl IssuerService { - pub(crate) fn new() -> IssuerService { - IssuerService {} - } - - pub(crate) fn new_credential_definition( - attr_names: &AttributeNames, - support_revocation: bool, - ) -> IndyResult<( - CredentialDefinitionData, - CredentialPrivateKey, - CredentialKeyCorrectnessProof, - )> { - trace!( - "new_credential_definition > attr_names {:?} support_revocation {:?}", - attr_names, - support_revocation - ); - - let credential_schema = AnoncredsHelpers::build_credential_schema(&attr_names.0)?; - let non_credential_schema = AnoncredsHelpers::build_non_credential_schema()?; - - let (credential_public_key, credential_private_key, credential_key_correctness_proof) = - UrsaIssuer::new_credential_def( - &credential_schema, - &non_credential_schema, - support_revocation, - )?; - - let credential_definition_value 
= CredentialDefinitionData { - primary: credential_public_key.get_primary_key()?.try_clone()?, - revocation: credential_public_key.get_revocation_key()?.clone(), - }; - - let res = Ok(( - credential_definition_value, - credential_private_key, - credential_key_correctness_proof, - )); - - trace!("new_credential_definition < {:?}", secret!(&res)); - res - } - - pub(crate) fn new_revocation_registry( - &self, - cred_def: &CredentialDefinition, - max_cred_num: u32, - issuance_by_default: bool, - issuer_did: &DidValue, - ) -> IndyResult<( - RevocationRegistryDefinitionValuePublicKeys, - RevocationKeyPrivate, - RevocationRegistry, - RevocationTailsGenerator, - )> { - trace!( - "new_revocation_registry > pub_key {:?} \ - max_cred_num {:?} issuance_by_default {:?} issuer_did {:?}", - cred_def, - max_cred_num, - issuance_by_default, - issuer_did - ); - - let credential_pub_key = CredentialPublicKey::build_from_parts( - &cred_def.value.primary, - cred_def.value.revocation.as_ref(), - )?; - - let (rev_key_pub, rev_key_priv, rev_reg_entry, rev_tails_generator) = - UrsaIssuer::new_revocation_registry_def( - &credential_pub_key, - max_cred_num, - issuance_by_default, - )?; - - let rev_keys_pub = RevocationRegistryDefinitionValuePublicKeys { - accum_key: rev_key_pub, - }; - - let res = Ok(( - rev_keys_pub, - rev_key_priv, - rev_reg_entry, - rev_tails_generator, - )); - - trace!("new_revocation_registry < {:?}", secret!(&res)); - res - } - - pub(crate) fn new_credential( - &self, - cred_def: &CredentialDefinition, - cred_priv_key: &CredentialPrivateKey, - cred_issuance_blinding_nonce: &Nonce, - cred_request: &CredentialRequest, - cred_values: &CredentialValues, - rev_idx: Option, - rev_reg_def: Option<&RevocationRegistryDefinitionV1>, - rev_reg: Option<&mut RevocationRegistry>, - rev_key_priv: Option<&RevocationKeyPrivate>, - rev_tails_accessor: Option<&RTA>, - ) -> IndyResult<( - CredentialSignature, - SignatureCorrectnessProof, - Option, - )> - where - RTA: RevocationTailsAccessor, - { - trace!( - "new_credential > cred_def {:?} cred_priv_key {:?} \ - cred_issuance_blinding_nonce {:?} cred_request {:?} \ - cred_values {:?} rev_idx {:?} rev_reg_def {:?} \ - rev_reg {:?} rev_key_priv {:?}", - cred_def, - secret!(&cred_priv_key), - secret!(&cred_issuance_blinding_nonce), - secret!(&cred_request), - secret!(&cred_values), - secret!(&rev_idx), - rev_reg_def, - rev_reg, - secret!(&rev_key_priv) - ); - - let credential_values = AnoncredsHelpers::build_credential_values(&cred_values.0, None)?; - - let credential_pub_key = CredentialPublicKey::build_from_parts( - &cred_def.value.primary, - cred_def.value.revocation.as_ref(), - )?; - - let (credential_signature, signature_correctness_proof, rev_reg_delta) = match rev_idx { - Some(rev_idx) => { - let rev_reg = rev_reg.ok_or_else(|| { - err_msg(IndyErrorKind::InvalidState, "RevocationRegistry not found") - })?; - - let rev_key_priv = rev_key_priv.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "RevocationKeyPrivate not found", - ) - })?; - - let rev_reg_def = rev_reg_def.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "RevocationRegistryDefinitionValue not found", - ) - })?; - - let rev_tails_accessor = rev_tails_accessor.ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - "RevocationTailsAccessor not found", - ) - })?; - - UrsaIssuer::sign_credential_with_revoc( - &cred_request.prover_did.0, - &cred_request.blinded_ms, - &cred_request.blinded_ms_correctness_proof, - cred_issuance_blinding_nonce, - &cred_request.nonce, - 
&credential_values, - &credential_pub_key, - &cred_priv_key, - rev_idx, - rev_reg_def.value.max_cred_num, - rev_reg_def.value.issuance_type.to_bool(), - rev_reg, - rev_key_priv, - rev_tails_accessor, - )? - } - None => { - let (signature, correctness_proof) = UrsaIssuer::sign_credential( - &cred_request.prover_did.0, - &cred_request.blinded_ms, - &cred_request.blinded_ms_correctness_proof, - cred_issuance_blinding_nonce, - &cred_request.nonce, - &credential_values, - &credential_pub_key, - &cred_priv_key, - )?; - (signature, correctness_proof, None) - } - }; - - let res = Ok(( - credential_signature, - signature_correctness_proof, - rev_reg_delta, - )); - - trace!("new_credential < {:?}", secret!(&res)); - res - } - - pub(crate) fn revoke( - &self, - rev_reg: &mut RevocationRegistry, - max_cred_num: u32, - rev_idx: u32, - rev_tails_accessor: &RTA, - ) -> IndyResult - where - RTA: RevocationTailsAccessor, - { - trace!( - "revoke > rev_reg {:?} max_cred_num {:?} rev_idx {:?}", - rev_reg, - max_cred_num, - secret!(&rev_idx) - ); - - let rev_reg_delta = - UrsaIssuer::revoke_credential(rev_reg, max_cred_num, rev_idx, rev_tails_accessor)?; - - let res = Ok(rev_reg_delta); - trace!("recovery < {:?}", res); - res - } - - #[allow(dead_code)] - pub(crate) fn recovery( - &self, - rev_reg: &mut RevocationRegistry, - max_cred_num: u32, - rev_idx: u32, - rev_tails_accessor: &RTA, - ) -> IndyResult - where - RTA: RevocationTailsAccessor, - { - trace!( - "revoke > rev_reg {:?} max_cred_num {:?} rev_idx {:?}", - rev_reg, - max_cred_num, - secret!(&rev_idx) - ); - - let rev_reg_delta = - UrsaIssuer::recovery_credential(rev_reg, max_cred_num, rev_idx, rev_tails_accessor)?; - - let res = Ok(rev_reg_delta); - trace!("recovery < {:?}", res); - res - } -} diff --git a/libvdrtools/src/services/anoncreds/mod.rs b/libvdrtools/src/services/anoncreds/mod.rs deleted file mode 100644 index 0e945bee9c..0000000000 --- a/libvdrtools/src/services/anoncreds/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ -mod helpers; -mod issuer; -mod prover; -mod verifier; - -pub use helpers::AnoncredsHelpers; -pub use issuer::IssuerService; -pub use prover::ProverService; -pub use verifier::VerifierService; diff --git a/libvdrtools/src/services/anoncreds/prover.rs b/libvdrtools/src/services/anoncreds/prover.rs deleted file mode 100644 index 0f1a3390f5..0000000000 --- a/libvdrtools/src/services/anoncreds/prover.rs +++ /dev/null @@ -1,1725 +0,0 @@ -use std::collections::{hash_map::Entry, HashMap}; - -use indy_api_types::errors::prelude::*; - -use ursa::cl::{ - issuer::Issuer as UrsaIssuer, prover::Prover as UrsaProver, verifier::Verifier as UrsaVerifier, - BlindedCredentialSecrets, BlindedCredentialSecretsCorrectnessProof, CredentialPublicKey, - CredentialSecretsBlindingFactors, MasterSecret, SubProofRequest, -}; - -use crate::{ - domain::anoncreds::{ - credential::{AttributeValues, Credential}, - credential_attr_tag_policy::CredentialAttrTagPolicy, - credential_definition::{ - CredentialDefinitionId, CredentialDefinitionV1 as CredentialDefinition, - }, - credential_offer::CredentialOffer, - credential_request::CredentialRequestMetadata, - proof::{ - AttributeValue, Identifier, Proof, RequestedProof, RevealedAttributeGroupInfo, - RevealedAttributeInfo, SubProofReferent, - }, - proof_request::{ - PredicateInfo, PredicateTypes, ProofRequest, ProofRequestExtraQuery, - ProofRequestPayload, ProofRequestsVersion, RequestedAttributeInfo, - RequestedPredicateInfo, - }, - requested_credential::{ProvingCredentialKey, RequestedCredentials}, - 
revocation_registry_definition::RevocationRegistryDefinitionV1, - revocation_state::RevocationState, - schema::{SchemaId, SchemaV1}, - }, - services::{AnoncredsHelpers, VerifierService}, - utils::wql::Query, -}; - -const ATTRIBUTE_EXISTENCE_MARKER: &str = "1"; - -pub struct ProverService {} - -impl ProverService { - pub fn new() -> ProverService { - ProverService {} - } - - pub fn new_master_secret(&self) -> IndyResult { - trace!("new_master_secret > "); - - let master_secret = UrsaProver::new_master_secret()?; - - let res = Ok(master_secret); - trace!("new_master_secret < {:?}", secret!(&res)); - res - } - - pub fn new_credential_request( - &self, - cred_def: &CredentialDefinition, - master_secret: &MasterSecret, - credential_offer: &CredentialOffer, - ) -> IndyResult<( - BlindedCredentialSecrets, - CredentialSecretsBlindingFactors, - BlindedCredentialSecretsCorrectnessProof, - )> { - trace!( - "new_credential_request > cred_def {:?} master_secret {:?} credential_offer {:?}", - cred_def, - secret!(&master_secret), - credential_offer - ); - - let credential_pub_key = CredentialPublicKey::build_from_parts( - &cred_def.value.primary, - cred_def.value.revocation.as_ref(), - )?; - - let cred_values = { - let mut builder = UrsaIssuer::new_credential_values_builder()?; - builder.add_value_hidden("master_secret", &master_secret.value()?)?; - builder.finalize()? - }; - - let blinded_secrets = UrsaProver::blind_credential_secrets( - &credential_pub_key, - &credential_offer.key_correctness_proof, - &cred_values, - &credential_offer.nonce, - )?; - - let res = Ok(blinded_secrets); - trace!("new_credential_request < {:?}", res); - res - } - - pub fn process_credential( - &self, - credential: &mut Credential, - cred_request_metadata: &CredentialRequestMetadata, - master_secret: &MasterSecret, - cred_def: &CredentialDefinition, - rev_reg_def: Option<&RevocationRegistryDefinitionV1>, - ) -> IndyResult<()> { - trace!( - "process_credential > credential {:?} cred_request_metadata {:?} \ - master_secret {:?} cred_def {:?} rev_reg_def {:?}", - credential, - cred_request_metadata, - secret!(&master_secret), - cred_def, - rev_reg_def - ); - - let credential_pub_key = CredentialPublicKey::build_from_parts( - &cred_def.value.primary, - cred_def.value.revocation.as_ref(), - )?; - - let credential_values = - AnoncredsHelpers::build_credential_values(&credential.values.0, Some(master_secret))?; - - UrsaProver::process_credential_signature( - &mut credential.signature, - &credential_values, - &credential.signature_correctness_proof, - &cred_request_metadata.master_secret_blinding_data, - &credential_pub_key, - &cred_request_metadata.nonce, - rev_reg_def - .as_ref() - .map(|r_reg_def| &r_reg_def.value.public_keys.accum_key), - credential.rev_reg.as_ref(), - credential.witness.as_ref(), - )?; - - let res = Ok(()); - trace!("process_credential < {:?}", res); - res - } - - pub fn create_proof( - &self, - credentials: &HashMap, - proof_req: &ProofRequest, - requested_credentials: &RequestedCredentials, - master_secret: &MasterSecret, - schemas: &HashMap, - cred_defs: &HashMap, - rev_states: &HashMap>, - ) -> IndyResult { - trace!( - "create_proof > credentials {:?} proof_req {:?} \ - requested_credentials {:?} master_secret {:?} \ - schemas {:?} cred_defs {:?} rev_states {:?}", - credentials, - proof_req, - requested_credentials, - secret!(&master_secret), - schemas, - cred_defs, - rev_states - ); - - let proof_req_val = proof_req.value(); - - let mut proof_builder = { - let mut builder = UrsaProver::new_proof_builder()?; 
- builder.add_common_attribute("master_secret")?; - builder - }; - - let mut requested_proof = { - let mut rp = RequestedProof::default(); - rp.self_attested_attrs = requested_credentials.self_attested_attributes.clone(); - rp - }; - - let credentials_for_proving = - Self::_prepare_credentials_for_proving(requested_credentials, proof_req_val)?; - - let non_credential_schema = AnoncredsHelpers::build_non_credential_schema()?; - let mut identifiers: Vec = Vec::with_capacity(credentials_for_proving.len()); - let mut sub_proof_index = 0; - - for (cred_key, (req_attrs_for_cred, req_predicates_for_cred)) in credentials_for_proving { - let credential = credentials.get(cred_key.cred_id.as_str()).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Credential not found by id {:?}", cred_key.cred_id), - ) - })?; - - let schema = schemas.get(&credential.schema_id).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Schema not found by id {:?}", credential.schema_id), - ) - })?; - - let cred_def = cred_defs.get(&credential.cred_def_id).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!( - "CredentialDefinition not found by id {:?}", - credential.cred_def_id - ), - ) - })?; - - let rev_state = if let Some(timestamp) = cred_key.timestamp { - let rev_reg_id = credential.rev_reg_id.clone().ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Revocation Registry Id not found", - ) - })?; - - let rev_states_for_timestamp = rev_states - .get(&rev_reg_id.0) - .or(rev_states.get(cred_key.cred_id.as_str())) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("RevocationState not found by id {:?}", rev_reg_id), - ) - })?; - - Some(rev_states_for_timestamp.get(×tamp).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("RevocationInfo not found by timestamp {:?}", timestamp), - ) - })?) 
- } else { - None - }; - - let credential_pub_key = CredentialPublicKey::build_from_parts( - &cred_def.value.primary, - cred_def.value.revocation.as_ref(), - )?; - - let credential_schema = - AnoncredsHelpers::build_credential_schema(&schema.attr_names.0)?; - - let credential_values = AnoncredsHelpers::build_credential_values( - &credential.values.0, - Some(master_secret), - )?; - - let sub_proof_request = - Self::_build_sub_proof_request(&req_attrs_for_cred, &req_predicates_for_cred)?; - - proof_builder.add_sub_proof_request( - &sub_proof_request, - &credential_schema, - &non_credential_schema, - &credential.signature, - &credential_values, - &credential_pub_key, - rev_state.as_ref().map(|r_info| &r_info.rev_reg), - rev_state.as_ref().map(|r_info| &r_info.witness), - )?; - - let identifier = match proof_req { - ProofRequest::ProofRequestV1(_) => Identifier { - schema_id: credential.schema_id.to_unqualified(), - cred_def_id: credential.cred_def_id.to_unqualified(), - rev_reg_id: credential.rev_reg_id.as_ref().map(|id| id.to_unqualified()), - timestamp: cred_key.timestamp, - }, - ProofRequest::ProofRequestV2(_) => Identifier { - schema_id: credential.schema_id.clone(), - cred_def_id: credential.cred_def_id.clone(), - rev_reg_id: credential.rev_reg_id.clone(), - timestamp: cred_key.timestamp, - }, - }; - - identifiers.push(identifier); - - Self::_update_requested_proof( - req_attrs_for_cred, - req_predicates_for_cred, - proof_req_val, - credential, - sub_proof_index, - &mut requested_proof, - )?; - - sub_proof_index += 1; - } - - let proof = proof_builder.finalize(&proof_req_val.nonce)?; - - let full_proof = Proof { - proof, - requested_proof, - identifiers, - }; - - let res = Ok(full_proof); - trace!("create_proof < {:?}", res); - res - } - - pub fn _prepare_credentials_for_proving( - requested_credentials: &RequestedCredentials, - proof_req: &ProofRequestPayload, - ) -> IndyResult< - HashMap, Vec)>, - > { - trace!( - "_prepare_credentials_for_proving > requested_credentials {:?} proof_req {:?}", - requested_credentials, - proof_req - ); - - let mut credentials_for_proving: HashMap< - ProvingCredentialKey, - (Vec, Vec), - > = HashMap::new(); - - for (attr_referent, requested_attr) in requested_credentials.requested_attributes.iter() { - let attr_info = proof_req - .requested_attributes - .get(attr_referent.as_str()) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!( - "AttributeInfo not found in ProofRequest for referent \"{}\"", - attr_referent.as_str() - ), - ) - })?; - - let req_attr_info = RequestedAttributeInfo { - attr_referent: attr_referent.clone(), - attr_info: attr_info.clone(), - revealed: requested_attr.revealed, - }; - - match credentials_for_proving.entry(ProvingCredentialKey { - cred_id: requested_attr.cred_id.clone(), - timestamp: requested_attr.timestamp, - }) { - Entry::Occupied(cred_for_proving) => { - let &mut (ref mut attributes_for_credential, _) = cred_for_proving.into_mut(); - attributes_for_credential.push(req_attr_info); - } - Entry::Vacant(attributes_for_credential) => { - attributes_for_credential.insert((vec![req_attr_info], Vec::new())); - } - }; - } - - for (predicate_referent, proving_cred_key) in - requested_credentials.requested_predicates.iter() - { - let predicate_info = proof_req - .requested_predicates - .get(predicate_referent.as_str()) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!( - "PredicateInfo not found in ProofRequest for referent \"{}\"", - predicate_referent.as_str() - ), - ) - })?; - - let 
req_predicate_info = RequestedPredicateInfo { - predicate_referent: predicate_referent.clone(), - predicate_info: predicate_info.clone(), - }; - - match credentials_for_proving.entry(proving_cred_key.clone()) { - Entry::Occupied(cred_for_proving) => { - let &mut (_, ref mut predicates_for_credential) = cred_for_proving.into_mut(); - predicates_for_credential.push(req_predicate_info); - } - Entry::Vacant(v) => { - v.insert((Vec::new(), vec![req_predicate_info])); - } - }; - } - - let res = Ok(credentials_for_proving); - trace!("_prepare_credentials_for_proving < {:?}", res); - res - } - - pub fn get_credential_values_for_attribute( - &self, - credential_attrs: &HashMap, - requested_attr: &str, - ) -> Option { - trace!( - "get_credential_values_for_attribute > credential_attrs {:?} requested_attr {:?}", - credential_attrs, - requested_attr - ); - - let res = Self::_get_credential_values_for_attribute(credential_attrs, requested_attr); - - trace!("get_credential_values_for_attribute < {:?}", res); - res - } - - fn _get_credential_values_for_attribute( - credential_attrs: &HashMap, - requested_attr: &str, - ) -> Option { - credential_attrs - .iter() - .find(|&(ref key, _)| { - AnoncredsHelpers::attr_common_view(key) - == AnoncredsHelpers::attr_common_view(&requested_attr) - }) - .map(|(_, values)| values.clone()) - } - - pub fn build_credential_tags( - &self, - credential: &Credential, - catpol: Option<&CredentialAttrTagPolicy>, - ) -> IndyResult> { - trace!( - "build_credential_tags > credential {:?} catpol {:?}", - credential, - catpol - ); - - let mut res: HashMap = HashMap::new(); - - let (schema_issuer_did, schema_name, schema_version) = - credential.schema_id.parts().ok_or(IndyError::from_msg( - IndyErrorKind::InvalidState, - format!( - "Invalid Schema ID `{}`: wrong number of parts", - credential.schema_id.0 - ), - ))?; - - let issuer_did = credential - .cred_def_id - .issuer_did() - .ok_or(IndyError::from_msg( - IndyErrorKind::InvalidState, - format!( - "Invalid Credential Definition ID `{}`: wrong number of parts", - credential.cred_def_id.0 - ), - ))?; - - res.insert("schema_id".to_string(), credential.schema_id.0.to_string()); - - res.insert( - "schema_issuer_did".to_string(), - schema_issuer_did.0.to_string(), - ); - - res.insert("schema_name".to_string(), schema_name); - res.insert("schema_version".to_string(), schema_version); - res.insert("issuer_did".to_string(), issuer_did.0.to_string()); - - res.insert( - "cred_def_id".to_string(), - credential.cred_def_id.0.to_string(), - ); - - res.insert( - "rev_reg_id".to_string(), - credential - .rev_reg_id - .as_ref() - .map(|rev_reg_id| rev_reg_id.0.clone()) - .unwrap_or_else(|| "None".to_string()), - ); - - if credential.cred_def_id.is_fully_qualified() { - res.insert( - Credential::add_extra_tag_suffix("schema_id"), - credential.schema_id.to_unqualified().0, - ); - - res.insert( - Credential::add_extra_tag_suffix("schema_issuer_did"), - schema_issuer_did.to_unqualified().0, - ); - - res.insert( - Credential::add_extra_tag_suffix("issuer_did"), - issuer_did.to_unqualified().0, - ); - - res.insert( - Credential::add_extra_tag_suffix("cred_def_id"), - credential.cred_def_id.to_unqualified().0, - ); - - res.insert( - Credential::add_extra_tag_suffix("rev_reg_id"), - credential - .rev_reg_id - .as_ref() - .map(|rev_reg_id| rev_reg_id.to_unqualified().0.clone()) - .unwrap_or_else(|| "None".to_string()), - ); - } - - credential.values.0.iter().for_each(|(attr, values)| { - if catpol - .map(|cp| cp.is_taggable(attr.as_str())) - 
.unwrap_or(true) - { - // abstain for attrs policy marks untaggable - res.insert( - Self::_build_attr_marker_tag(attr), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ); - - res.insert(Self::_build_attr_value_tag(attr), values.raw.clone()); - } - }); - - let res = Ok(res); - trace!("build_credential_tags < {:?}", res); - res - } - - fn _build_attr_marker_tag(attr: &str) -> String { - format!( - "attr::{}::marker", - AnoncredsHelpers::attr_common_view(&attr) - ) - } - - fn _build_attr_value_tag(attr: &str) -> String { - format!("attr::{}::value", AnoncredsHelpers::attr_common_view(&attr)) - } - - pub fn attribute_satisfy_predicate( - &self, - predicate: &PredicateInfo, - attribute_value: &str, - ) -> IndyResult { - trace!( - "attribute_satisfy_predicate > predicate {:?} attribute_value {:?}", - predicate, - attribute_value - ); - - let res = match predicate.p_type { - PredicateTypes::GE => { - let attribute_value = attribute_value.parse::().to_indy( - IndyErrorKind::InvalidStructure, - format!( - "Credential attribute value \"{:?}\" is invalid", - attribute_value - ), - )?; - Ok(attribute_value >= predicate.p_value) - } - PredicateTypes::GT => { - let attribute_value = attribute_value.parse::().to_indy( - IndyErrorKind::InvalidStructure, - format!( - "Credential attribute value \"{:?}\" is invalid", - attribute_value - ), - )?; - Ok(attribute_value > predicate.p_value) - } - PredicateTypes::LE => { - let attribute_value = attribute_value.parse::().to_indy( - IndyErrorKind::InvalidStructure, - format!( - "Credential attribute value \"{:?}\" is invalid", - attribute_value - ), - )?; - Ok(attribute_value <= predicate.p_value) - } - PredicateTypes::LT => { - let attribute_value = attribute_value.parse::().to_indy( - IndyErrorKind::InvalidStructure, - format!( - "Credential attribute value \"{:?}\" is invalid", - attribute_value - ), - )?; - Ok(attribute_value < predicate.p_value) - } - }; - - trace!("attribute_satisfy_predicate < {:?}", res); - res - } - - fn _update_requested_proof( - req_attrs_for_credential: Vec, - req_predicates_for_credential: Vec, - proof_req: &ProofRequestPayload, - credential: &Credential, - sub_proof_index: u32, - requested_proof: &mut RequestedProof, - ) -> IndyResult<()> { - trace!( - "_update_requested_proof > req_attrs_for_credential {:?} \ - req_predicates_for_credential {:?} proof_req {:?} credential {:?} \ - sub_proof_index {:?} requested_proof {:?}", - req_attrs_for_credential, - req_predicates_for_credential, - proof_req, - credential, - sub_proof_index, - requested_proof - ); - - for attr_info in req_attrs_for_credential { - if attr_info.revealed { - let attribute = &proof_req.requested_attributes[&attr_info.attr_referent]; - - if let Some(name) = &attribute.name { - let attribute_values = - Self::_get_credential_values_for_attribute(&credential.values.0, &name) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Credential value not found for attribute {:?}", name), - ) - })?; - - requested_proof.revealed_attrs.insert( - attr_info.attr_referent.clone(), - RevealedAttributeInfo { - sub_proof_index, - raw: attribute_values.raw, - encoded: attribute_values.encoded, - }, - ); - } else if let Some(names) = &attribute.names { - let mut value_map: HashMap = HashMap::new(); - - for name in names { - let attr_value = - Self::_get_credential_values_for_attribute(&credential.values.0, &name) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!( - "Credential value not found for attribute {:?}", - name - ), - ) - })?; - - 
value_map.insert( - name.clone(), - AttributeValue { - raw: attr_value.raw, - encoded: attr_value.encoded, - }, - ); - } - - requested_proof.revealed_attr_groups.insert( - attr_info.attr_referent.clone(), - RevealedAttributeGroupInfo { - sub_proof_index, - values: value_map, - }, - ); - } - } else { - requested_proof.unrevealed_attrs.insert( - attr_info.attr_referent, - SubProofReferent { sub_proof_index }, - ); - } - } - - for predicate_info in req_predicates_for_credential { - requested_proof.predicates.insert( - predicate_info.predicate_referent, - SubProofReferent { sub_proof_index }, - ); - } - - let res = Ok(()); - trace!("_update_requested_proof < {:?}", res); - res - } - - fn _build_sub_proof_request( - req_attrs_for_credential: &[RequestedAttributeInfo], - req_predicates_for_credential: &[RequestedPredicateInfo], - ) -> IndyResult { - trace!( - "_build_sub_proof_request > req_attrs_for_credential {:?} \ - req_predicates_for_credential {:?}", - req_attrs_for_credential, - req_predicates_for_credential - ); - - let sub_proof_request = { - let mut builder = UrsaVerifier::new_sub_proof_request_builder()?; - - for attr in req_attrs_for_credential { - if !attr.revealed { - continue; - } - - if let Some(ref name) = &attr.attr_info.name { - builder.add_revealed_attr(&AnoncredsHelpers::attr_common_view(name))? - } - - if let Some(ref names) = &attr.attr_info.names { - for name in names { - builder.add_revealed_attr(&AnoncredsHelpers::attr_common_view(name))? - } - } - } - - for predicate in req_predicates_for_credential { - builder.add_predicate( - &AnoncredsHelpers::attr_common_view(&predicate.predicate_info.name), - &predicate.predicate_info.p_type.to_string(), - predicate.predicate_info.p_value, - )?; - } - - builder.finalize()? - }; - - let res = Ok(sub_proof_request); - trace!("_build_sub_proof_request < {:?}", res); - res - } - - pub fn process_proof_request_restrictions( - &self, - version: &ProofRequestsVersion, - name: &Option, - names: &Option>, - referent: &str, - restrictions: &Option, - extra_query: &Option<&ProofRequestExtraQuery>, - ) -> IndyResult { - trace!( - "process_proof_request_restrictions > version {:?} \ - name {:?} names {:?} referent {:?} \ - restrictions {:?} extra_query {:?}", - version, - name, - names, - referent, - restrictions, - extra_query - ); - - let mut queries: Vec = Vec::new(); - - let mut attr_queries: Vec = name - .iter() - .chain(names.iter().flatten()) - .map(|name| { - Query::Eq( - Self::_build_attr_marker_tag(name), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ) - }) - .collect(); - - if attr_queries.is_empty() { - Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - r#"Proof Request attribute restriction should contain "name" or "names" param"#, - ))?; - } - - if let Some(restrictions_) = restrictions.clone() { - match version { - ProofRequestsVersion::V1 => { - let insensitive_restrictions = - Self::_make_restrictions_by_internal_tags_case_insensitive(restrictions_)?; - queries.push(self._double_restrictions(insensitive_restrictions)?) 
- } - ProofRequestsVersion::V2 => { - let insensitive_restrictions = - Self::_make_restrictions_by_internal_tags_case_insensitive(restrictions_)?; - queries.push(insensitive_restrictions) - } - }; - } - - if let Some(extra_query_) = extra_query.as_ref().and_then(|query| query.get(referent)) { - queries.push(extra_query_.clone()) - } - - // put attr_queries last as this results in a better performing query with large datasets - // ref IS-1470 - queries.append(&mut attr_queries); - - let res = Ok(Query::And(queries)); - trace!("process_proof_request_restrictions < {:?}", res); - res - } - - fn _make_restrictions_by_internal_tags_case_insensitive(operator: Query) -> IndyResult { - let query = match operator { - Query::Eq(tag_name, tag_value) => { - if let Some(tag_name) = VerifierService::attr_request_by_value(&tag_name) { - Query::Eq(Self::_build_attr_value_tag(tag_name), tag_value) - } else if let Some(tag_name) = VerifierService::attr_request_by_marker(&tag_name) { - Query::Eq(Self::_build_attr_marker_tag(tag_name), tag_value) - } else { - Query::Eq(tag_name, tag_value) - } - } - Query::Neq(tag_name, tag_value) => { - if let Some(tag_name) = VerifierService::attr_request_by_value(&tag_name) { - Query::Neq(Self::_build_attr_value_tag(tag_name), tag_value) - } else if let Some(tag_name) = VerifierService::attr_request_by_marker(&tag_name) { - Query::Neq(Self::_build_attr_marker_tag(tag_name), tag_value) - } else { - Query::Neq(tag_name, tag_value) - } - } - Query::In(tag_name, tag_values) => { - if let Some(tag_name) = VerifierService::attr_request_by_value(&tag_name) { - Query::In(Self::_build_attr_value_tag(tag_name), tag_values) - } else if let Some(tag_name) = VerifierService::attr_request_by_marker(&tag_name) { - Query::In(Self::_build_attr_marker_tag(tag_name), tag_values) - } else { - Query::In(tag_name, tag_values) - } - } - Query::And(operators) => Query::And( - operators - .into_iter() - .map(|op| Self::_make_restrictions_by_internal_tags_case_insensitive(op)) - .collect::>>()?, - ), - Query::Or(operators) => Query::Or( - operators - .into_iter() - .map(|op| Self::_make_restrictions_by_internal_tags_case_insensitive(op)) - .collect::>>()?, - ), - Query::Not(operator) => Query::Not(::std::boxed::Box::new( - Self::_make_restrictions_by_internal_tags_case_insensitive(*operator)?, - )), - _ => { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "unsupported operator", - )) - } - }; - - Ok(query) - } - - fn _double_restrictions(&self, operator: Query) -> IndyResult { - let query = match operator { - Query::Eq(tag_name, tag_value) => { - if Credential::QUALIFIABLE_TAGS.contains(&tag_name.as_str()) { - Query::Or(vec![ - Query::Eq(tag_name.clone(), tag_value.clone()), - Query::Eq(Credential::add_extra_tag_suffix(&tag_name), tag_value), - ]) - } else { - Query::Eq(tag_name, tag_value) - } - } - Query::Neq(tag_name, tag_value) => { - if Credential::QUALIFIABLE_TAGS.contains(&tag_name.as_str()) { - Query::And(vec![ - Query::Neq(tag_name.clone(), tag_value.clone()), - Query::Neq(Credential::add_extra_tag_suffix(&tag_name), tag_value), - ]) - } else { - Query::Neq(tag_name, tag_value) - } - } - Query::In(tag_name, tag_values) => { - if Credential::QUALIFIABLE_TAGS.contains(&tag_name.as_str()) { - Query::Or(vec![ - Query::In(tag_name.clone(), tag_values.clone()), - Query::In(Credential::add_extra_tag_suffix(&&tag_name), tag_values), - ]) - } else { - Query::In(tag_name, tag_values) - } - } - Query::And(operators) => Query::And( - operators - .into_iter() - .map(|op| 
self._double_restrictions(op)) - .collect::>>()?, - ), - Query::Or(operators) => Query::Or( - operators - .into_iter() - .map(|op| self._double_restrictions(op)) - .collect::>>()?, - ), - Query::Not(operator) => Query::Not(::std::boxed::Box::new( - self._double_restrictions(*operator)?, - )), - _ => { - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "unsupported operator", - )) - } - }; - - Ok(query) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - const SCHEMA_ID: &str = "NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0"; - const SCHEMA_ISSUER_DID: &str = "NcYxiDXkpYi6ov5FcYDi1e"; - const SCHEMA_NAME: &str = "gvt"; - const SCHEMA_VERSION: &str = "1.0"; - const ISSUER_DID: &str = "NcYxiDXkpYi6ov5FcYDi1e"; - const CRED_DEF_ID: &str = "NcYxiDXkpYi6ov5FcYDi1e:3:CL:1:tag"; - const REV_REG_ID: &str = "NcYxiDXkpYi6ov5FcYDi1e:4:NcYxiDXkpYi6ov5FcYDi1e:3:CL:NcYxiDXkpYi6ov5FcYDi1e:2:gvt:1.0:tag:CL_ACCUM:TAG_1"; - const NO_REV_REG_ID: &str = "None"; - - macro_rules! hashmap { - ($( $key: expr => $val: expr ),*) => { - { - let mut map = ::std::collections::HashMap::new(); - $( - map.insert($key, $val); - )* - map - } - } - } - - mod build_credential_tags { - use super::*; - use crate::domain::anoncreds::revocation_registry_definition::RevocationRegistryId; - - fn _credential() -> Credential { - // note that encoding is not standardized by Indy except that 32-bit integers are encoded as themselves. IS-786 - // so Alex -> 12345 is an application choice while 25 -> 25 is not - let mut attr_values: HashMap = HashMap::new(); - attr_values.insert( - "name".to_string(), - AttributeValues { - raw: "Alex".to_string(), - encoded: "12345".to_string(), - }, - ); - attr_values.insert( - "age".to_string(), - AttributeValues { - raw: "25".to_string(), - encoded: "25".to_string(), - }, - ); - - serde_json::from_str::( - &json!({ - "schema_id": SCHEMA_ID, - "cred_def_id": CRED_DEF_ID, - "values": attr_values, - "signature": json!({ - "p_credential": json!({"m_2": "0","a": "0","e": "0","v": "0"}) - }), - "signature_correctness_proof": json!({"se":"0", "c":"0"}) - }) - .to_string(), - ) - .unwrap() - } - - #[test] - fn build_credential_tags_works() { - let ps = ProverService::new(); - let tags = ps.build_credential_tags(&_credential(), None).unwrap(); - - let expected_tags: HashMap = hashmap!( - "schema_id".to_string() => SCHEMA_ID.to_string(), - "schema_issuer_did".to_string() => SCHEMA_ISSUER_DID.to_string(), - "schema_name".to_string() => SCHEMA_NAME.to_string(), - "schema_version".to_string() => SCHEMA_VERSION.to_string(), - "issuer_did".to_string() => ISSUER_DID.to_string(), - "cred_def_id".to_string() => CRED_DEF_ID.to_string(), - "rev_reg_id".to_string() => NO_REV_REG_ID.to_string(), - "attr::name::marker".to_string() => ATTRIBUTE_EXISTENCE_MARKER.to_string(), - "attr::name::value".to_string() => "Alex".to_string(), - "attr::age::marker".to_string() => ATTRIBUTE_EXISTENCE_MARKER.to_string(), - "attr::age::value".to_string() => "25".to_string() - ); - - assert_eq!(expected_tags, tags) - } - - #[test] - fn build_credential_tags_works_for_catpol() { - let ps = ProverService::new(); - let catpol = CredentialAttrTagPolicy::from(vec![String::from("name")]); - let tags = ps - .build_credential_tags(&_credential(), Some(catpol).as_ref()) - .unwrap(); - - let expected_tags: HashMap = hashmap!( - "schema_id".to_string() => SCHEMA_ID.to_string(), - "schema_issuer_did".to_string() => SCHEMA_ISSUER_DID.to_string(), - "schema_name".to_string() => SCHEMA_NAME.to_string(), - "schema_version".to_string() => 
SCHEMA_VERSION.to_string(), - "issuer_did".to_string() => ISSUER_DID.to_string(), - "cred_def_id".to_string() => CRED_DEF_ID.to_string(), - "rev_reg_id".to_string() => NO_REV_REG_ID.to_string(), - "attr::name::marker".to_string() => ATTRIBUTE_EXISTENCE_MARKER.to_string(), - "attr::name::value".to_string() => "Alex".to_string() - ); - - assert_eq!(expected_tags, tags) - } - - #[test] - fn build_credential_tags_works_for_rev_reg_id() { - let ps = ProverService::new(); - let mut credential = _credential(); - credential.rev_reg_id = Some(RevocationRegistryId(REV_REG_ID.to_string())); - let tags = ps.build_credential_tags(&credential, None).unwrap(); - - let expected_tags: HashMap = hashmap!( - "schema_id".to_string() => SCHEMA_ID.to_string(), - "schema_issuer_did".to_string() => SCHEMA_ISSUER_DID.to_string(), - "schema_name".to_string() => SCHEMA_NAME.to_string(), - "schema_version".to_string() => SCHEMA_VERSION.to_string(), - "issuer_did".to_string() => ISSUER_DID.to_string(), - "cred_def_id".to_string() => CRED_DEF_ID.to_string(), - "rev_reg_id".to_string() => REV_REG_ID.to_string(), - "attr::name::marker".to_string() => ATTRIBUTE_EXISTENCE_MARKER.to_string(), - "attr::name::value".to_string() => "Alex".to_string(), - "attr::age::marker".to_string() => ATTRIBUTE_EXISTENCE_MARKER.to_string(), - "attr::age::value".to_string() => "25".to_string() - ); - - assert_eq!(expected_tags, tags) - } - - #[test] - fn build_credential_tags_works_for_fully_qualified_ids() { - let ps = ProverService::new(); - - let schema_id = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0"; - let issuer_did = "did:indy:NcYxiDXkpYi6ov5FcYDi1e"; - let cred_def_id = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/CLAIM_DEF/1/tag"; - let rev_reg_id = "did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/REV_REG_DEF/did:indy:NcYxiDXkpYi6ov5FcYDi1e/anoncreds/v0/SCHEMA/gvt/1.0/tag/TAG_1"; - - let mut credential = _credential(); - credential.schema_id = SchemaId(schema_id.to_string()); - credential.cred_def_id = CredentialDefinitionId(cred_def_id.to_string()); - credential.rev_reg_id = Some(RevocationRegistryId(rev_reg_id.to_string())); - - let tags = ps.build_credential_tags(&credential, None).unwrap(); - - let expected_tags: HashMap = hashmap!( - "schema_id".to_string() => schema_id.to_string(), - "schema_id_short".to_string() => SCHEMA_ID.to_string(), - "schema_issuer_did".to_string() => issuer_did.to_string(), - "schema_issuer_did_short".to_string() => ISSUER_DID.to_string(), - "schema_name".to_string() => SCHEMA_NAME.to_string(), - "schema_version".to_string() => SCHEMA_VERSION.to_string(), - "issuer_did".to_string() => issuer_did.to_string(), - "issuer_did_short".to_string() => ISSUER_DID.to_string(), - "cred_def_id".to_string() => cred_def_id.to_string(), - "cred_def_id_short".to_string() => CRED_DEF_ID.to_string(), - "rev_reg_id".to_string() => rev_reg_id.to_string(), - "rev_reg_id_short".to_string() => REV_REG_ID.to_string(), - "attr::name::marker".to_string() => ATTRIBUTE_EXISTENCE_MARKER.to_string(), - "attr::name::value".to_string() => "Alex".to_string(), - "attr::age::marker".to_string() => ATTRIBUTE_EXISTENCE_MARKER.to_string(), - "attr::age::value".to_string() => "25".to_string() - ); - - assert_eq!(expected_tags, tags) - } - } - - mod attribute_satisfy_predicate { - use super::*; - - fn predicate_info() -> PredicateInfo { - PredicateInfo { - name: "age".to_string(), - p_type: PredicateTypes::GE, - p_value: 8, - restrictions: None, - non_revoked: None, - } - } - - #[test] - fn attribute_satisfy_predicate_works() 
{ - let ps = ProverService::new(); - let res = ps - .attribute_satisfy_predicate(&predicate_info(), "10") - .unwrap(); - assert!(res); - } - - #[test] - fn attribute_satisfy_predicate_works_for_false() { - let ps = ProverService::new(); - let res = ps - .attribute_satisfy_predicate(&predicate_info(), "5") - .unwrap(); - assert!(!res); - } - - #[test] - fn attribute_satisfy_predicate_works_for_invalid_attribute_value() { - let ps = ProverService::new(); - let res = ps.attribute_satisfy_predicate(&predicate_info(), "string"); - assert_kind!(IndyErrorKind::InvalidStructure, res); - } - } - - mod prepare_credentials_for_proving { - use crate::domain::anoncreds::{ - proof_request::{AttributeInfo, PredicateInfo}, - requested_credential::RequestedAttribute, - }; - - use super::*; - - const CRED_ID: &str = "8591bcac-ee7d-4bef-ba7e-984696440b30"; - const ATTRIBUTE_REFERENT: &str = "attribute_referent"; - const PREDICATE_REFERENT: &str = "predicate_referent"; - - fn _attr_info() -> AttributeInfo { - AttributeInfo { - name: Some("name".to_string()), - names: None, - restrictions: None, - non_revoked: None, - } - } - - fn _predicate_info() -> PredicateInfo { - PredicateInfo { - name: "age".to_string(), - p_type: PredicateTypes::GE, - p_value: 8, - restrictions: None, - non_revoked: None, - } - } - - fn _proof_req() -> ProofRequestPayload { - ProofRequestPayload { - nonce: ursa::cl::new_nonce().unwrap(), - name: "Job-Application".to_string(), - version: "0.1".to_string(), - requested_attributes: hashmap!( - ATTRIBUTE_REFERENT.to_string() => _attr_info() - ), - requested_predicates: hashmap!( - PREDICATE_REFERENT.to_string() => _predicate_info() - ), - non_revoked: None, - } - } - - fn _req_cred() -> RequestedCredentials { - RequestedCredentials { - self_attested_attributes: HashMap::new(), - requested_attributes: hashmap!( - ATTRIBUTE_REFERENT.to_string() => RequestedAttribute{ - cred_id: CRED_ID.to_string(), - timestamp: None, - revealed: false, - } - ), - requested_predicates: hashmap!( - PREDICATE_REFERENT.to_string() => ProvingCredentialKey{ cred_id: CRED_ID.to_string(), timestamp: None } - ), - } - } - - #[test] - fn prepare_credentials_for_proving_works() { - let req_cred = _req_cred(); - let proof_req = _proof_req(); - - let res = - ProverService::_prepare_credentials_for_proving(&req_cred, &proof_req).unwrap(); - - assert_eq!(1, res.len()); - assert!(res.contains_key(&ProvingCredentialKey { - cred_id: CRED_ID.to_string(), - timestamp: None - })); - - let (req_attr_info, req_pred_info) = res - .get(&ProvingCredentialKey { - cred_id: CRED_ID.to_string(), - timestamp: None, - }) - .unwrap(); - assert_eq!(1, req_attr_info.len()); - assert_eq!(1, req_pred_info.len()); - } - - #[test] - fn prepare_credentials_for_proving_works_for_multiple_attributes_with_same_credential() { - let mut req_cred = _req_cred(); - let mut proof_req = _proof_req(); - - req_cred.requested_attributes.insert( - "attribute_referent_2".to_string(), - RequestedAttribute { - cred_id: CRED_ID.to_string(), - timestamp: None, - revealed: false, - }, - ); - - proof_req.requested_attributes.insert( - "attribute_referent_2".to_string(), - AttributeInfo { - name: Some("last_name".to_string()), - names: None, - restrictions: None, - non_revoked: None, - }, - ); - - let res = - ProverService::_prepare_credentials_for_proving(&req_cred, &proof_req).unwrap(); - - assert_eq!(1, res.len()); - assert!(res.contains_key(&ProvingCredentialKey { - cred_id: CRED_ID.to_string(), - timestamp: None - })); - - let (req_attr_info, req_pred_info) = res 
- .get(&ProvingCredentialKey { - cred_id: CRED_ID.to_string(), - timestamp: None, - }) - .unwrap(); - assert_eq!(2, req_attr_info.len()); - assert_eq!(1, req_pred_info.len()); - } - - #[test] - fn prepare_credentials_for_proving_works_for_missed_attribute() { - let req_cred = _req_cred(); - let mut proof_req = _proof_req(); - - proof_req.requested_attributes.clear(); - - let res = ProverService::_prepare_credentials_for_proving(&req_cred, &proof_req); - assert_kind!(IndyErrorKind::InvalidStructure, res); - } - - #[test] - fn prepare_credentials_for_proving_works_for_missed_predicate() { - let req_cred = _req_cred(); - let mut proof_req = _proof_req(); - - proof_req.requested_predicates.clear(); - - let res = ProverService::_prepare_credentials_for_proving(&req_cred, &proof_req); - assert_kind!(IndyErrorKind::InvalidStructure, res); - } - } - - mod get_credential_values_for_attribute { - use super::*; - - fn _attr_values() -> AttributeValues { - AttributeValues { - raw: "Alex".to_string(), - encoded: "123".to_string(), - } - } - - fn _cred_values() -> HashMap { - hashmap!("name".to_string() => _attr_values()) - } - - #[test] - fn get_credential_values_for_attribute_works() { - let ps = ProverService::new(); - - let res = ps - .get_credential_values_for_attribute(&_cred_values(), "name") - .unwrap(); - assert_eq!(_attr_values(), res); - } - - #[test] - fn get_credential_values_for_attribute_works_for_requested_attr_different_case() { - let ps = ProverService::new(); - - let res = ps - .get_credential_values_for_attribute(&_cred_values(), "NAme") - .unwrap(); - assert_eq!(_attr_values(), res); - } - - #[test] - fn get_credential_values_for_attribute_works_for_requested_attr_contains_spaces() { - let ps = ProverService::new(); - - let res = ps - .get_credential_values_for_attribute(&_cred_values(), " na me ") - .unwrap(); - assert_eq!(_attr_values(), res); - } - - #[test] - fn get_credential_values_for_attribute_works_for_cred_values_different_case() { - let ps = ProverService::new(); - - let cred_values = hashmap!("NAME".to_string() => _attr_values()); - - let res = ps - .get_credential_values_for_attribute(&cred_values, "name") - .unwrap(); - assert_eq!(_attr_values(), res); - } - - #[test] - fn get_credential_values_for_attribute_works_for_cred_values_contains_spaces() { - let ps = ProverService::new(); - - let cred_values = hashmap!(" name ".to_string() => _attr_values()); - - let res = ps - .get_credential_values_for_attribute(&cred_values, "name") - .unwrap(); - assert_eq!(_attr_values(), res); - } - - #[test] - fn get_credential_values_for_attribute_works_for_cred_values_and_requested_attr_contains_spaces( - ) { - let ps = ProverService::new(); - - let cred_values = hashmap!(" name ".to_string() => _attr_values()); - - let res = ps - .get_credential_values_for_attribute(&cred_values, " name ") - .unwrap(); - assert_eq!(_attr_values(), res); - } - } - - mod extend_operator { - use super::*; - - const QUALIFIABLE_TAG: &str = "issuer_did"; - const NOT_QUALIFIABLE_TAG: &str = "name"; - const VALUE: &str = "1"; - - #[test] - fn extend_operator_works_for_qualifiable_tag() { - let ps = ProverService::new(); - - let query = Query::Eq(QUALIFIABLE_TAG.to_string(), VALUE.to_string()); - let query = ps._double_restrictions(query).unwrap(); - - let expected_query = Query::Or(vec![ - Query::Eq(QUALIFIABLE_TAG.to_string(), VALUE.to_string()), - Query::Eq( - Credential::add_extra_tag_suffix(QUALIFIABLE_TAG), - VALUE.to_string(), - ), - ]); - - assert_eq!(expected_query, query); - } - - #[test] - fn 
extend_operator_works_for_not_qualifiable_tag() { - let ps = ProverService::new(); - - let query = Query::Eq(NOT_QUALIFIABLE_TAG.to_string(), VALUE.to_string()); - let query = ps._double_restrictions(query).unwrap(); - - let expected_query = Query::Eq(NOT_QUALIFIABLE_TAG.to_string(), VALUE.to_string()); - - assert_eq!(expected_query, query); - } - - #[test] - fn extend_operator_works_for_qualifiable_tag_for_combination() { - let ps = ProverService::new(); - - let query = Query::And(vec![ - Query::Eq(QUALIFIABLE_TAG.to_string(), VALUE.to_string()), - Query::Eq(NOT_QUALIFIABLE_TAG.to_string(), VALUE.to_string()), - ]); - let query = ps._double_restrictions(query).unwrap(); - - let expected_query = Query::And(vec![ - Query::Or(vec![ - Query::Eq(QUALIFIABLE_TAG.to_string(), VALUE.to_string()), - Query::Eq( - Credential::add_extra_tag_suffix(QUALIFIABLE_TAG), - VALUE.to_string(), - ), - ]), - Query::Eq(NOT_QUALIFIABLE_TAG.to_string(), VALUE.to_string()), - ]); - - assert_eq!(expected_query, query); - } - } - - mod extend_proof_request_restrictions { - use super::*; - - const ATTR_NAME: &str = "name"; - const ATTR_NAME_2: &str = "name_2"; - const ATTR_REFERENT: &str = "attr_1"; - - fn _value(json: &str) -> serde_json::Value { - serde_json::from_str::(json).unwrap() - } - - #[test] - fn build_query_works() { - let ps = ProverService::new(); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &Some(ATTR_NAME.to_string()), - &None, - ATTR_REFERENT, - &None, - &None, - ) - .unwrap(); - - let expected_query = Query::And(vec![Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - )]); - - assert_eq!(expected_query, query); - } - - #[test] - fn build_query_works_for_name() { - let ps = ProverService::new(); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &None, - &Some(vec![ATTR_NAME.to_string(), ATTR_NAME_2.to_string()]), - ATTR_REFERENT, - &None, - &None, - ) - .unwrap(); - - let expected_query = Query::And(vec![ - Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ), - Query::Eq( - "attr::name_2::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ), - ]); - - assert_eq!(expected_query, query); - } - - #[test] - fn build_query_works_for_restriction() { - let ps = ProverService::new(); - - let restriction = Query::And(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq("cred_def_id".to_string(), CRED_DEF_ID.to_string()), - ]); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &Some(ATTR_NAME.to_string()), - &None, - ATTR_REFERENT, - &Some(restriction), - &None, - ) - .unwrap(); - - let expected_query = Query::And(vec![ - Query::And(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq("cred_def_id".to_string(), CRED_DEF_ID.to_string()), - ]), - Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ), - ]); - - assert_eq!(expected_query, query); - } - - #[test] - fn build_query_works_for_extra_query() { - let ps = ProverService::new(); - - let extra_query: ProofRequestExtraQuery = hashmap!( - ATTR_REFERENT.to_string() => Query::Eq("name".to_string(), "Alex".to_string()) - ); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &Some(ATTR_NAME.to_string()), - &None, - ATTR_REFERENT, - &None, - &Some(&extra_query), - ) - .unwrap(); - - let expected_query = Query::And(vec![ - 
Query::Eq("name".to_string(), "Alex".to_string()), - Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ), - ]); - - assert_eq!(expected_query, query); - } - - #[test] - fn build_query_works_for_mix_restriction_and_extra_query() { - let ps = ProverService::new(); - - let restriction = Query::And(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq("cred_def_id".to_string(), CRED_DEF_ID.to_string()), - ]); - - let extra_query: ProofRequestExtraQuery = hashmap!( - ATTR_REFERENT.to_string() => Query::Eq("name".to_string(), "Alex".to_string()) - ); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &Some(ATTR_NAME.to_string()), - &None, - ATTR_REFERENT, - &Some(restriction), - &Some(&extra_query), - ) - .unwrap(); - - let expected_query = Query::And(vec![ - Query::And(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq("cred_def_id".to_string(), CRED_DEF_ID.to_string()), - ]), - Query::Eq("name".to_string(), "Alex".to_string()), - Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ), - ]); - - assert_eq!(expected_query, query); - } - - #[test] - fn build_query_works_for_extra_query_with_other_referent() { - let ps = ProverService::new(); - - let extra_query: ProofRequestExtraQuery = hashmap!( - "other_attr_referent".to_string() => Query::Eq("name".to_string(), "Alex".to_string()) - ); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &Some(ATTR_NAME.to_string()), - &None, - ATTR_REFERENT, - &None, - &Some(&extra_query), - ) - .unwrap(); - - let expected_query = Query::And(vec![Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - )]); - - assert_eq!(expected_query, query); - } - - #[test] - fn build_query_works_for_restriction_and_extra_query_contain_or_operator() { - let ps = ProverService::new(); - - let restriction = Query::Or(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq("schema_id".to_string(), "schema_id_2".to_string()), - ]); - - let extra_query: ProofRequestExtraQuery = hashmap!( - ATTR_REFERENT.to_string() => - Query::Or(vec![ - Query::Eq("name".to_string(), "Alex".to_string()), - Query::Eq("name".to_string(), "Alexander".to_string()), - ]) - ); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &Some(ATTR_NAME.to_string()), - &None, - ATTR_REFERENT, - &Some(restriction), - &Some(&extra_query), - ) - .unwrap(); - - let expected_query = Query::And(vec![ - Query::Or(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq("schema_id".to_string(), "schema_id_2".to_string()), - ]), - Query::Or(vec![ - Query::Eq("name".to_string(), "Alex".to_string()), - Query::Eq("name".to_string(), "Alexander".to_string()), - ]), - Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ), - ]); - - assert_eq!(expected_query, query); - } - - #[test] - fn build_query_works_for_restriction_by_internal_tags() { - let ps = ProverService::new(); - - let restriction = Query::And(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq( - "attr::firstname::value".to_string(), - "firstname_value".to_string(), - ), - Query::Eq( - "attr::Last Name::value".to_string(), - "lastname_value".to_string(), - ), - Query::Eq("attr::File Name::marker".to_string(), "1".to_string()), - Query::Eq("attr::textresult::marker".to_string(), 
"1".to_string()), - ]); - - let query = ps - .process_proof_request_restrictions( - &ProofRequestsVersion::V2, - &Some(ATTR_NAME.to_string()), - &None, - ATTR_REFERENT, - &Some(restriction), - &None, - ) - .unwrap(); - - let expected_query = Query::And(vec![ - Query::And(vec![ - Query::Eq("schema_id".to_string(), SCHEMA_ID.to_string()), - Query::Eq( - "attr::firstname::value".to_string(), - "firstname_value".to_string(), - ), - Query::Eq( - "attr::lastname::value".to_string(), - "lastname_value".to_string(), - ), - Query::Eq("attr::filename::marker".to_string(), "1".to_string()), - Query::Eq("attr::textresult::marker".to_string(), "1".to_string()), - ]), - Query::Eq( - "attr::name::marker".to_string(), - ATTRIBUTE_EXISTENCE_MARKER.to_string(), - ), - ]); - - assert_eq!(expected_query, query); - } - } -} diff --git a/libvdrtools/src/services/anoncreds/verifier.rs b/libvdrtools/src/services/anoncreds/verifier.rs deleted file mode 100644 index 2c015fb1b0..0000000000 --- a/libvdrtools/src/services/anoncreds/verifier.rs +++ /dev/null @@ -1,1359 +0,0 @@ -use std::collections::{HashMap, HashSet}; - -use indy_api_types::errors::prelude::*; -use lazy_static::lazy_static; -use regex::Regex; - -use ursa::{ - bn::BigNumber, - cl::{new_nonce, verifier::Verifier as CryptoVerifier, CredentialPublicKey, Nonce}, -}; - -use crate::{ - domain::anoncreds::{ - credential_definition::{CredentialDefinitionId, CredentialDefinitionV1}, - proof::{Identifier, Proof, RequestedProof, RevealedAttributeInfo}, - proof_request::{AttributeInfo, NonRevocedInterval, PredicateInfo, ProofRequestPayload}, - revocation_registry::RevocationRegistryV1, - revocation_registry_definition::{RevocationRegistryDefinitionV1, RevocationRegistryId}, - schema::{SchemaId, SchemaV1}, - }, - services::AnoncredsHelpers, - utils::wql::Query, -}; - -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)] -pub(crate) struct Filter { - schema_id: String, - schema_issuer_did: String, - schema_name: String, - schema_version: String, - issuer_did: String, - cred_def_id: String, -} - -lazy_static! 
{ - pub(crate) static ref VALUE_TAG_MATCHER: Regex = Regex::new("^attr::([^:]+)::value$").unwrap(); - pub(crate) static ref MARKER_TAG_MATCHER: Regex = - Regex::new("^attr::([^:]+)::marker$").unwrap(); -} - -pub struct VerifierService {} - -impl VerifierService { - pub(crate) fn new() -> VerifierService { - VerifierService {} - } - - pub(crate) fn verify( - &self, - full_proof: &Proof, - proof_req: &ProofRequestPayload, - schemas: &HashMap, - cred_defs: &HashMap, - rev_reg_defs: &HashMap, - rev_regs: &HashMap>, - ) -> IndyResult { - trace!("verify >>> full_proof: {:?}, proof_req: {:?}, schemas: {:?}, cred_defs: {:?}, rev_reg_defs: {:?} rev_regs: {:?}", - full_proof, proof_req, schemas, cred_defs, rev_reg_defs, rev_regs); - - let received_revealed_attrs: HashMap = - VerifierService::_received_revealed_attrs(&full_proof)?; - let received_unrevealed_attrs: HashMap = - VerifierService::_received_unrevealed_attrs(&full_proof)?; - let received_predicates: HashMap = - VerifierService::_received_predicates(&full_proof)?; - let received_self_attested_attrs: HashSet = - VerifierService::_received_self_attested_attrs(&full_proof); - - VerifierService::_compare_attr_from_proof_and_request( - proof_req, - &received_revealed_attrs, - &received_unrevealed_attrs, - &received_self_attested_attrs, - &received_predicates, - )?; - - VerifierService::_verify_revealed_attribute_values(&proof_req, &full_proof)?; - - VerifierService::_verify_requested_restrictions( - &proof_req, - &full_proof.requested_proof, - &received_revealed_attrs, - &received_unrevealed_attrs, - &received_predicates, - &received_self_attested_attrs, - )?; - - VerifierService::_compare_timestamps_from_proof_and_request( - proof_req, - &received_revealed_attrs, - &received_unrevealed_attrs, - &received_self_attested_attrs, - &received_predicates, - )?; - - let mut proof_verifier = CryptoVerifier::new_proof_verifier()?; - let non_credential_schema = AnoncredsHelpers::build_non_credential_schema()?; - - for sub_proof_index in 0..full_proof.identifiers.len() { - let identifier = full_proof.identifiers[sub_proof_index].clone(); - - let schema: &SchemaV1 = schemas.get(&identifier.schema_id).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Schema not found for id: {:?}", identifier.schema_id), - ) - })?; - - let cred_def: &CredentialDefinitionV1 = - cred_defs.get(&identifier.cred_def_id).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!( - "CredentialDefinition not found for id: {:?}", - identifier.cred_def_id - ), - ) - })?; - - let (rev_reg_def, rev_reg) = if let Some(timestamp) = identifier.timestamp { - let rev_reg_id = identifier.rev_reg_id.clone().ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Revocation Registry Id not found", - ) - })?; - - let rev_reg_def = Some(rev_reg_defs.get(&rev_reg_id).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!( - "RevocationRegistryDefinition not found for id: {:?}", - identifier.rev_reg_id - ), - ) - })?); - - let rev_regs_for_cred = rev_regs.get(&rev_reg_id).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("RevocationRegistry not found for id: {:?}", rev_reg_id), - ) - })?; - - let rev_reg = Some(rev_regs_for_cred.get(×tamp).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!( - "RevocationRegistry not found for timestamp: {:?}", - timestamp - ), - ) - })?); - - (rev_reg_def, rev_reg) - } else { - (None, None) - }; - - let attrs_for_credential = 
VerifierService::_get_revealed_attributes_for_credential( - sub_proof_index, - &full_proof.requested_proof, - proof_req, - )?; - let predicates_for_credential = VerifierService::_get_predicates_for_credential( - sub_proof_index, - &full_proof.requested_proof, - proof_req, - )?; - - let credential_schema = - AnoncredsHelpers::build_credential_schema(&schema.attr_names.0)?; - - let sub_proof_request = AnoncredsHelpers::build_sub_proof_request( - &attrs_for_credential, - &predicates_for_credential, - )?; - - let credential_pub_key = CredentialPublicKey::build_from_parts( - &cred_def.value.primary, - cred_def.value.revocation.as_ref(), - )?; - - proof_verifier.add_sub_proof_request( - &sub_proof_request, - &credential_schema, - &non_credential_schema, - &credential_pub_key, - rev_reg_def - .as_ref() - .map(|r_reg_def| &r_reg_def.value.public_keys.accum_key), - rev_reg.as_ref().map(|r_reg| &r_reg.value), - )?; - } - - let valid = proof_verifier.verify(&full_proof.proof, &proof_req.nonce)?; - - trace!("verify <<< valid: {:?}", valid); - - Ok(valid) - } - - pub(crate) fn generate_nonce(&self) -> IndyResult { - trace!("generate_nonce >>> "); - - let nonce = new_nonce()?; - - trace!("generate_nonce <<< nonce: {:?} ", nonce); - - Ok(nonce) - } - - fn _get_revealed_attributes_for_credential( - sub_proof_index: usize, - requested_proof: &RequestedProof, - proof_req: &ProofRequestPayload, - ) -> IndyResult> { - trace!("_get_revealed_attributes_for_credential >>> sub_proof_index: {:?}, requested_credentials: {:?}, proof_req: {:?}", - sub_proof_index, requested_proof, proof_req); - - let mut revealed_attrs_for_credential = requested_proof - .revealed_attrs - .iter() - .filter(|&(attr_referent, ref revealed_attr_info)| { - sub_proof_index == revealed_attr_info.sub_proof_index as usize - && proof_req.requested_attributes.contains_key(attr_referent) - }) - .map(|(attr_referent, _)| proof_req.requested_attributes[attr_referent].clone()) - .collect::>(); - - revealed_attrs_for_credential.append( - &mut requested_proof - .revealed_attr_groups - .iter() - .filter(|&(attr_referent, ref revealed_attr_info)| { - sub_proof_index == revealed_attr_info.sub_proof_index as usize - && proof_req.requested_attributes.contains_key(attr_referent) - }) - .map(|(attr_referent, _)| proof_req.requested_attributes[attr_referent].clone()) - .collect::>(), - ); - - trace!( - "_get_revealed_attributes_for_credential <<< revealed_attrs_for_credential: {:?}", - revealed_attrs_for_credential - ); - - Ok(revealed_attrs_for_credential) - } - - fn _get_predicates_for_credential( - sub_proof_index: usize, - requested_proof: &RequestedProof, - proof_req: &ProofRequestPayload, - ) -> IndyResult> { - trace!("_get_predicates_for_credential >>> sub_proof_index: {:?}, requested_credentials: {:?}, proof_req: {:?}", - sub_proof_index, requested_proof, proof_req); - - let predicates_for_credential = requested_proof - .predicates - .iter() - .filter(|&(predicate_referent, requested_referent)| { - sub_proof_index == requested_referent.sub_proof_index as usize - && proof_req - .requested_predicates - .contains_key(predicate_referent) - }) - .map(|(predicate_referent, _)| { - proof_req.requested_predicates[predicate_referent].clone() - }) - .collect::>(); - - trace!( - "_get_predicates_for_credential <<< predicates_for_credential: {:?}", - predicates_for_credential - ); - - Ok(predicates_for_credential) - } - - fn _compare_attr_from_proof_and_request( - proof_req: &ProofRequestPayload, - received_revealed_attrs: &HashMap, - received_unrevealed_attrs: 
&HashMap, - received_self_attested_attrs: &HashSet, - received_predicates: &HashMap, - ) -> IndyResult<()> { - let requested_attrs: HashSet = - proof_req.requested_attributes.keys().cloned().collect(); - - let received_attrs: HashSet = received_revealed_attrs - .iter() - .chain(received_unrevealed_attrs) - .map(|(r, _)| r.to_string()) - .collect::>() - .union(&received_self_attested_attrs) - .cloned() - .collect(); - - if requested_attrs != received_attrs { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - format!( - "Requested attributes {:?} do not correspond to received {:?}", - requested_attrs, received_attrs - ), - )); - } - - let requested_predicates: HashSet<&String> = - proof_req.requested_predicates.keys().collect(); - - let received_predicates_: HashSet<&String> = received_predicates.keys().collect(); - - if requested_predicates != received_predicates_ { - return Err(err_msg( - IndyErrorKind::InvalidStructure, - format!( - "Requested predicates {:?} do not correspond to received {:?}", - requested_predicates, received_predicates - ), - )); - } - - Ok(()) - } - - fn _compare_timestamps_from_proof_and_request( - proof_req: &ProofRequestPayload, - received_revealed_attrs: &HashMap, - received_unrevealed_attrs: &HashMap, - received_self_attested_attrs: &HashSet, - received_predicates: &HashMap, - ) -> IndyResult<()> { - proof_req - .requested_attributes - .iter() - .map(|(referent, info)| { - VerifierService::_validate_timestamp( - &received_revealed_attrs, - referent, - &proof_req.non_revoked, - &info.non_revoked, - ) - .or_else(|_| { - VerifierService::_validate_timestamp( - &received_unrevealed_attrs, - referent, - &proof_req.non_revoked, - &info.non_revoked, - ) - }) - .or_else(|_| { - received_self_attested_attrs - .get(referent) - .map(|_| ()) - .ok_or_else(|| IndyError::from(IndyErrorKind::InvalidStructure)) - }) - }) - .collect::>>()?; - - proof_req - .requested_predicates - .iter() - .map(|(referent, info)| { - VerifierService::_validate_timestamp( - received_predicates, - referent, - &proof_req.non_revoked, - &info.non_revoked, - ) - }) - .collect::>>()?; - - Ok(()) - } - - fn _validate_timestamp( - received_: &HashMap, - referent: &str, - global_interval: &Option, - local_interval: &Option, - ) -> IndyResult<()> { - if AnoncredsHelpers::get_non_revoc_interval(global_interval, local_interval).is_none() { - return Ok(()); - } - - if !received_ - .get(referent) - .map(|attr| attr.timestamp.is_some()) - .unwrap_or(false) - { - return Err(IndyError::from(IndyErrorKind::InvalidStructure)); - } - - Ok(()) - } - - fn _received_revealed_attrs(proof: &Proof) -> IndyResult> { - let mut revealed_identifiers: HashMap = HashMap::new(); - for (referent, info) in proof.requested_proof.revealed_attrs.iter() { - revealed_identifiers.insert( - referent.to_string(), - VerifierService::_get_proof_identifier(proof, info.sub_proof_index)?, - ); - } - for (referent, infos) in proof.requested_proof.revealed_attr_groups.iter() { - revealed_identifiers.insert( - referent.to_string(), - VerifierService::_get_proof_identifier(proof, infos.sub_proof_index)?, - ); - } - Ok(revealed_identifiers) - } - - fn _received_unrevealed_attrs(proof: &Proof) -> IndyResult> { - let mut unrevealed_identifiers: HashMap = HashMap::new(); - for (referent, info) in proof.requested_proof.unrevealed_attrs.iter() { - unrevealed_identifiers.insert( - referent.to_string(), - VerifierService::_get_proof_identifier(proof, info.sub_proof_index)?, - ); - } - Ok(unrevealed_identifiers) - } - - fn 
_received_predicates(proof: &Proof) -> IndyResult> { - let mut predicate_identifiers: HashMap = HashMap::new(); - for (referent, info) in proof.requested_proof.predicates.iter() { - predicate_identifiers.insert( - referent.to_string(), - VerifierService::_get_proof_identifier(proof, info.sub_proof_index)?, - ); - } - Ok(predicate_identifiers) - } - - fn _received_self_attested_attrs(proof: &Proof) -> HashSet { - proof - .requested_proof - .self_attested_attrs - .keys() - .cloned() - .collect() - } - - fn _get_proof_identifier(proof: &Proof, index: u32) -> IndyResult { - proof - .identifiers - .get(index as usize) - .cloned() - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - format!("Identifier not found for index: {}", index), - ) - }) - } - - fn _verify_revealed_attribute_values( - proof_req: &ProofRequestPayload, - proof: &Proof, - ) -> IndyResult<()> { - for (attr_referent, attr_info) in proof.requested_proof.revealed_attrs.iter() { - let attr_name = proof_req - .requested_attributes - .get(attr_referent) - .as_ref() - .ok_or(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "Attribute with referent \"{}\" not found in ProofRequests", - attr_referent - ), - ))? - .name - .as_ref() - .ok_or(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "Attribute with referent \"{}\" not found in ProofRequests", - attr_referent - ), - ))?; - VerifierService::_verify_revealed_attribute_value( - attr_name.as_str(), - proof, - &attr_info, - )?; - } - - for (attr_referent, attr_infos) in proof.requested_proof.revealed_attr_groups.iter() { - let attr_names = proof_req - .requested_attributes - .get(attr_referent) - .as_ref() - .ok_or(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "Attribute with referent \"{}\" not found in ProofRequests", - attr_referent - ), - ))? - .names - .as_ref() - .ok_or(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "Attribute with referent \"{}\" not found in ProofRequests", - attr_referent - ), - ))?; - if attr_infos.values.len() != attr_names.len() { - error!("Proof Revealed Attr Group does not match Proof Request Attribute Group, proof request attrs: {:?}, referent: {:?}, attr_infos: {:?}", proof_req.requested_attributes, attr_referent, attr_infos); - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Proof Revealed Attr Group does not match Proof Request Attribute Group", - )); - } - for attr_name in attr_names { - let attr_info = &attr_infos.values.get(attr_name).ok_or(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Proof Revealed Attr Group does not match Proof Request Attribute Group", - ))?; - VerifierService::_verify_revealed_attribute_value( - attr_name, - proof, - &RevealedAttributeInfo { - sub_proof_index: attr_infos.sub_proof_index, - raw: attr_info.raw.clone(), - encoded: attr_info.encoded.clone(), - }, - )?; - } - } - Ok(()) - } - - fn _verify_revealed_attribute_value( - attr_name: &str, - proof: &Proof, - attr_info: &RevealedAttributeInfo, - ) -> IndyResult<()> { - let reveal_attr_encoded = &attr_info.encoded; - let sub_proof_index = attr_info.sub_proof_index as usize; - - let crypto_proof_encoded = proof - .proof - .proofs - .get(sub_proof_index) - .ok_or(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!("CryptoProof not found by index \"{}\"", sub_proof_index), - ))? - .revealed_attrs()? 
- .iter() - .find(|(key, _)| { - AnoncredsHelpers::attr_common_view(attr_name) - == AnoncredsHelpers::attr_common_view(&key) - }) - .map(|(_, val)| val.to_string()) - .ok_or(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "Attribute with name \"{}\" not found in CryptoProof", - attr_name - ), - ))?; - - if BigNumber::from_dec(reveal_attr_encoded)? != BigNumber::from_dec(&crypto_proof_encoded)? - { - return Err(IndyError::from_msg(IndyErrorKind::ProofRejected, - format!("Encoded Values for \"{}\" are different in RequestedProof \"{}\" and CryptoProof \"{}\"", attr_name, reveal_attr_encoded, crypto_proof_encoded))); - } - - Ok(()) - } - - fn _verify_requested_restrictions( - proof_req: &ProofRequestPayload, - requested_proof: &RequestedProof, - received_revealed_attrs: &HashMap, - received_unrevealed_attrs: &HashMap, - received_predicates: &HashMap, - self_attested_attrs: &HashSet, - ) -> IndyResult<()> { - let proof_attr_identifiers: HashMap = received_revealed_attrs - .iter() - .chain(received_unrevealed_attrs) - .map(|(r, id)| (r.to_string(), id.clone())) - .collect(); - - let requested_attrs: HashMap = proof_req - .requested_attributes - .iter() - .filter(|&(referent, info)| { - !VerifierService::_is_self_attested(&referent, &info, self_attested_attrs) - }) - .map(|(referent, info)| (referent.to_string(), info.clone())) - .collect(); - - for (referent, info) in requested_attrs.clone() { - if let Some(ref query) = info.restrictions { - let filter = - VerifierService::_gather_filter_info(&referent, &proof_attr_identifiers)?; - - let name_value_map: HashMap> = if let Some(name) = info.name { - let mut map = HashMap::new(); - map.insert( - name.clone(), - requested_proof - .revealed_attrs - .get(&referent) - .map(|attr| attr.raw.as_str()), - ); - map - } else if let Some(names) = info.names { - let mut map = HashMap::new(); - let attrs = requested_proof.revealed_attr_groups.get(&referent).ok_or( - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "Proof does not have referent from proof request", - ), - )?; - for name in names { - let val = attrs.values.get(&name).map(|attr| attr.raw.as_str()); - map.insert(name, val); - } - map - } else { - error!( - r#"Proof Request attribute restriction should contain "name" or "names" param. 
Current proof request: {:?}"#, - proof_req - ); - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - r#"Proof Request attribute restriction should contain "name" or "names" param"#, - )); - }; - - VerifierService::_do_process_operator(&name_value_map, &query, &filter).map_err( - |err| { - err.extend(format!( - "Requested restriction validation failed for \"{:?}\" attributes", - &name_value_map - )) - }, - )?; - } - } - - for (referent, info) in proof_req.requested_predicates.iter() { - if let Some(ref query) = info.restrictions { - let filter = VerifierService::_gather_filter_info(&referent, received_predicates)?; - - // start with the predicate requested attribute, which is un-revealed - let mut attr_value_map = HashMap::new(); - attr_value_map.insert(info.name.to_string(), None); - - // include any revealed attributes for the same credential (based on sub_proof_index) - let pred_sub_proof_index = requested_proof - .predicates - .get(referent) - .unwrap() - .sub_proof_index; - for attr_referent in requested_proof.revealed_attrs.keys() { - let attr_info = requested_proof.revealed_attrs.get(attr_referent).unwrap(); - let attr_sub_proof_index = attr_info.sub_proof_index; - if pred_sub_proof_index == attr_sub_proof_index { - let attr_name = requested_attrs.get(attr_referent).unwrap().name.clone(); - if let Some(name) = attr_name { - attr_value_map.insert(name, Some(attr_info.raw.as_str())); - } - } - } - for attr_referent in requested_proof.revealed_attr_groups.keys() { - let attr_info = requested_proof - .revealed_attr_groups - .get(attr_referent) - .unwrap(); - let attr_sub_proof_index = attr_info.sub_proof_index; - if pred_sub_proof_index == attr_sub_proof_index { - for name in attr_info.values.keys() { - let raw_val = attr_info.values.get(name).unwrap().raw.as_str(); - attr_value_map.insert(name.clone(), Some(raw_val)); - } - } - } - - VerifierService::_do_process_operator(&attr_value_map, &query, &filter).map_err( - |err| { - err.extend(format!( - "Requested restriction validation failed for \"{}\" predicate", - &info.name - )) - }, - )?; - - // old style :-/ which fails for attribute restrictions on predicates - //VerifierService::_process_operator(&info.name, &query, &filter, None) - // .map_err(|err| err.extend(format!("Requested restriction validation failed for \"{}\" predicate", &info.name)))?; - } - } - - Ok(()) - } - - fn _is_self_attested( - referent: &str, - info: &AttributeInfo, - self_attested_attrs: &HashSet, - ) -> bool { - match info.restrictions.as_ref() { - Some(&Query::And(ref array)) | Some(&Query::Or(ref array)) if array.is_empty() => { - self_attested_attrs.contains(referent) - } - None => self_attested_attrs.contains(referent), - Some(_) => false, - } - } - - fn _gather_filter_info( - referent: &str, - identifiers: &HashMap, - ) -> IndyResult { - let identifier = identifiers.get(referent).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidState, - format!("Identifier not found for referent: {}", referent), - ) - })?; - - let (schema_issuer_did, schema_name, schema_version) = - identifier.schema_id.parts().ok_or(IndyError::from_msg( - IndyErrorKind::InvalidState, - format!( - "Invalid Schema ID `{}`: wrong number of parts", - identifier.schema_id.0 - ), - ))?; - - let issuer_did = identifier - .cred_def_id - .issuer_did() - .ok_or(IndyError::from_msg( - IndyErrorKind::InvalidState, - format!( - "Invalid Credential Definition ID `{}`: wrong number of parts", - identifier.cred_def_id.0 - ), - ))?; - - Ok(Filter { - schema_id: 
identifier.schema_id.0.to_string(), - schema_name, - schema_issuer_did: schema_issuer_did.0, - schema_version, - cred_def_id: identifier.cred_def_id.0.to_string(), - issuer_did: issuer_did.0, - }) - } - - fn _process_operator( - attr: &str, - restriction_op: &Query, - filter: &Filter, - revealed_value: Option<&str>, - ) -> IndyResult<()> { - let mut attr_value_map = HashMap::new(); - attr_value_map.insert(attr.to_string(), revealed_value); - VerifierService::_do_process_operator(&attr_value_map, restriction_op, filter) - } - - fn _do_process_operator( - attr_value_map: &HashMap>, - restriction_op: &Query, - filter: &Filter, - ) -> IndyResult<()> { - match restriction_op { - Query::Eq(ref tag_name, ref tag_value) => { - VerifierService::_process_filter(attr_value_map, &tag_name, &tag_value, filter) - .map_err(|err| { - err.extend(format!( - "$eq operator validation failed for tag: \"{}\", value: \"{}\"", - tag_name, tag_value - )) - }) - } - Query::Neq(ref tag_name, ref tag_value) => { - if VerifierService::_process_filter(attr_value_map, &tag_name, &tag_value, filter) - .is_err() - { - Ok(()) - } else { - Err(IndyError::from_msg(IndyErrorKind::ProofRejected, - format!("$neq operator validation failed for tag: \"{}\", value: \"{}\". Condition was passed.", tag_name, tag_value))) - } - } - Query::In(ref tag_name, ref tag_values) => { - let res = tag_values.iter().any(|val| { - VerifierService::_process_filter(attr_value_map, &tag_name, &val, filter) - .is_ok() - }); - if res { - Ok(()) - } else { - Err(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "$in operator validation failed for tag: \"{}\", values \"{:?}\".", - tag_name, tag_values - ), - )) - } - } - Query::And(ref operators) => operators - .iter() - .map(|op| VerifierService::_do_process_operator(attr_value_map, op, filter)) - .collect::>>() - .map(|_| ()) - .map_err(|err| err.extend("$and operator validation failed.")), - Query::Or(ref operators) => { - let res = operators.iter().any(|op| { - VerifierService::_do_process_operator(attr_value_map, op, filter).is_ok() - }); - if res { - Ok(()) - } else { - Err(IndyError::from_msg( - IndyErrorKind::ProofRejected, - "$or operator validation failed. All conditions were failed.", - )) - } - } - Query::Not(ref operator) => { - if VerifierService::_do_process_operator(attr_value_map, &*operator, filter) - .is_err() - { - Ok(()) - } else { - Err(IndyError::from_msg( - IndyErrorKind::ProofRejected, - "$not operator validation failed. 
All conditions were passed.", - )) - } - } - _ => Err(IndyError::from_msg( - IndyErrorKind::ProofRejected, - "unsupported operator", - )), - } - } - - fn _process_filter( - attr_value_map: &HashMap>, - tag: &str, - tag_value: &str, - filter: &Filter, - ) -> IndyResult<()> { - trace!( - "_process_filter: attr_value_map: {:?}, tag: {}, tag_value: {}, filter: {:?}", - attr_value_map, - tag, - tag_value, - filter - ); - match tag { - tag_ @ "schema_id" => { - VerifierService::_precess_filed(tag_, &filter.schema_id, tag_value) - } - tag_ @ "schema_issuer_did" => { - VerifierService::_precess_filed(tag_, &filter.schema_issuer_did, tag_value) - } - tag_ @ "schema_name" => { - VerifierService::_precess_filed(tag_, &filter.schema_name, tag_value) - } - tag_ @ "schema_version" => { - VerifierService::_precess_filed(tag_, &filter.schema_version, tag_value) - } - tag_ @ "cred_def_id" => { - VerifierService::_precess_filed(tag_, &filter.cred_def_id, tag_value) - } - tag_ @ "issuer_did" => { - VerifierService::_precess_filed(tag_, &filter.issuer_did, tag_value) - } - x if VerifierService::_is_attr_with_revealed_value(x, attr_value_map) => { - // attr::::value -> check revealed value - VerifierService::_check_internal_tag_revealed_value(x, tag_value, attr_value_map) - } - x if VerifierService::_is_attr_marker_operator(x) => { - // attr::::marker -> ok - Ok(()) - } - _ => Err(err_msg( - IndyErrorKind::InvalidStructure, - "Unknown Filter Type", - )), - } - } - - fn _precess_filed(filed: &str, filter_value: &str, tag_value: &str) -> IndyResult<()> { - if filter_value == tag_value { - Ok(()) - } else { - Err(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "\"{}\" values are different: expected: \"{}\", actual: \"{}\"", - filed, tag_value, filter_value - ), - )) - } - } - - pub(crate) fn attr_request_by_value(key: &str) -> Option<&str> { - VALUE_TAG_MATCHER - .captures(key) - .and_then(|caps| caps.get(1).map(|s| s.as_str())) - } - - pub(crate) fn attr_request_by_marker(key: &str) -> Option<&str> { - MARKER_TAG_MATCHER - .captures(key) - .and_then(|caps| caps.get(1).map(|s| s.as_str())) - } - - fn _is_attr_with_revealed_value( - key: &str, - attr_value_map: &HashMap>, - ) -> bool { - VALUE_TAG_MATCHER - .captures(key) - .map(|caps| { - caps.get(1) - .map(|s| { - attr_value_map.keys().any(|key| { - AnoncredsHelpers::attr_common_view(key) - == AnoncredsHelpers::attr_common_view(s.as_str()) - }) - }) - .unwrap_or(false) - }) - .unwrap_or(false) - } - - fn _check_internal_tag_revealed_value( - key: &str, - tag_value: &str, - attr_value_map: &HashMap>, - ) -> IndyResult<()> { - let captures = VALUE_TAG_MATCHER.captures(key).ok_or(IndyError::from_msg( - IndyErrorKind::InvalidState, - format!("Attribute name became unparseable"), - ))?; - - let attr_name = captures - .get(1) - .ok_or(IndyError::from_msg( - IndyErrorKind::InvalidState, - format!("No name has been parsed"), - ))? 
- .as_str(); - - let revealed_value = attr_value_map.iter().find(|(key, _)| { - AnoncredsHelpers::attr_common_view(key) == AnoncredsHelpers::attr_common_view(attr_name) - }); - - if let Some((_key, Some(revealed_value))) = revealed_value { - if *revealed_value != tag_value { - return Err(IndyError::from_msg( - IndyErrorKind::ProofRejected, - format!( - "\"{}\" values are different: expected: \"{}\", actual: \"{}\"", - key, tag_value, revealed_value - ), - )); - } - } else { - return Err(IndyError::from_msg(IndyErrorKind::ProofRejected, - format!("Revealed value hasn't been find by key: expected key: \"{}\", attr_value_map: \"{:?}\"", key, attr_value_map))); - } - Ok(()) - } - - fn _is_attr_marker_operator(key: &str) -> bool { - MARKER_TAG_MATCHER.is_match(key) - } - - fn _is_attr_value_operator(key: &str) -> bool { - VALUE_TAG_MATCHER.is_match(key) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - pub(crate) const SCHEMA_ID: &str = "123"; - pub(crate) const SCHEMA_NAME: &str = "Schema Name"; - pub(crate) const SCHEMA_ISSUER_DID: &str = "234"; - pub(crate) const SCHEMA_VERSION: &str = "1.2.3"; - pub(crate) const CRED_DEF_ID: &str = "345"; - pub(crate) const ISSUER_DID: &str = "456"; - - fn schema_id_tag() -> String { - "schema_id".to_string() - } - - fn schema_name_tag() -> String { - "schema_name".to_string() - } - - fn schema_issuer_did_tag() -> String { - "schema_issuer_did".to_string() - } - - fn schema_version_tag() -> String { - "schema_version".to_string() - } - - fn cred_def_id_tag() -> String { - "cred_def_id".to_string() - } - - fn issuer_did_tag() -> String { - "issuer_did".to_string() - } - - fn attr_tag() -> String { - "attr::zip::marker".to_string() - } - - fn attr_tag_value() -> String { - "attr::zip::value".to_string() - } - - fn bad_attr_tag() -> String { - "bad::zip::marker".to_string() - } - - fn filter() -> Filter { - Filter { - schema_id: SCHEMA_ID.to_string(), - schema_name: SCHEMA_NAME.to_string(), - schema_issuer_did: SCHEMA_ISSUER_DID.to_string(), - schema_version: SCHEMA_VERSION.to_string(), - cred_def_id: CRED_DEF_ID.to_string(), - issuer_did: ISSUER_DID.to_string(), - } - } - - #[test] - fn test_process_op_eq() { - let filter = filter(); - - let mut op = Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap(); - - op = Query::And(vec![ - Query::Eq(attr_tag(), "1".to_string()), - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - ]); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap(); - - op = Query::And(vec![ - Query::Eq(bad_attr_tag(), "1".to_string()), - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - ]); - - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::Eq(schema_id_tag(), "NOT HERE".to_string()); - - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - } - - #[test] - fn test_process_op_ne() { - let filter = filter(); - let mut op = Query::Neq(schema_id_tag(), SCHEMA_ID.to_string()); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::Neq(schema_id_tag(), "NOT HERE".to_string()); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap() - } - - #[test] - fn test_process_op_in() { - let filter = filter(); - let mut cred_def_ids = vec!["Not Here".to_string()]; - - let mut op = Query::In(cred_def_id_tag(), cred_def_ids.clone()); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - 
cred_def_ids.push(CRED_DEF_ID.to_string()); - op = Query::In(cred_def_id_tag(), cred_def_ids.clone()); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap() - } - - #[test] - fn test_process_op_or() { - let filter = filter(); - let mut op = Query::Or(vec![ - Query::Eq(schema_id_tag(), "Not Here".to_string()), - Query::Eq(cred_def_id_tag(), "Not Here".to_string()), - ]); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::Or(vec![ - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), "Not Here".to_string()), - ]); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap() - } - - #[test] - fn test_process_op_and() { - let filter = filter(); - let mut op = Query::And(vec![ - Query::Eq(schema_id_tag(), "Not Here".to_string()), - Query::Eq(cred_def_id_tag(), "Not Here".to_string()), - ]); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::And(vec![ - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), "Not Here".to_string()), - ]); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::And(vec![ - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), CRED_DEF_ID.to_string()), - ]); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap() - } - - #[test] - fn test_process_op_not() { - let filter = filter(); - let mut op = Query::Not(Box::new(Query::And(vec![ - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), CRED_DEF_ID.to_string()), - ]))); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::Not(Box::new(Query::And(vec![ - Query::Eq(schema_id_tag(), "Not Here".to_string()), - Query::Eq(cred_def_id_tag(), "Not Here".to_string()), - ]))); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap() - } - - #[test] - fn test_proccess_op_or_with_nested_and() { - let filter = filter(); - let mut op = Query::Or(vec![ - Query::And(vec![ - Query::Eq(schema_id_tag(), "Not Here".to_string()), - Query::Eq(cred_def_id_tag(), "Not Here".to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_issuer_did_tag(), "Not Here".to_string()), - Query::Eq(schema_name_tag(), "Not Here".to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_name_tag(), "Not Here".to_string()), - Query::Eq(issuer_did_tag(), "Not Here".to_string()), - ]), - ]); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::Or(vec![ - Query::And(vec![ - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), "Not Here".to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_issuer_did_tag(), "Not Here".to_string()), - Query::Eq(schema_name_tag(), "Not Here".to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_name_tag(), "Not Here".to_string()), - Query::Eq(issuer_did_tag(), "Not Here".to_string()), - ]), - ]); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::Or(vec![ - Query::And(vec![ - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), CRED_DEF_ID.to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_issuer_did_tag(), "Not Here".to_string()), - Query::Eq(schema_name_tag(), "Not Here".to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_name_tag(), "Not Here".to_string()), - Query::Eq(issuer_did_tag(), "Not 
Here".to_string()), - ]), - ]); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap() - } - - #[test] - fn test_verify_op_complex_nested() { - let filter = filter(); - let mut op = Query::And(vec![ - Query::And(vec![ - Query::Or(vec![ - Query::Eq(schema_name_tag(), "Not Here".to_string()), - Query::Eq(issuer_did_tag(), "Not Here".to_string()), - ]), - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), CRED_DEF_ID.to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_issuer_did_tag(), SCHEMA_ISSUER_DID.to_string()), - Query::Eq(schema_name_tag(), SCHEMA_NAME.to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_version_tag(), SCHEMA_VERSION.to_string()), - Query::Eq(issuer_did_tag(), ISSUER_DID.to_string()), - ]), - ]); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - - op = Query::And(vec![ - Query::And(vec![ - Query::Or(vec![ - Query::Eq(schema_name_tag(), SCHEMA_NAME.to_string()), - Query::Eq(issuer_did_tag(), "Not Here".to_string()), - ]), - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), CRED_DEF_ID.to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_issuer_did_tag(), SCHEMA_ISSUER_DID.to_string()), - Query::Eq(schema_name_tag(), SCHEMA_NAME.to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_version_tag(), SCHEMA_VERSION.to_string()), - Query::Eq(issuer_did_tag(), ISSUER_DID.to_string()), - ]), - Query::Not(Box::new(Query::Eq( - schema_version_tag(), - "NOT HERE".to_string(), - ))), - ]); - VerifierService::_process_operator("zip", &op, &filter, None).unwrap(); - - op = Query::And(vec![ - Query::And(vec![ - Query::Or(vec![ - Query::Eq(schema_name_tag(), SCHEMA_NAME.to_string()), - Query::Eq(issuer_did_tag(), "Not Here".to_string()), - ]), - Query::Eq(schema_id_tag(), SCHEMA_ID.to_string()), - Query::Eq(cred_def_id_tag(), CRED_DEF_ID.to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_issuer_did_tag(), SCHEMA_ISSUER_DID.to_string()), - Query::Eq(schema_name_tag(), SCHEMA_NAME.to_string()), - ]), - Query::And(vec![ - Query::Eq(schema_version_tag(), SCHEMA_VERSION.to_string()), - Query::Eq(issuer_did_tag(), ISSUER_DID.to_string()), - ]), - Query::Not(Box::new(Query::Eq( - schema_version_tag(), - SCHEMA_VERSION.to_string(), - ))), - ]); - assert!(VerifierService::_process_operator("zip", &op, &filter, None).is_err()); - } - - #[test] - fn test_process_op_eq_revealed_value() { - let filter = filter(); - let value = "value"; - - let mut op = Query::Eq(attr_tag_value(), value.to_string()); - VerifierService::_process_operator("zip", &op, &filter, Some(value)).unwrap(); - - op = Query::And(vec![ - Query::Eq(attr_tag_value(), value.to_string()), - Query::Eq(schema_issuer_did_tag(), SCHEMA_ISSUER_DID.to_string()), - ]); - VerifierService::_process_operator("zip", &op, &filter, Some(value)).unwrap(); - - op = Query::Eq(attr_tag_value(), value.to_string()); - assert!(VerifierService::_process_operator("zip", &op, &filter, Some("NOT HERE")).is_err()); - } - - #[test] - fn test_process_op_eq_revealed_value_case_insensitive() { - let filter = filter(); - let value = "Alice Clark"; - - let mut op = Query::Eq("attr::givenname::value".to_string(), value.to_string()); - VerifierService::_process_operator("Given Name", &op, &filter, Some(value)).unwrap(); - - op = Query::And(vec![ - Query::Eq("attr::givenname::value".to_string(), value.to_string()), - Query::Eq(schema_issuer_did_tag(), SCHEMA_ISSUER_DID.to_string()), - ]); - 
VerifierService::_process_operator("Given Name", &op, &filter, Some(value)).unwrap(); - } - - fn _received() -> HashMap { - let mut res: HashMap = HashMap::new(); - res.insert( - "referent_1".to_string(), - Identifier { - timestamp: Some(1234), - schema_id: SchemaId(String::new()), - cred_def_id: CredentialDefinitionId(String::new()), - rev_reg_id: Some(RevocationRegistryId(String::new())), - }, - ); - res.insert( - "referent_2".to_string(), - Identifier { - timestamp: None, - schema_id: SchemaId(String::new()), - cred_def_id: CredentialDefinitionId(String::new()), - rev_reg_id: Some(RevocationRegistryId(String::new())), - }, - ); - res - } - - fn _interval() -> NonRevocedInterval { - NonRevocedInterval { - from: None, - to: Some(1234), - } - } - - #[test] - fn validate_timestamp_works() { - VerifierService::_validate_timestamp(&_received(), "referent_1", &None, &None).unwrap(); - VerifierService::_validate_timestamp(&_received(), "referent_1", &Some(_interval()), &None) - .unwrap(); - VerifierService::_validate_timestamp(&_received(), "referent_1", &None, &Some(_interval())) - .unwrap(); - } - - #[test] - fn validate_timestamp_not_work() { - VerifierService::_validate_timestamp(&_received(), "referent_2", &Some(_interval()), &None) - .unwrap_err(); - VerifierService::_validate_timestamp(&_received(), "referent_2", &None, &Some(_interval())) - .unwrap_err(); - VerifierService::_validate_timestamp(&_received(), "referent_3", &None, &Some(_interval())) - .unwrap_err(); - } -} diff --git a/libvdrtools/src/services/blob_storage/default_reader.rs b/libvdrtools/src/services/blob_storage/default_reader.rs deleted file mode 100644 index 456d5ff091..0000000000 --- a/libvdrtools/src/services/blob_storage/default_reader.rs +++ /dev/null @@ -1,93 +0,0 @@ -use std::{ - fs::File as SyncFile, - io::{Read, Seek, SeekFrom}, - path::PathBuf, -}; - -use crate::utils::crypto::base58::ToBase58; -use async_trait::async_trait; -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::hash::Hash; -use serde_json; - -use super::{ReadableBlob, Reader, ReaderType}; - -pub(crate) struct DefaultReader { - file: SyncFile, - hash: Vec, -} - -#[derive(Serialize, Deserialize)] -struct DefaultReaderConfig { - base_dir: String, -} - -#[async_trait] -impl ReaderType for DefaultReaderType { - async fn open(&self, config: &str) -> IndyResult> { - let config: DefaultReaderConfig = serde_json::from_str(config).to_indy( - IndyErrorKind::InvalidStructure, - "Can't deserialize DefaultReaderConfig", - )?; - - Ok(Box::new(config)) - } -} - -#[async_trait] -impl Reader for DefaultReaderConfig { - async fn open(&self, hash: &[u8], _location: &str) -> IndyResult> { - let mut path = PathBuf::from(&self.base_dir); - path.push(hash.to_base58()); - - let file = SyncFile::open(path)?; - - Ok(Box::new(DefaultReader { - file, - hash: hash.to_owned(), - })) - } -} - -#[async_trait] -impl ReadableBlob for DefaultReader { - async fn verify(&mut self) -> IndyResult { - self.file.seek(SeekFrom::Start(0))?; - let mut hasher = Hash::new_context()?; - let mut buf = [0u8; 1024]; - - loop { - let sz = self.file.read(&mut buf)?; - - if sz == 0 { - return Ok(hasher.finish()?.to_vec().eq(&self.hash)); - } - - hasher.update(&buf[0..sz])?; - } - } - - fn close(&self) -> IndyResult<()> { - /* nothing to do */ - Ok(()) - } - - fn read(&mut self, size: usize, offset: usize) -> IndyResult> { - let mut buf = vec![0u8; size]; - - self.file.seek(SeekFrom::Start(offset as u64))?; - let act_size = self.file.read(buf.as_mut_slice())?; - - 
buf.truncate(act_size); - - Ok(buf) - } -} - -pub(crate) struct DefaultReaderType {} - -impl DefaultReaderType { - pub(crate) fn new() -> Self { - DefaultReaderType {} - } -} diff --git a/libvdrtools/src/services/blob_storage/default_writer.rs b/libvdrtools/src/services/blob_storage/default_writer.rs deleted file mode 100644 index fafab7acf1..0000000000 --- a/libvdrtools/src/services/blob_storage/default_writer.rs +++ /dev/null @@ -1,114 +0,0 @@ -use std::path::PathBuf; - -use crate::utils::crypto::base58::ToBase58; -use async_std::{fs, fs::File, prelude::*}; -use async_trait::async_trait; -use indy_api_types::errors::prelude::*; -use serde_json; - -use crate::utils::environment; - -use super::{WritableBlob, Writer, WriterType}; - -#[allow(dead_code)] -pub(crate) struct DefaultWriter { - base_dir: PathBuf, - uri_pattern: String, - file: File, - id: i32, -} - -#[derive(Serialize, Deserialize)] -struct DefaultWriterConfig { - base_dir: String, - uri_pattern: String, -} - -#[async_trait] -impl WriterType for DefaultWriterType { - async fn open(&self, config: &str) -> IndyResult> { - let config: DefaultWriterConfig = serde_json::from_str(config).to_indy( - IndyErrorKind::InvalidStructure, - "Can't deserialize DefaultWriterConfig", - )?; - - Ok(Box::new(config)) - } -} - -#[async_trait] -impl Writer for DefaultWriterConfig { - async fn create(&self, id: i32) -> IndyResult> { - let path = PathBuf::from(&self.base_dir); - - fs::DirBuilder::new() - .recursive(true) - .create(tmp_storage_file(id).parent().unwrap()) - .await?; - - let file = File::create(tmp_storage_file(id)) - .await - .map_err(map_err_trace!())?; - - Ok(Box::new(DefaultWriter { - base_dir: path, - uri_pattern: self.uri_pattern.clone(), - file, - id, - })) - } -} - -#[async_trait] -impl WritableBlob for DefaultWriter { - async fn append(&mut self, bytes: &[u8]) -> IndyResult { - trace!("append >>>"); - - self.file.write_all(bytes).await.map_err(map_err_trace!())?; - - let res = bytes.len(); - trace!("append <<< {}", res); - Ok(res) - } - - async fn finalize(&mut self, hash: &[u8]) -> IndyResult { - trace!("finalize >>>"); - - self.file.flush().await.map_err(map_err_trace!())?; - self.file.sync_all().await.map_err(map_err_trace!())?; - - let mut path = self.base_dir.clone(); - path.push(hash.to_base58()); - - fs::DirBuilder::new() - .recursive(true) - .create(path.parent().unwrap()) - .await - .map_err(map_err_trace!(format!("path: {:?}", path)))?; - - fs::copy(&tmp_storage_file(self.id), &path) - .await - .map_err(map_err_trace!())?; //FIXME - - fs::remove_file(&tmp_storage_file(self.id)) - .await - .map_err(map_err_trace!())?; - - let res = path.to_str().unwrap().to_owned(); - - trace!("finalize <<< {}", res); - Ok(res) - } -} - -fn tmp_storage_file(id: i32) -> PathBuf { - environment::tmp_file_path(&format!("def_storage_tmp_{}", id)) -} - -pub(crate) struct DefaultWriterType {} - -impl DefaultWriterType { - pub fn new() -> Self { - DefaultWriterType {} - } -} diff --git a/libvdrtools/src/services/blob_storage/mod.rs b/libvdrtools/src/services/blob_storage/mod.rs deleted file mode 100644 index 8009f7ccc2..0000000000 --- a/libvdrtools/src/services/blob_storage/mod.rs +++ /dev/null @@ -1,272 +0,0 @@ -mod default_reader; -mod default_writer; - -use std::{collections::HashMap, sync::Mutex as SyncMutex}; - -use async_trait::async_trait; -use futures::lock::Mutex; -use indy_api_types::errors::prelude::*; -use indy_utils::sequence; - -use sha2::{ - digest::{FixedOutput, Update}, - Sha256, -}; - -#[async_trait] -trait WriterType: Send + 
Sync { - async fn open(&self, config: &str) -> IndyResult>; -} - -#[async_trait] -trait Writer: Send + Sync { - async fn create(&self, id: i32) -> IndyResult>; -} - -#[async_trait] -trait WritableBlob: Send + Sync { - async fn append(&mut self, bytes: &[u8]) -> IndyResult; - async fn finalize(&mut self, hash: &[u8]) -> IndyResult; -} - -#[async_trait] -trait ReaderType: Send + Sync { - async fn open(&self, config: &str) -> IndyResult>; -} - -#[async_trait] -trait Reader: Send + Sync { - async fn open(&self, hash: &[u8], location: &str) -> IndyResult>; -} - -#[async_trait] -trait ReadableBlob: Send + Sync { - fn read(&mut self, size: usize, offset: usize) -> IndyResult>; - async fn verify(&mut self) -> IndyResult; - fn close(&self) -> IndyResult<()>; -} - -pub struct BlobStorageService { - writer_types: Mutex>>, - writer_configs: Mutex>>, - writer_blobs: Mutex, Sha256)>>, - - reader_types: Mutex>>, - reader_configs: Mutex>>, - reader_blobs: SyncMutex>>, -} - -impl BlobStorageService { - pub(crate) fn new() -> BlobStorageService { - let mut writer_types: HashMap> = HashMap::new(); - writer_types.insert( - "default".to_owned(), - Box::new(default_writer::DefaultWriterType::new()), - ); - - let mut reader_types: HashMap> = HashMap::new(); - reader_types.insert( - "default".to_owned(), - Box::new(default_reader::DefaultReaderType::new()), - ); - - BlobStorageService { - writer_types: Mutex::new(writer_types), - writer_configs: Mutex::new(HashMap::new()), - writer_blobs: Mutex::new(HashMap::new()), - - reader_types: Mutex::new(reader_types), - reader_configs: Mutex::new(HashMap::new()), - reader_blobs: SyncMutex::new(HashMap::new()), - } - } -} - -/* Writer */ -impl BlobStorageService { - pub(crate) async fn open_writer(&self, type_: &str, config: &str) -> IndyResult { - let writer_config = self - .writer_types - .lock() - .await - .get(type_) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Unknown BlobStorage Writer type", - ) - })? - .open(config) - .await?; - - let config_handle = sequence::get_next_id(); - - self.writer_configs - .lock() - .await - .insert(config_handle, writer_config); - - Ok(config_handle) - } - - pub(crate) async fn create_blob(&self, config_handle: i32) -> IndyResult { - let blob_handle = sequence::get_next_id(); - - let writer = self - .writer_configs - .lock() - .await - .get(&config_handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage config handle", - ) - })? 
// FIXME: Review error kind - .create(blob_handle) - .await?; - - self.writer_blobs - .lock() - .await - .insert(blob_handle, (writer, Sha256::default())); - - Ok(blob_handle) - } - - pub(crate) async fn append(&self, handle: i32, bytes: &[u8]) -> IndyResult { - let mut writers = self.writer_blobs.lock().await; - - let &mut (ref mut writer, ref mut hasher) = writers.get_mut(&handle).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage handle", - ) - })?; // FIXME: Review error kind - - hasher.update(bytes); - let res = writer.append(bytes).await?; - Ok(res) - } - - pub(crate) async fn finalize(&self, handle: i32) -> IndyResult<(String, Vec)> { - let mut writers = self.writer_blobs.lock().await; - - let (mut writer, hasher) = writers.remove(&handle).ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage handle", - ) - })?; // FIXME: Review error kind - - let hash = hasher.finalize_fixed().to_vec(); - - writer - .finalize(hash.as_slice()) - .await - .map(|location| (location, hash)) - } -} - -/* Reader */ -impl BlobStorageService { - pub(crate) async fn open_reader(&self, type_: &str, config: &str) -> IndyResult { - let reader_config = self - .reader_types - .lock() - .await - .get(type_) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage Reader type", - ) - })? // FIXME: Review error kind - .open(config) - .await?; - - let config_handle = sequence::get_next_id(); - - self.reader_configs - .lock() - .await - .insert(config_handle, reader_config); - - Ok(config_handle) - } - - pub(crate) async fn open_blob( - &self, - config_handle: i32, - location: &str, - hash: &[u8], - ) -> IndyResult { - let reader = self - .reader_configs - .lock() - .await - .get(&config_handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage config handle", - ) - })? // FIXME: Review error kind - .open(hash, location) - .await?; - - let reader_handle = sequence::get_next_id(); - self.reader_blobs - .lock() - .unwrap() - .insert(reader_handle, reader); - - Ok(reader_handle) - } - - pub(crate) fn read(&self, handle: i32, size: usize, offset: usize) -> IndyResult> { - self.reader_blobs - .lock() - .unwrap() - .get_mut(&handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage handle", - ) - })? // FIXME: Review error kind - .read(size, offset) - } - - pub(crate) async fn _verify(&self, handle: i32) -> IndyResult { - let res = self - .reader_blobs - .lock() - .unwrap() - .get_mut(&handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage handle", - ) - })? // FIXME: Review error kind - .verify() - .await?; - - Ok(res) - } - - pub(crate) fn close(&self, handle: i32) -> IndyResult<()> { - self.reader_blobs - .lock() - .unwrap() - .remove(&handle) - .ok_or_else(|| { - err_msg( - IndyErrorKind::InvalidStructure, - "Invalid BlobStorage handle", - ) - })? 
// FIXME: Review error kind - .close() - } -} diff --git a/libvdrtools/src/services/crypto/ed25519.rs b/libvdrtools/src/services/crypto/ed25519.rs index 98029ea06f..00c3ecbb96 100644 --- a/libvdrtools/src/services/crypto/ed25519.rs +++ b/libvdrtools/src/services/crypto/ed25519.rs @@ -1,7 +1,8 @@ -use super::CryptoType; use indy_api_types::errors::IndyError; use indy_utils::crypto::{ed25519_box, ed25519_sign, sealedbox}; +use super::CryptoType; + pub struct ED25519CryptoType {} impl ED25519CryptoType { diff --git a/libvdrtools/src/services/crypto/mod.rs b/libvdrtools/src/services/crypto/mod.rs index 94b2919765..af71363f61 100644 --- a/libvdrtools/src/services/crypto/mod.rs +++ b/libvdrtools/src/services/crypto/mod.rs @@ -3,26 +3,25 @@ mod ed25519; use std::{collections::HashMap, str}; use async_std::sync::RwLock; +use ed25519::ED25519CryptoType; use hex::FromHex; use indy_api_types::errors::prelude::*; - use indy_utils::crypto::{ base64, chacha20poly1305_ietf, chacha20poly1305_ietf::gen_nonce_and_encrypt_detached, ed25519_box, ed25519_sign, }; -use crate::utils::crypto::base58::{FromBase58, ToBase58}; - use crate::{ domain::crypto::{ did::{Did, DidValue, MyDidInfo, TheirDid, TheirDidInfo}, key::{Key, KeyInfo}, }, - utils::crypto::verkey_builder::{build_full_verkey, split_verkey, verkey_get_cryptoname}, + utils::crypto::{ + base58::{DecodeBase58, ToBase58}, + verkey_builder::{build_full_verkey, split_verkey, verkey_get_cryptoname}, + }, }; -use ed25519::ED25519CryptoType; - const DEFAULT_CRYPTO_TYPE: &str = "ed25519"; //TODO fix this crypto trait so it matches the functions below @@ -97,8 +96,7 @@ impl CryptoService { let crypto_type_name = key_info .crypto_type - .as_ref() - .map(String::as_str) + .as_deref() .unwrap_or(DEFAULT_CRYPTO_TYPE); let crypto_types = self.crypto_types.read().await; @@ -132,8 +130,7 @@ impl CryptoService { let crypto_type_name = my_did_info .crypto_type - .as_ref() - .map(String::as_str) + .as_deref() .unwrap_or(DEFAULT_CRYPTO_TYPE); let crypto_types = self.crypto_types.read().await; @@ -189,11 +186,11 @@ impl CryptoService { trace!("create_their_did > their_did_info {:?}", their_did_info); // Check did is correct Base58 - let _ = self.validate_did(&their_did_info.did)?; + self.validate_did(&their_did_info.did)?; let verkey = build_full_verkey( &their_did_info.did.to_unqualified().0, - their_did_info.verkey.as_ref().map(String::as_str), + their_did_info.verkey.as_deref(), )?; self.validate_key(&verkey).await?; @@ -225,7 +222,7 @@ impl CryptoService { })?; let my_sk = ed25519_sign::SecretKey::from_slice( - &my_key.signkey.as_str().from_base58()?.as_slice(), + my_key.signkey.as_str().decode_base58()?.as_slice(), )?; let signature = crypto_type.sign(&my_sk, doc)?[..].to_vec(); @@ -262,8 +259,8 @@ impl CryptoService { ) })?; - let their_vk = ed25519_sign::PublicKey::from_slice(&their_vk.from_base58()?)?; - let signature = ed25519_sign::Signature::from_slice(&signature)?; + let their_vk = ed25519_sign::PublicKey::from_slice(&their_vk.decode_base58()?)?; + let signature = ed25519_sign::Signature::from_slice(signature)?; let valid = crypto_type.verify(&their_vk, msg, &signature)?; @@ -312,10 +309,11 @@ impl CryptoService { ) })?; - let my_sk = - ed25519_sign::SecretKey::from_slice(my_key.signkey.as_str().from_base58()?.as_slice())?; + let my_sk = ed25519_sign::SecretKey::from_slice( + my_key.signkey.as_str().decode_base58()?.as_slice(), + )?; - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.from_base58()?.as_slice())?; + let their_vk = 
ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; let nonce = crypto_type.gen_nonce(); let encrypted_doc = crypto_type.crypto_box(&my_sk, &their_vk, doc, &nonce)?; @@ -367,11 +365,12 @@ impl CryptoService { ) })?; - let my_sk = ed25519_sign::SecretKey::from_slice(&my_key.signkey.from_base58()?.as_slice())?; - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.from_base58()?.as_slice())?; - let nonce = ed25519_box::Nonce::from_slice(&nonce)?; + let my_sk = + ed25519_sign::SecretKey::from_slice(my_key.signkey.decode_base58()?.as_slice())?; + let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; + let nonce = ed25519_box::Nonce::from_slice(nonce)?; - let decrypted_doc = crypto_type.crypto_box_open(&my_sk, &their_vk, &doc, &nonce)?; + let decrypted_doc = crypto_type.crypto_box_open(&my_sk, &their_vk, doc, &nonce)?; let res = Ok(decrypted_doc); trace!("crypto_box_open < {:?}", res); @@ -394,7 +393,7 @@ impl CryptoService { ) })?; - let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.from_base58()?.as_slice())?; + let their_vk = ed25519_sign::PublicKey::from_slice(their_vk.decode_base58()?.as_slice())?; let encrypted_doc = crypto_type.crypto_box_seal(&their_vk, doc)?; let res = Ok(encrypted_doc); @@ -423,10 +422,11 @@ impl CryptoService { ) })?; - let my_vk = ed25519_sign::PublicKey::from_slice(my_vk.from_base58()?.as_slice())?; + let my_vk = ed25519_sign::PublicKey::from_slice(my_vk.decode_base58()?.as_slice())?; - let my_sk = - ed25519_sign::SecretKey::from_slice(my_key.signkey.as_str().from_base58()?.as_slice())?; + let my_sk = ed25519_sign::SecretKey::from_slice( + my_key.signkey.as_str().decode_base58()?.as_slice(), + )?; let decrypted_doc = crypto_type.crypto_box_seal_open(&my_vk, &my_sk, doc)?; @@ -453,7 +453,7 @@ impl CryptoService { seed.as_bytes().to_vec() } else if seed.ends_with('=') { // is base64 string - let decoded = base64::decode(&seed).to_indy( + let decoded = base64::decode(seed).to_indy( IndyErrorKind::InvalidStructure, "Can't deserialize Seed from Base64 string", )?; @@ -463,8 +463,8 @@ impl CryptoService { return Err(err_msg( IndyErrorKind::InvalidStructure, format!( - "Trying to use invalid base64 encoded `seed`. \ - The number of bytes must be {} ", + "Trying to use invalid base64 encoded `seed`. The number of bytes must be \ + {} ", ed25519_sign::SEEDBYTES ), )); @@ -476,8 +476,8 @@ impl CryptoService { return Err(err_msg( IndyErrorKind::InvalidStructure, format!( - "Trying to use invalid `seed`. It can be either \ - {} bytes string or base64 string or {} bytes HEX string", + "Trying to use invalid `seed`. 
It can be either {} bytes string or base64 \ + string or {} bytes HEX string", ed25519_sign::SEEDBYTES, ed25519_sign::SEEDBYTES * 2 ), @@ -508,10 +508,10 @@ impl CryptoService { ) })?; - if vk.starts_with('~') { - let _ = vk[1..].from_base58()?; // TODO: proper validate abbreviated verkey + if let Some(vk) = vk.strip_prefix('~') { + let _ = vk.decode_base58()?; // TODO: proper validate abbreviated verkey } else { - let vk = ed25519_sign::PublicKey::from_slice(vk.from_base58()?.as_slice())?; + let vk = ed25519_sign::PublicKey::from_slice(vk.decode_base58()?.as_slice())?; crypto_type.validate_key(&vk)?; }; @@ -538,7 +538,7 @@ impl CryptoService { ) -> (String, String, String) { //encrypt message with aad let (ciphertext, iv, tag) = - gen_nonce_and_encrypt_detached(plaintext.as_slice(), aad.as_bytes(), &cek); + gen_nonce_and_encrypt_detached(plaintext.as_slice(), aad.as_bytes(), cek); //base64 url encode data let iv_encoded = base64::encode_urlsafe(&iv[..]); @@ -548,7 +548,7 @@ impl CryptoService { (ciphertext_encoded, iv_encoded, tag_encoded) } - /* ciphertext helper functions*/ + /* ciphertext helper functions */ pub(crate) fn decrypt_ciphertext( &self, ciphertext: &str, @@ -626,10 +626,10 @@ impl CryptoService { #[cfg(test)] mod tests { - use crate::domain::crypto::did::MyDidInfo; use indy_utils::crypto::chacha20poly1305_ietf::gen_key; use super::*; + use crate::domain::crypto::did::MyDidInfo; #[async_std::test] async fn create_my_did_with_works_for_empty_info() { @@ -877,7 +877,7 @@ mod tests { .await .unwrap(); - assert_eq!(false, valid); + assert!(!valid); } #[async_std::test] @@ -1051,8 +1051,8 @@ mod tests { async fn test_encrypt_plaintext_and_decrypt_ciphertext_works() { let service: CryptoService = CryptoService::new(); let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = "some protocol data input to the encryption"; let cek = gen_key(); @@ -1070,8 +1070,8 @@ mod tests { async fn test_encrypt_plaintext_decrypt_ciphertext_empty_string_works() { let service: CryptoService = CryptoService::new(); let plaintext = "".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = "some protocol data input to the encryption"; let cek = gen_key(); @@ -1089,8 +1089,8 @@ mod tests { async fn test_encrypt_plaintext_decrypt_ciphertext_bad_iv_fails() { let service: CryptoService = CryptoService::new(); let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. 
Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = "some protocol data input to the encryption"; let cek = gen_key(); @@ -1109,8 +1109,8 @@ mod tests { async fn test_encrypt_plaintext_decrypt_ciphertext_bad_ciphertext_fails() { let service: CryptoService = CryptoService::new(); let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = "some protocol data input to the encryption"; let cek = gen_key(); @@ -1128,8 +1128,8 @@ mod tests { async fn test_encrypt_plaintext_and_decrypt_ciphertext_wrong_cek_fails() { let service: CryptoService = CryptoService::new(); let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = "some protocol data input to the encryption"; let cek = chacha20poly1305_ietf::gen_key(); @@ -1148,8 +1148,8 @@ mod tests { async fn test_encrypt_plaintext_and_decrypt_ciphertext_bad_tag_fails() { let service: CryptoService = CryptoService::new(); let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = "some protocol data input to the encryption"; let cek = gen_key(); @@ -1166,8 +1166,8 @@ mod tests { async fn test_encrypt_plaintext_and_decrypt_ciphertext_bad_aad_fails() { let service: CryptoService = CryptoService::new(); let plaintext = "Hello World".as_bytes().to_vec(); - // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and alg - // Which the receiver MUST then check before decryption + // AAD allows the sender to tie extra (protocol) data to the encryption. Example JWE enc and + // alg Which the receiver MUST then check before decryption let aad = "some protocol data input to the encryption"; let cek = gen_key(); diff --git a/libvdrtools/src/services/mod.rs b/libvdrtools/src/services/mod.rs index 424cb4c373..83962a3493 100644 --- a/libvdrtools/src/services/mod.rs +++ b/libvdrtools/src/services/mod.rs @@ -1,10 +1,5 @@ -mod anoncreds; -mod blob_storage; mod crypto; mod wallet; -pub use anoncreds::{AnoncredsHelpers, IssuerService, ProverService, VerifierService}; - -pub use blob_storage::BlobStorageService; pub use crypto::CryptoService; pub(crate) use wallet::WalletService; diff --git a/libvdrtools/src/utils/ccallback.rs b/libvdrtools/src/utils/ccallback.rs deleted file mode 100644 index db4df06a72..0000000000 --- a/libvdrtools/src/utils/ccallback.rs +++ /dev/null @@ -1,8 +0,0 @@ -macro_rules! 
check_useful_c_callback { - ($x:ident, $e:expr) => { - let $x = match $x { - Some($x) => $x, - None => return $e, - }; - }; -} diff --git a/libvdrtools/src/utils/cheqd_crypto.rs b/libvdrtools/src/utils/cheqd_crypto.rs deleted file mode 100644 index c48335423f..0000000000 --- a/libvdrtools/src/utils/cheqd_crypto.rs +++ /dev/null @@ -1,122 +0,0 @@ -use indy_api_types::IndyError; -use indy_api_types::errors::{IndyErrorKind, IndyResult, IndyResultExt}; -use cosmrs::rpc; -use prost::Message; - -pub fn check_proofs( - result: &rpc::endpoint::abci_query::Response, -) -> IndyResult<()> { - // Decode state proofs - - // Decode proof for inner ival tree - let proof_op_0 = &result.response.proof.as_ref().ok_or( - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "The proof for inner ival tree is absent but should be placed" - ))?; - let proof_op_0 = &proof_op_0.ops[0].clone(); - let proof_0_data_decoded = - ics23::CommitmentProof::decode(proof_op_0.data.as_slice()).to_indy( - IndyErrorKind::InvalidStructure, - "The proof for inner ival tree cannot be decoded into ics23::CommitmentProof" - )?; - - // Decode proof for outer `ics23:simple` tendermint tree) - let proof_op_1 = result.response.proof.as_ref().ok_or( - IndyError::from_msg( - IndyErrorKind::InvalidStructure, - "The proof for outer ics23:simple is absent but should be placed" - ))?; - let proof_op_1 = &proof_op_1.ops[1].clone(); - let proof_1_data_decoded = - ics23::CommitmentProof::decode(proof_op_1.data.as_slice()).to_indy( - IndyErrorKind::InvalidStructure, - "The proof for outer ics23:simple cannot be decoded into ics23::CommitmentProof" - )?; - - // Get a root hash for the inner ival tree from the outer tree proof - let proof_1_existence = if let Some(ics23::commitment_proof::Proof::Exist(ex)) = - proof_1_data_decoded.proof.clone() - { - ex - } else { - let proof_op_1_str = serde_json::to_string(proof_op_1).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize object with proof for outer `ics23:simple` tendermint tree" - )?; - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - format!( - "Commitment proof has an incorrect format {}", - proof_op_1_str - ), - )); - }; - let proof_0_root = proof_1_existence.clone().value; - - // Check state proofs 0 (inner iavl tree) - let is_proof_correct = match proof_0_data_decoded.proof { - Some(ics23::commitment_proof::Proof::Exist(_)) => { - ics23::verify_membership( - &proof_0_data_decoded, // proof for verification - &ics23::iavl_spec(), // tree specification - &proof_0_root, // value root hash in the inner ival tree (value for outer tree) - &proof_op_0.key, // key for the inner ival tree - &result.response.value, // received value - ) - } - Some(ics23::commitment_proof::Proof::Nonexist(_)) => { - ics23::verify_non_membership( - &proof_0_data_decoded, // proof for verification - &ics23::iavl_spec(), // tree specification - &proof_0_root, // value root hash in the inner ival tree - &proof_op_0.key // key for the inner ival tree - ) - } - _ => {false} - }; - - if !is_proof_correct { - let proof_op_0_str = serde_json::to_string(proof_op_0).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize object with proof for inner ival tree" - )?; - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - format!( - "Commitment proof 0 is incorrect {}", - proof_op_0_str - ), - )); - } - - // Should be output from light client - // Calculate a root hash for the outer tree - let proof_1_root = ics23::calculate_existence_root(&proof_1_existence.clone()) - .map_err(|er 
| IndyError::from_msg( - IndyErrorKind::InvalidStructure, - format!("Commitment proof has an incorrect format {}", er)))?; - - // Check state proofs 1 (outer `ics23:simple` tendermint tree) - if !ics23::verify_membership( - &proof_1_data_decoded, // proof for verification - &ics23::tendermint_spec(), // tree specification - &proof_1_root, // root hash for the outer tree - &proof_op_1.key, // key for the outer tree - &proof_0_root, // inner tree root hash in the outer tree (should exist) - ) { - let proof_op_1_str = serde_json::to_string(proof_op_1).to_indy( - IndyErrorKind::InvalidState, - "Cannot serialize object with proof for outer `ics23:simple` tendermint tree" - )?; - return Err(IndyError::from_msg( - IndyErrorKind::InvalidStructure, - format!( - "Commitment proof 1 is incorrect {}", - proof_op_1_str - ), - )); - } - - Ok(()) -} diff --git a/libvdrtools/src/utils/cheqd_ledger.rs b/libvdrtools/src/utils/cheqd_ledger.rs deleted file mode 100644 index 95fc030a44..0000000000 --- a/libvdrtools/src/utils/cheqd_ledger.rs +++ /dev/null @@ -1,12 +0,0 @@ -pub const VERKEY_TYPE: &str = "Ed25519VerificationKey2020"; -const VERKEY_ALIAS: &str = "#verkey"; - -pub fn make_verification_id(did: &str) -> String { - let mut fully_v_id = did.to_string(); - fully_v_id.push_str(VERKEY_ALIAS); - fully_v_id -} - -pub fn make_base58_btc(verkey: &str) -> String { - format!("z{}",verkey.to_string()) -} diff --git a/libvdrtools/src/utils/crypto/base58.rs b/libvdrtools/src/utils/crypto/base58.rs index 257adac951..1f4948543a 100644 --- a/libvdrtools/src/utils/crypto/base58.rs +++ b/libvdrtools/src/utils/crypto/base58.rs @@ -3,8 +3,8 @@ use bs58::{decode, decode::Result, encode}; pub trait ToBase58 { fn to_base58(&self) -> String; } -pub trait FromBase58 { - fn from_base58(&self) -> Result>; +pub trait DecodeBase58 { + fn decode_base58(self) -> Result>; } impl ToBase58 for [u8] { @@ -13,14 +13,14 @@ impl ToBase58 for [u8] { } } -impl FromBase58 for [u8] { - fn from_base58(&self) -> Result> { +impl DecodeBase58 for &[u8] { + fn decode_base58(self) -> Result> { decode(self).into_vec() } } -impl FromBase58 for str { - fn from_base58(&self) -> Result> { +impl DecodeBase58 for &str { + fn decode_base58(self) -> Result> { decode(self.as_bytes()).into_vec() } } diff --git a/libvdrtools/src/utils/crypto/mod.rs b/libvdrtools/src/utils/crypto/mod.rs index 331746e09c..df7b29719e 100644 --- a/libvdrtools/src/utils/crypto/mod.rs +++ b/libvdrtools/src/utils/crypto/mod.rs @@ -1,3 +1,2 @@ pub mod base58; -pub mod signature_serializer; pub mod verkey_builder; diff --git a/libvdrtools/src/utils/crypto/proof_op.rs b/libvdrtools/src/utils/crypto/proof_op.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/libvdrtools/src/utils/crypto/signature_serializer.rs b/libvdrtools/src/utils/crypto/signature_serializer.rs deleted file mode 100644 index 2701d36c65..0000000000 --- a/libvdrtools/src/utils/crypto/signature_serializer.rs +++ /dev/null @@ -1,182 +0,0 @@ -use crate::domain::ledger::constants::{ATTRIB, GET_ATTR}; -use indy_api_types::errors::prelude::*; -use indy_utils::crypto::hash::Hash; -use serde_json::Value; - -fn _serialize_signature( - v: Value, - is_top_level: bool, - _type: Option<&str>, -) -> Result { - match v { - Value::Bool(value) => Ok(if value { - "True".to_string() - } else { - "False".to_string() - }), - Value::Number(value) => Ok(value.to_string()), - Value::String(value) => Ok(value), - Value::Array(array) => array - .into_iter() - .map(|element| _serialize_signature(element, false, _type)) - 
.collect::, IndyError>>() - .map(|res| res.join(",")), - Value::Object(map) => { - let mut result = "".to_string(); - let mut in_middle = false; - for key in map.keys() { - // Skip signature field at top level as in python code - if is_top_level && (key == "signature" || key == "fees" || key == "signatures") { - continue; - } - - if in_middle { - result += "|"; - } - - let mut value = map[key].clone(); - if (_type == Some(ATTRIB) || _type == Some(GET_ATTR)) - && (key == "raw" || key == "hash" || key == "enc") - { - // do it only for attribute related request - let mut ctx = Hash::new_context()?; - - ctx.update( - &value - .as_str() - .ok_or_else(|| { - IndyError::from_msg( - IndyErrorKind::InvalidState, - "Cannot update hash context", - ) - })? - .as_bytes(), - )?; - - value = Value::String(hex::encode(ctx.finish()?.as_ref())); - } - result = result + key + ":" + &_serialize_signature(value, false, _type)?; - in_middle = true; - } - Ok(result) - } - _ => Ok("".to_string()), - } -} - -#[cfg(test)] -mod tests { - use super::*; - - fn serialize_signature(v: Value) -> Result { - let _type = v["operation"]["type"].clone(); - _serialize_signature(v, true, _type.as_str()) - } - - #[test] - fn signature_serialize_works() { - let data = r#"{ - "name": "John Doe", - "age": 43, - "operation": { - "dest": 54 - }, - "phones": [ - "1234567", - "2345678", - {"rust": 5, "age": 1}, - 3 - ] - }"#; - let msg: Value = serde_json::from_str(data).unwrap(); - - let result = "age:43|name:John Doe|operation:dest:54|phones:1234567,2345678,age:1|rust:5,3"; - - assert_eq!(serialize_signature(msg).unwrap(), result) - } - - #[test] - fn signature_serialize_works_for_skipped_fields() { - let data = r#"{ - "name": "John Doe", - "age": 43, - "operation": { - "type": "100", - "hash": "cool hash", - "dest": 54 - }, - "fees": "fees1", - "signature": "sign1", - "signatures": "sign-m", - "phones": [ - "1234567", - "2345678", - {"rust": 5, "age": 1}, - 3 - ] - }"#; - let msg: Value = serde_json::from_str(data).unwrap(); - - let result = "age:43|name:John Doe|operation:dest:54|hash:46aa0c92129b33ee72ee1478d2ae62fa6e756869dedc6c858af3214a6fcf1904|type:100|phones:1234567,2345678,age:1|rust:5,3"; - - assert_eq!(serialize_signature(msg).unwrap(), result) - } - - #[test] - fn signature_serialize_works_with_raw_hash_for_attrib_related_type() { - let data = r#"{ - "name": "John Doe", - "age": 43, - "operation": { - "type": "100", - "hash": "cool hash", - "dest": 54, - "raw": "string for hash" - }, - "phones": [ - "1234567", - "2345678", - {"rust": 5, "age": 1}, - 3 - ] - }"#; - let msg: Value = serde_json::from_str(data).unwrap(); - - let result = "age:43|name:John Doe|operation:dest:54|hash:46aa0c92129b33ee72ee1478d2ae62fa6e756869dedc6c858af3214a6fcf1904|raw:1dcd0759ce38f57049344a6b3c5fc18144fca1724713090c2ceeffa788c02711|type:100|phones:1234567,2345678,age:1|rust:5,3"; - - assert_eq!(serialize_signature(msg).unwrap(), result) - } - - #[test] - fn signature_serialize_works_with_raw_hash_for_not_attrib_related_type() { - let data = r#"{ - "name": "John Doe", - "age": 43, - "operation": { - "type": "101", - "hash": "cool hash", - "dest": 54, - "raw": "string for hash" - }, - "phones": [ - "1234567", - "2345678", - {"rust": 5, "age": 1}, - 3 - ] - }"#; - let msg: Value = serde_json::from_str(data).unwrap(); - - let result = "age:43|name:John Doe|operation:dest:54|hash:cool hash|raw:string for hash|type:101|phones:1234567,2345678,age:1|rust:5,3"; - - assert_eq!(serialize_signature(msg).unwrap(), result) - } - - #[test] - fn 
signature_serialize_works_with_null() { - let data = r#"{"signature": null}"#; - let v: serde_json::Value = serde_json::from_str(data).unwrap(); - let serialized = serialize_signature(v).unwrap(); - assert_eq!(serialized, ""); - } -} diff --git a/libvdrtools/src/utils/crypto/verkey_builder.rs b/libvdrtools/src/utils/crypto/verkey_builder.rs index 4acb3c98e6..b0b6fb2126 100644 --- a/libvdrtools/src/utils/crypto/verkey_builder.rs +++ b/libvdrtools/src/utils/crypto/verkey_builder.rs @@ -1,8 +1,9 @@ +use indy_api_types::errors::prelude::*; + use crate::{ services::CryptoService, - utils::crypto::base58::{FromBase58, ToBase58}, + utils::crypto::base58::{DecodeBase58, ToBase58}, }; -use indy_api_types::errors::prelude::*; pub fn build_full_verkey(dest: &str, verkey: Option<&str>) -> Result { if let Some(verkey) = verkey { @@ -13,9 +14,9 @@ pub fn build_full_verkey(dest: &str, verkey: Option<&str>) -> Result i32 { - let result = unsafe { OPENSSL_memcmp(a, b, len) }; - return result; -} - -#[cfg(target_os = "ios")] -extern "C" { - fn OPENSSL_memcmp(a: *const u8, b: *const u8, len: usize) -> i32; -} diff --git a/libvdrtools/src/utils/logger.rs b/libvdrtools/src/utils/logger.rs deleted file mode 100644 index e1fb77db9e..0000000000 --- a/libvdrtools/src/utils/logger.rs +++ /dev/null @@ -1,61 +0,0 @@ -#[macro_export] -macro_rules! try_log { - ($expr:expr) => { - match $expr { - Ok(val) => val, - Err(err) => { - error!("try_log! | {}", err); - return Err(From::from(err)); - } - } - }; -} - -macro_rules! _map_err { - ($lvl:expr, $expr:expr) => { - |err| { - log!($lvl, "{} - {}", $expr, err); - err - } - }; - ($lvl:expr) => { - |err| { - log!($lvl, "{}", err); - err - } - }; -} - -#[macro_export] -macro_rules! map_err_err { - () => ( _map_err!(::log::Level::Error) ); - ($($arg:tt)*) => ( _map_err!(::log::Level::Error, $($arg)*) ) -} - -#[macro_export] -macro_rules! map_err_trace { - () => ( _map_err!(::log::Level::Trace) ); - ($($arg:tt)*) => ( _map_err!(::log::Level::Trace, $($arg)*) ) -} - -#[macro_export] -macro_rules! map_err_info { - () => ( _map_err!(::log::Level::Info) ); - ($($arg:tt)*) => ( _map_err!(::log::Level::Info, $($arg)*) ) -} - -#[cfg(debug_assertions)] -#[macro_export] -macro_rules! secret { - ($val:expr) => {{ - $val - }}; -} - -#[cfg(not(debug_assertions))] -#[macro_export] -macro_rules! 
secret { - ($val:expr) => {{ - "_" - }}; -} diff --git a/libvdrtools/src/utils/mod.rs b/libvdrtools/src/utils/mod.rs index 5566ec33b8..5acf386eae 100755 --- a/libvdrtools/src/utils/mod.rs +++ b/libvdrtools/src/utils/mod.rs @@ -1,25 +1,5 @@ pub use indy_utils::environment; - pub mod crypto; - -#[macro_use] -pub mod logger; - -#[allow(unused_macros)] -#[macro_use] -pub mod result; - -#[cfg(test)] -pub use indy_utils::test; - -#[macro_use] -pub mod try_utils; - -pub use indy_api_types::validation; - pub use indy_utils::wql; - #[macro_use] pub mod qualifier; - -pub mod extensions; diff --git a/libvdrtools/src/utils/qualifier.rs b/libvdrtools/src/utils/qualifier.rs index b2da56e8e7..6da00b58b8 100644 --- a/libvdrtools/src/utils/qualifier.rs +++ b/libvdrtools/src/utils/qualifier.rs @@ -37,9 +37,8 @@ pub fn method(entity: &str) -> Option { (Some(type_), None) => Some(type_.as_str().to_owned()), _ => { warn!( - "Unrecognized FQ method for {}, parsed items are \ - (where 2nd is method type, and 3rd is sub-method (namespace, ledger, type, etc)\ - {:?}", + "Unrecognized FQ method for {}, parsed items are (where 2nd is method \ + type, and 3rd is sub-method (namespace, ledger, type, etc){:?}", entity, caps ); None @@ -50,7 +49,7 @@ pub fn method(entity: &str) -> Option { } pub fn is_fully_qualified(entity: &str) -> bool { - REGEX.is_match(&entity) + REGEX.is_match(entity) } macro_rules! qualifiable_type (($newtype:ident) => ( diff --git a/libvdrtools/src/utils/result.rs b/libvdrtools/src/utils/result.rs deleted file mode 100644 index a63c325f53..0000000000 --- a/libvdrtools/src/utils/result.rs +++ /dev/null @@ -1,44 +0,0 @@ -macro_rules! prepare_result { - ($result:ident) => {{ - trace!("prepare_result: >>> {:?}", $result); - match $result { - Ok(_) => ErrorCode::Success, - Err(err) => { - if err.kind() == indy_api_types::errors::IndyErrorKind::InvalidState { - error!("InvalidState: {}", err); - } - err.into() - } - } - }}; - ($result:ident, $($dflt_val:expr),*) => {{ - trace!("prepare_result: >>> {:?}", $result); - match $result { - Ok(res) => (ErrorCode::Success, res), - Err(err) => { - if err.kind() == indy_api_types::errors::IndyErrorKind::InvalidState { - error!("InvalidState: {}", err); - } - (err.into(), ($($dflt_val),*)) - } - } - }} -} - -macro_rules! unwrap_opt_or_return { - ($opt:expr, $err:expr) => { - match $opt { - Some(val) => val, - None => return $err, - } - }; -} - -macro_rules! unwrap_or_return { - ($result:expr, $err:expr) => { - match $result { - Ok(res) => res, - Err(_) => return $err, - } - }; -} diff --git a/libvdrtools/src/utils/try_utils.rs b/libvdrtools/src/utils/try_utils.rs deleted file mode 100644 index 456df3630d..0000000000 --- a/libvdrtools/src/utils/try_utils.rs +++ /dev/null @@ -1 +0,0 @@ -//FIXME [async] TODO remove file? 
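Usage note on the base58 helper rename in libvdrtools/src/utils/crypto/base58.rs above: FromBase58::from_base58(&self) is replaced by DecodeBase58::decode_base58(self), implemented for &str and &[u8], so call sites such as the verkey validation in CryptoService change from vk.from_base58()? to vk.decode_base58()?. A minimal sketch of the new call shape (illustrative only, not part of the patch; the roundtrip helper is hypothetical):

use crate::utils::crypto::base58::{DecodeBase58, ToBase58};

// Decode a base58-encoded verkey into raw bytes and re-encode it.
// decode_base58 takes self by value and is provided for &str and &[u8];
// ToBase58 on [u8] is unchanged.
fn roundtrip(vk: &str) -> Result<String, bs58::decode::Error> {
    let bytes = vk.decode_base58()?; // was: vk.from_base58()?
    Ok(bytes.to_base58())
}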
diff --git a/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs b/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs index 8f9d100c62..24b235dea0 100644 --- a/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs +++ b/messages/src/msg_fields/protocols/cred_issuance/v2/mod.rs @@ -178,8 +178,8 @@ impl Serialize for CredentialPreviewV2MsgType { #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, TypedBuilder)] #[serde(rename_all = "snake_case")] pub struct AttachmentFormatSpecifier { - attach_id: String, - format: MaybeKnown, + pub attach_id: String, + pub format: MaybeKnown, } transit_to_aries_msg!( diff --git a/testnet b/testnet new file mode 100644 index 0000000000..eb4a6ecae4 --- /dev/null +++ b/testnet @@ -0,0 +1,7 @@ +{"reqSignature":{},"txn":{"data":{"data":{"alias":"OpsNode","blskey":"4i39oJqm7fVX33gnYEbFdGurMtwYQJgDEYfXdYykpbJMWogByocaXxKbuXdrg3k9LP33Tamq64gUwnm4oA7FkxqJ5h4WfKH6qyVLvmBu5HgeV8Rm1GJ33mKX6LWPbm1XE9TfzpQXJegKyxHQN9ABquyBVAsfC6NSM4J5t1QGraJBfZi","blskey_pop":"Qq3CzhSfugsCJotxSCRAnPjmNDJidDz7Ra8e4xvLTEzQ5w3ppGray9KynbGPH8T7XnUTU1ioZadTbjXaRY26xd4hQ3DxAyR4GqBymBn3UBomLRJHmj7ukcdJf9WE6tu1Fp1EhxmyaMqHv13KkDrDfCthgd2JjAWvSgMGWwAAzXEow5","client_ip":"13.58.197.208","client_port":"9702","node_ip":"3.135.134.42","node_port":"9701","services":["VALIDATOR"]},"dest":"EVwxHoKXUy2rnRzVdVKnJGWFviamxMwLvUso7KMjjQNH"},"metadata":{"from":"Pms5AZzgPWHSj6nNmJDfmo"},"type":"0"},"txnMetadata":{"seqNo":1,"txnId":"77ad6682f320be9969f70a37d712344afed8e3fba8d43fa5602c81b578d26088"},"ver":"1"} +{"reqSignature":{},"txn":{"data":{"data":{"alias":"cynjanode","blskey":"32DLSweyJRxVMcVKGjUeNkVF1fwyFfRcFqGU9x7qL2ox2STpF6VxZkbxoLkGMPnt3gywRaY6jAjqgC8XMkf3webMJ4SEViPtBKZJjCCFTf4tGXfEsMwinummaPja85GgTALf7DddCNyCojmkXWHpgjrLx3626Z2MiNxVbaMapG2taFX","blskey_pop":"RQRU8GVYSYZeu9dfH6myhzZ2qfxeVpCL3bTzgto1bRbx3QCt3mFFQQBVbgrqui2JpXhcWXxoDzp1WyYbSZwYqYQbRmvK7PPG82VAvVagv1n83Qa3cdyGwCevZdEzxuETiiXBRWSPfb4JibAXPKkLZHyQHWCEHcAEVeXtx7FRS1wjTd","client_ip":"3.17.103.221","client_port":"9702","node_ip":"3.17.215.226","node_port":"9701","services":["VALIDATOR"]},"dest":"iTq944JTtwHnst7rucfsRA4m26x9i6zCKKohETBCiWu"},"metadata":{"from":"QC174PGaL4zA9YHYqofPH2"},"type":"0"},"txnMetadata":{"seqNo":2,"txnId":"ce7361e44ec10a275899ece1574f6e38f2f3c7530c179fa07a2924e55775759b"},"ver":"1"} +{"reqSignature":{},"txn":{"data":{"data":{"alias":"GlobaliD","blskey":"4Behdr1KJfLTAPNospghtL7iWdCHca6MZDxAtzYNXq35QCUr4aqpLu6p4Sgu9wNbTACB3DbwmVgE2L7hX6UsasuvZautqUpf4nC5viFpH7X6mHyqLreBJTBH52tSwifQhRjuFAySbbfyRK3wb6R2Emxun9GY7MFNuy792LXYg4C6sRJ","blskey_pop":"RKYDRy8oTxKnyAV3HocapavH2jkw3PVe54JcEekxXz813DFbEy87N3i3BNqwHB7MH93qhtTRb7EZMaEiYhm92uaLKyubUMo5Rqjve2jbEdYEYVRmgNJWpxFKCmUBa5JwBWYuGunLMZZUTU3qjbdDXkJ9UNMQxDULCPU5gzLTy1B5kb","client_ip":"13.56.175.126","client_port":"9702","node_ip":"50.18.84.131","node_port":"9701","services":["VALIDATOR"]},"dest":"2ErWxamsNGBfhkFnwYgs4UW4aApct1kHUvu7jbkA1xX4"},"metadata":{"from":"4H8us7B1paLW9teANv8nam"},"type":"0"},"txnMetadata":{"seqNo":3,"txnId":"0c3b33b77e0419d6883be35d14b389c3936712c38a469ac5320a3cae68be1293"},"ver":"1"} 
+{"reqSignature":{},"txn":{"data":{"data":{"alias":"IdRamp","blskey":"LoYzqUMPDZEfRshwGSzkgATxcM5FAS1LYx896zHnMfXP7duDsCQ6CBG2akBkZzgH3tBMvnjhs2z7PFc2gFeaKUF9fKDHhtbVqPofxH3ebcRfA959qU9mgvmkUwMUgwd21puRU6BebUwBiYxMxcE5ChReBnAkdAv19gVorm3prBMk94","blskey_pop":"R1DjpsG7UxgwstuF7WDUL17a9Qq64vCozwJZ88bTrSDPwC1cdRn3WmhqJw5LpEhFQJosDSVVT6tS8dAZrrssRv2YsELbfGEJ7ZGjhNjZHwhqg4qeustZ7PZZE3Vr1ALSHY4Aa6KpNzGodxu1XymYZWXAFokPAs3Kho8mKcJwLCHn3h","client_ip":"207.126.128.12","client_port":"9702","node_ip":"207.126.129.12","node_port":"9701","services":["VALIDATOR"]},"dest":"5Zj5Aec6Kt9ki1runrXu87wZ522mnm3zwmaoHLUcHLx9"},"metadata":{"from":"AFLDFPoJuDQUHqnfmg8U7i"},"type":"0"},"txnMetadata":{"seqNo":4,"txnId":"c9df105558333ac8016610d9da5aad1e9a5dd50b9d9cc5684e94f439fa10f836"},"ver":"1"} +{"reqSignature":{},"txn":{"data":{"data":{"alias":"idlab-node01","blskey":"2fjJVi33U1tCTjW77cJaf1NLz7EzWkVNzR9BEQpVVK64MJpRKNUzt6k7Td2U8yqU5hGyAFH5N7ZymSB55TnpC3rJYLVTcGXZeXpmrQx3mwnXNyfTDnxfTpdQ1KMoFeZoDPZ8acfaH8GWeW2jL1qREE52tetBf4tXTeshmWzGkEN7r4y","blskey_pop":"RSjiM6dYUmN2rv2ca7dUCmEKrivq12rhxhXUKHdmSwUxbCmcijsgoERjYG7MqxhKLjSAJ5715K23fVEc6uK1kTenKmYCcCts8MLMAQG8Upb22nfgHJ3py8RwRoACeAjFF3myAMNRJJPhUdv96drJdwkGRv7f6JjvoB5KWVQYTNgheP","client_ip":"205.159.92.17","client_port":"9702","node_ip":"205.159.92.16","node_port":"9701","services":["VALIDATOR"]},"dest":"8czYgwmLDazVrBHuo53Tyx7Tw8ZhvnoC2BfhQGir4r8F"},"metadata":{"from":"PN8wFxLKjdkwyxoEEXwyz2"},"type":"0"},"txnMetadata":{"seqNo":5,"txnId":"9237eca7d2a203f6e1779f63064d2f22cf28e1bcd4e6fe5d791b15e82969acdc"},"ver":"1"} +{"reqSignature":{},"txn":{"data":{"data":{"alias":"lorica-identity-node1","blskey":"wUh24sVCQ8PHDgSb343g2eLxjD5vwxsrETfuV2sbwMNnYon9nhbaK5jcWTekvXtyiwxHxuiCCoZwKS97MQEAeC2oLbbMeKjYm212QwSnm7aKLEqTStXht35VqZvZLT7Q3mPQRYLjMGixdn4ocNHrBTMwPUQYycEqwaHWgE1ncDueXY","blskey_pop":"R2sMwF7UW6AaD4ALa1uB1YVPuP6JsdJ7LsUoViM9oySFqFt34C1x1tdHDysS9wwruzaaEFui6xNPqJ8eu3UBqcFKkoWhdsMqCALwe63ytxPwvtLtCffJLhHAcgrPC7DorXYdqhdG2cevdqc5oqFEAaKoFDBf12p5SsbbM4PYWCmVCb","client_ip":"35.225.220.151","client_port":"9702","node_ip":"35.224.26.110","node_port":"9701","services":["VALIDATOR"]},"dest":"k74ZsZuUaJEcB8RRxMwkCwdE5g1r9yzA3nx41qvYqYf"},"metadata":{"from":"Ex6hzsJFYzNJ7kzbfncNeU"},"type":"0"},"txnMetadata":{"seqNo":6,"txnId":"6880673ce4ae4a2352f103d2a6ae20469dd070f2027283a1da5e62a64a59d688"},"ver":"1"} +{"reqSignature":{},"txn":{"data":{"data":{"alias":"cysecure-itn","blskey":"GdCvMLkkBYevRFi93b6qaj9G2u1W6Vnbg8QhRD1chhrWR8vRE8x9x7KXVeUBPFf6yW5qq2JCfA2frc8SGni2RwjtTagezfwAwnorLhVJqS5ZxTi4pgcw6smebnt4zWVhTkh6ugDHEypHwNQBcw5WhBZcEJKgNbyVLnHok9ob6cfr3u","blskey_pop":"RbH9mY7M5p3UB3oj4sT1skYwMkxjoUnja8eTYfcm83VcNbxC9zR9pCiRhk4q1dJT3wkDBPGNKnk2p83vaJYLcgMuJtzoWoJAWAxjb3Mcq8Agf6cgQpBuzBq2uCzFPuQCAhDS4Kv9iwA6FsRnfvoeFTs1hhgSJVxQzDWMVTVAD9uCqu","client_ip":"35.169.19.171","client_port":"9702","node_ip":"54.225.56.21","node_port":"9701","services":["VALIDATOR"]},"dest":"4ETBDmHzx8iDQB6Xygmo9nNXtMgq9f6hxGArNhQ6Hh3u"},"metadata":{"from":"uSXXXEdBicPHMMhr3ddNF"},"type":"0"},"txnMetadata":{"seqNo":7,"txnId":"3c21718b07806b2f193b35953dda5b68b288efd551dce4467ce890703d5ba549"},"ver":"1"} diff --git a/uniffi_aries_vcx/core/Cargo.toml b/uniffi_aries_vcx/core/Cargo.toml index 4edd395d40..8610776bd1 100644 --- a/uniffi_aries_vcx/core/Cargo.toml +++ b/uniffi_aries_vcx/core/Cargo.toml @@ -16,7 +16,7 @@ path = "uniffi-bindgen.rs" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -uniffi = "0.23.0" +uniffi = { version = "0.23.0", 
features = ["cli"] } aries-vcx = { path = "../../aries_vcx" } tokio = { version = "1.24.1" } once_cell = "1.17.0" diff --git a/uniffi_aries_vcx/core/src/core/profile.rs b/uniffi_aries_vcx/core/src/core/profile.rs index dc3baba3b3..7e0c995b1c 100644 --- a/uniffi_aries_vcx/core/src/core/profile.rs +++ b/uniffi_aries_vcx/core/src/core/profile.rs @@ -1,35 +1,74 @@ use std::sync::Arc; use aries_vcx::{ - aries_vcx_core::wallet::indy::{wallet::create_and_open_wallet, IndySdkWallet, WalletConfig}, - core::profile::{profile::Profile, vdrtools_profile::VdrtoolsProfile}, + aries_vcx_core::{ + anoncreds::credx_anoncreds::IndyCredxAnonCreds, + ledger::base_ledger::TxnAuthrAgrmtOptions, + wallet::indy::{wallet::create_and_open_wallet, IndySdkWallet, WalletConfig}, + }, + core::profile::Profile, + errors::error::{AriesVcxError, AriesVcxErrorKind, VcxResult}, utils::mockdata::profile::mock_ledger::MockLedger, }; +use async_trait::async_trait; use crate::{errors::error::VcxUniFFIResult, runtime::block_on}; -pub struct ProfileHolder { - pub inner: Arc, +#[derive(Debug)] +pub struct UniffiProfile { + wallet: Arc, + anoncreds: IndyCredxAnonCreds, + ledger_read: MockLedger, + ledger_write: MockLedger, } -impl ProfileHolder {} +#[async_trait] +impl Profile for UniffiProfile { + type LedgerRead = MockLedger; + type LedgerWrite = MockLedger; + type Anoncreds = IndyCredxAnonCreds; + type Wallet = IndySdkWallet; + + fn ledger_read(&self) -> &Self::LedgerRead { + &self.ledger_read + } + + fn ledger_write(&self) -> &Self::LedgerWrite { + &self.ledger_write + } + + fn anoncreds(&self) -> &Self::Anoncreds { + &self.anoncreds + } + + fn wallet(&self) -> &Self::Wallet { + &self.wallet + } + + fn update_taa_configuration(&self, _taa_options: TxnAuthrAgrmtOptions) -> VcxResult<()> { + Err(AriesVcxError::from_msg( + AriesVcxErrorKind::ActionNotSupported, + "update_taa_configuration no implemented for VdrtoolsProfile", + )) + } +} + +pub struct ProfileHolder { + pub(crate) inner: UniffiProfile, +} pub fn new_indy_profile(wallet_config: WalletConfig) -> VcxUniFFIResult> { block_on(async { let wh = create_and_open_wallet(&wallet_config).await?; - let wallet = IndySdkWallet::new(wh); - let ledger = Arc::new(MockLedger); - let profile = VdrtoolsProfile::init( - Arc::new(wallet), - ledger.clone(), - ledger.clone(), - ledger.clone(), - ledger.clone(), - ); - - Ok(Arc::new(ProfileHolder { - inner: Arc::new(profile), - })) + let wallet = Arc::new(IndySdkWallet::new(wh)); + let profile = UniffiProfile { + anoncreds: IndyCredxAnonCreds::new(wallet.clone()), + wallet, + ledger_read: MockLedger, + ledger_write: MockLedger, + }; + + Ok(Arc::new(ProfileHolder { inner: profile })) }) } diff --git a/uniffi_aries_vcx/core/src/core/unpack_message.rs b/uniffi_aries_vcx/core/src/core/unpack_message.rs index ec105354d8..5c5673119f 100644 --- a/uniffi_aries_vcx/core/src/core/unpack_message.rs +++ b/uniffi_aries_vcx/core/src/core/unpack_message.rs @@ -1,17 +1,14 @@ use std::sync::Arc; -use serde::{Deserialize, Serialize}; +use aries_vcx::{ + aries_vcx_core::wallet::{base_wallet::BaseWallet, structs_io::UnpackMessageOutput}, + core::profile::Profile, +}; use super::profile::ProfileHolder; use crate::{errors::error::VcxUniFFIResult, runtime::block_on}; -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub struct UnpackMessage { - pub message: String, - pub recipient_verkey: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub sender_verkey: Option, -} +pub type UnpackMessage = UnpackMessageOutput; pub fn unpack_message( 
profile_holder: Arc, @@ -19,10 +16,8 @@ pub fn unpack_message( ) -> VcxUniFFIResult { block_on(async { let packed_bytes = packed_msg.as_bytes(); - let wallet = profile_holder.inner.inject_wallet(); - let unpacked_bytes = wallet.unpack_message(packed_bytes).await?; - let unpacked_string = String::from_utf8(unpacked_bytes)?; - let unpacked_message = serde_json::from_str::(&unpacked_string)?; + let wallet = profile_holder.inner.wallet(); + let unpacked_message = wallet.unpack_message(packed_bytes).await?; Ok(unpacked_message) }) } diff --git a/uniffi_aries_vcx/core/src/handlers/connection.rs b/uniffi_aries_vcx/core/src/handlers/connection.rs index ba68424992..48aec88c46 100644 --- a/uniffi_aries_vcx/core/src/handlers/connection.rs +++ b/uniffi_aries_vcx/core/src/handlers/connection.rs @@ -1,6 +1,7 @@ use std::sync::{Arc, Mutex}; use aries_vcx::{ + core::profile::Profile, errors::error::{AriesVcxError, AriesVcxErrorKind}, protocols::connection::{ pairwise_info::PairwiseInfo, Connection as VcxConnection, @@ -74,7 +75,7 @@ pub struct Connection { // seperate function since uniffi can't handle constructors with results pub fn create_inviter(profile: Arc) -> VcxUniFFIResult> { block_on(async { - let pairwise_info = PairwiseInfo::create(&profile.inner.inject_wallet()).await?; + let pairwise_info = PairwiseInfo::create(profile.inner.wallet()).await?; let connection = VcxConnection::new_inviter(String::new(), pairwise_info); let handler = Mutex::new(VcxGenericConnection::from(connection)); Ok(Arc::new(Connection { handler })) @@ -84,7 +85,7 @@ pub fn create_inviter(profile: Arc) -> VcxUniFFIResult) -> VcxUniFFIResult> { block_on(async { - let pairwise_info = PairwiseInfo::create(&profile.inner.inject_wallet()).await?; + let pairwise_info = PairwiseInfo::create(profile.inner.wallet()).await?; let connection = VcxConnection::new_invitee(String::new(), pairwise_info); let handler = Mutex::new(VcxGenericConnection::from(connection)); @@ -119,7 +120,7 @@ impl Connection { block_on(async { let new_conn = connection - .accept_invitation(&profile.inner.inject_indy_ledger_read(), invitation) + .accept_invitation(profile.inner.ledger_read(), invitation) .await?; *handler = VcxGenericConnection::from(new_conn); Ok(()) @@ -147,13 +148,7 @@ impl Connection { block_on(async { let new_conn = connection - .handle_request( - &profile.inner.inject_wallet(), - request, - url, - routing_keys, - &HttpClient, - ) + .handle_request(profile.inner.wallet(), request, url, routing_keys) .await?; *handler = VcxGenericConnection::from(new_conn); @@ -178,7 +173,7 @@ impl Connection { block_on(async { let new_conn = connection - .handle_response(&profile.inner.inject_wallet(), response, &HttpClient) + .handle_response(profile.inner.wallet(), response) .await?; *handler = VcxGenericConnection::from(new_conn); @@ -203,7 +198,7 @@ impl Connection { let connection = connection.prepare_request(url, routing_keys).await?; let request = connection.get_request().clone(); connection - .send_message(&profile.inner.inject_wallet(), &request.into(), &HttpClient) + .send_message(profile.inner.wallet(), &request.into(), &HttpClient) .await?; *handler = VcxGenericConnection::from(connection); Ok(()) @@ -218,11 +213,7 @@ impl Connection { block_on(async { let response = connection.get_connection_response_msg(); connection - .send_message( - &profile.inner.inject_wallet(), - &response.into(), - &HttpClient, - ) + .send_message(profile.inner.wallet(), &response.into(), &HttpClient) .await?; *handler = VcxGenericConnection::from(connection); @@ 
-239,7 +230,7 @@ impl Connection { block_on(async { connection .send_message( - &profile.inner.inject_wallet(), + profile.inner.wallet(), &connection.get_ack().into(), &HttpClient, ) diff --git a/wallet_migrator/Cargo.toml b/wallet_migrator/Cargo.toml index cb7a498798..124871d161 100644 --- a/wallet_migrator/Cargo.toml +++ b/wallet_migrator/Cargo.toml @@ -6,8 +6,8 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -aries_vcx_core = { path = "../aries_vcx_core", features = ["modular_libs"] } -credx = { package = "indy-credx", git = "https://github.com/hyperledger/indy-shared-rs", tag = "v1.0.1" } +aries_vcx_core = { path = "../aries_vcx_core", features = ["credx", "vdrtools_wallet"] } +credx = { package = "indy-credx", git = "https://github.com/hyperledger/indy-shared-rs", tag = "v1.1.0" } vdrtools = { package = "libvdrtools", path = "../libvdrtools" } serde_json = "1.0.96" thiserror = "1.0.40" diff --git a/wallet_migrator/src/lib.rs b/wallet_migrator/src/lib.rs index 58b84809b0..4f0ded459c 100644 --- a/wallet_migrator/src/lib.rs +++ b/wallet_migrator/src/lib.rs @@ -22,7 +22,7 @@ pub async fn migrate_wallet( where E: Display, { - info!("Starting wallet migration..."); + info!("Starting wallet migration"); if src_wallet_handle == dest_wallet_handle { error!("Equal wallet handles: {src_wallet_handle:?} {dest_wallet_handle:?}"); @@ -40,8 +40,8 @@ where .await?; info!( - "Migration from wallet with handle {src_wallet_handle:?} to wallet with handle \ - {dest_wallet_handle:?} finished successfully!" + "Completed migration from wallet with handle {src_wallet_handle:?} to wallet with handle \ + {dest_wallet_handle:?}" ); Ok(()) diff --git a/wrappers/node/package-lock.json b/wrappers/node/package-lock.json index 1ed8b52a06..ab7017ac75 100644 --- a/wrappers/node/package-lock.json +++ b/wrappers/node/package-lock.json @@ -1,12 +1,12 @@ { "name": "@hyperledger/node-vcx-wrapper", - "version": "0.58.0", + "version": "0.60.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@hyperledger/node-vcx-wrapper", - "version": "0.58.0", + "version": "0.60.0", "license": "Apache-2.0", "dependencies": { "@hyperledger/vcx-napi-rs": "file:../vcx-napi-rs", diff --git a/wrappers/node/package.json b/wrappers/node/package.json index 240bcb0829..3db4d176d8 100644 --- a/wrappers/node/package.json +++ b/wrappers/node/package.json @@ -3,7 +3,7 @@ "name": "@hyperledger/node-vcx-wrapper", "description": "NodeJS wrapper Aries Framework", "license": "Apache-2.0", - "version": "0.58.0", + "version": "0.60.0", "directories": { "test": "test", "build": "dist", diff --git a/wrappers/node/src/api/common.ts b/wrappers/node/src/api/common.ts index 81c8742502..cdaecd3e42 100644 --- a/wrappers/node/src/api/common.ts +++ b/wrappers/node/src/api/common.ts @@ -134,9 +134,9 @@ export enum ConnectionStateType { export enum HolderStateType { Initial = 0, - ProposalSent = 1, + ProposalSet = 1, OfferReceived = 2, - RequestSent = 3, + RequestSet = 3, Finished = 4, Failed = 5, } @@ -146,7 +146,7 @@ export enum IssuerStateType { ProposalReceived = 1, OfferSet = 2, RequestReceived = 4, - CredentialSent = 5, + CredentialSet = 5, Finished = 6, Failed = 7, } diff --git a/wrappers/node/src/api/credential-def.ts b/wrappers/node/src/api/credential-def.ts index 9bf9d79492..8660d84cc6 100644 --- a/wrappers/node/src/api/credential-def.ts +++ b/wrappers/node/src/api/credential-def.ts @@ -4,6 +4,7 @@ import { VcxBase } from './vcx-base'; import { 
VCXInternalError } from '../errors'; export interface ICredentialDefCreateDataV2 { + issuerDid: string; sourceId: string; schemaId: string; supportRevocation: boolean; @@ -44,6 +45,7 @@ export enum CredentialDefState { export class CredentialDef extends VcxBase { public static async create({ + issuerDid, supportRevocation, schemaId, sourceId, @@ -51,7 +53,7 @@ }: ICredentialDefCreateDataV2): Promise { try { const credentialDef = new CredentialDef({ schemaId }); - const handle = await ffi.credentialdefCreateV2(sourceId, schemaId, tag, supportRevocation); + const handle = await ffi.credentialdefCreateV2(issuerDid, sourceId, schemaId, tag, supportRevocation); credentialDef._setHandle(handle); return credentialDef; } catch (err: any) { diff --git a/wrappers/node/src/api/init.ts b/wrappers/node/src/api/init.ts index 8b26891b23..e0ba3b72b5 100644 --- a/wrappers/node/src/api/init.ts +++ b/wrappers/node/src/api/init.ts @@ -9,22 +9,10 @@ export function createAgencyClientForMainWallet(config: object): void { } } -export async function initIssuerConfig(config: object): Promise { - try { - return await ffiNapi.vcxInitIssuerConfig(JSON.stringify(config)); - } catch (err: any) { - throw new VCXInternalError(err); - } -} - export async function openMainPool(config: object): Promise { try { return await ffiNapi.openMainPool(JSON.stringify(config)); } catch (err: any) { throw new VCXInternalError(err); } -} - -export function enableMocks(): void { - return ffiNapi.enableMocks(); -} +} \ No newline at end of file diff --git a/wrappers/node/src/api/revocation-registry.ts b/wrappers/node/src/api/revocation-registry.ts index 63fcc3fdb4..5d59b0930c 100644 --- a/wrappers/node/src/api/revocation-registry.ts +++ b/wrappers/node/src/api/revocation-registry.ts @@ -63,9 +63,9 @@ export class RevocationRegistry extends VcxBase { } } - public async publishRevocations(): Promise { + public async publishRevocations(submitterDid: string): Promise { try { - await ffi.revocationRegistryPublishRevocations(this.handle); + await ffi.revocationRegistryPublishRevocations(this.handle, submitterDid); } catch (err: any) { throw new VCXInternalError(err); } diff --git a/wrappers/node/src/api/schema.ts b/wrappers/node/src/api/schema.ts index 5e1456f736..74dcc7de17 100644 --- a/wrappers/node/src/api/schema.ts +++ b/wrappers/node/src/api/schema.ts @@ -70,10 +70,11 @@ export class Schema extends VcxBase { return this._name; } - public static async create({ data, sourceId }: ISchemaCreateData): Promise { + public static async create({ data, sourceId }: ISchemaCreateData, issuerDid: string): Promise { try { const schema = new Schema({ name: data.name, schemaId: '', schemaAttrs: data }); const handle = await ffi.schemaCreate( + issuerDid, sourceId, schema._name, data.version, diff --git a/wrappers/node/src/api/wallet.ts b/wrappers/node/src/api/wallet.ts index 2f2e3aeaef..3d86954d05 100644 --- a/wrappers/node/src/api/wallet.ts +++ b/wrappers/node/src/api/wallet.ts @@ -9,6 +9,22 @@ export async function createWallet(config: object): Promise { } } +export async function migrateWallet(config: object): Promise { + try { + return await ffi.walletMigrate(JSON.stringify(config)); + } catch (err: any) { + throw new VCXInternalError(err); + } +} + +export async function deleteWallet(config: object): Promise { + try { + return await ffi.walletDelete(JSON.stringify(config)); + } catch (err: any) { + throw new VCXInternalError(err); + } +} + export async function configureIssuerWallet(seed: string):
Promise { try { return await ffi.configureIssuerWallet(seed); diff --git a/wrappers/node/test/helpers/entities.ts b/wrappers/node/test/helpers/entities.ts index 8cb142123b..878ccdd753 100644 --- a/wrappers/node/test/helpers/entities.ts +++ b/wrappers/node/test/helpers/entities.ts @@ -2,26 +2,26 @@ import '../module-resolver-helper'; import { assert } from 'chai'; import { - ARIES_CREDENTIAL_OFFER, - ARIES_PROOF_REQUEST, - Connection, - Credential, - CredentialDef, - DisclosedProof, - IConnectionCreateData, - ICredentialCreateWithOffer, - ICredentialDefCreateDataV2, - IDisclosedProofCreateData, - IIssuerCredentialBuildOfferDataV2, - IProofCreateData, IProofCreateDataV2, - ISchemaCreateData, - IssuerCredential, - Proof, - RevocationRegistry, - Schema, + ARIES_CREDENTIAL_OFFER, + ARIES_PROOF_REQUEST, + Connection, + Credential, + CredentialDef, + DisclosedProof, + IConnectionCreateData, + ICredentialCreateWithOffer, + ICredentialDefCreateDataV2, + IDisclosedProofCreateData, + IIssuerCredentialBuildOfferDataV2, + IProofCreateData, IProofCreateDataV2, + ISchemaCreateData, + IssuerCredential, + Proof, + RevocationRegistry, + Schema, } from 'src'; import * as uuid from 'uuid'; -import { ARIES_CONNECTION_ACK, ARIES_CONNECTION_REQUEST } from './mockdata'; +import { ARIES_CONNECTION_ACK, ARIES_CONNECTION_REQUEST, ARIES_ISSUER_DID } from './mockdata'; export const dataConnectionCreate = (): IConnectionCreateData => ({ id: `testConnectionId-${uuid.v4()}`, @@ -60,6 +60,7 @@ export const createConnectionInviterFinished = async ( }; export const dataCredentialDefCreate = (): ICredentialDefCreateDataV2 => ({ + issuerDid: ARIES_ISSUER_DID, schemaId: 'testCredentialDefSchemaId', sourceId: 'testCredentialDefSourceId', supportRevocation: true, @@ -165,7 +166,7 @@ export const dataProofCreate = (): IProofCreateDataV2 => ({ ref3: { names: ['attr3', 'attr4'] }, }, name: 'Proof', - preds: { pred1: { name: 'pred1', p_type: 'GE', p_value: 123 }}, + preds: { pred1: { name: 'pred1', p_type: 'GE', p_value: 123 } }, revocationInterval: { from: undefined, to: undefined, @@ -189,7 +190,7 @@ export const dataSchemaCreate = (): ISchemaCreateData => ({ }); export const schemaCreate = async (data = dataSchemaCreate()): Promise => { - const schema = await Schema.create(data); + const schema = await Schema.create(data, ARIES_ISSUER_DID); assert.notEqual(schema.handle, undefined); assert.equal(schema.name, data.data.name); assert.deepEqual(schema.schemaAttrs, data.data); diff --git a/wrappers/node/test/helpers/mockdata.ts b/wrappers/node/test/helpers/mockdata.ts index 81c786f6a3..4fc24317d3 100644 --- a/wrappers/node/test/helpers/mockdata.ts +++ b/wrappers/node/test/helpers/mockdata.ts @@ -1,3 +1,5 @@ +export const ARIES_ISSUER_DID = "2hoqvcwupRTUNkXn6ArYzs"; + export const ARIES_PING = { '@type': 'https://didcomm.org/trust_ping/1.0/ping', '@id': '518be002-de8e-456e-b3d5-8fe472477a86', diff --git a/wrappers/node/test/helpers/utils.ts b/wrappers/node/test/helpers/utils.ts index 372aa91f2b..4c517a3595 100644 --- a/wrappers/node/test/helpers/utils.ts +++ b/wrappers/node/test/helpers/utils.ts @@ -34,12 +34,6 @@ function generateWalletConfig() { }; } -export async function initVcxTestMode(): Promise { - const institution_did = await initVcx(); - vcx.enableMocks(); - return institution_did -} - export async function initVcx(): Promise { const rustLogPattern = process.env.RUST_LOG || 'vcx=error'; vcx.defaultLogger(rustLogPattern); @@ -50,9 +44,7 @@ export async function initVcx(): Promise { const issuerConfig = { institution_did, }; - 
await vcx.initIssuerConfig(issuerConfig); vcx.createAgencyClientForMainWallet(configAgency); - vcx.enableMocks(); return institution_did } diff --git a/wrappers/node/test/integration/ledger.test.ts b/wrappers/node/test/integration/ledger.test.ts index fb32cf1a04..c86162145b 100644 --- a/wrappers/node/test/integration/ledger.test.ts +++ b/wrappers/node/test/integration/ledger.test.ts @@ -1,10 +1,10 @@ import '../module-resolver-helper'; import * as path from 'path'; -import {createAndStoreDid, openMainPool, shutdownVcx, writeEndorserDid} from 'src'; -import { initVcx, initVcxTestMode } from '../helpers/utils'; +import { createAndStoreDid, openMainPool, shutdownVcx, writeEndorserDid } from 'src'; +import { initVcx } from '../helpers/utils'; import { expect } from 'chai'; -const seed = '1234567890123456789012'; +const seed = '00000LookAtMeIAmARandomSeed00000'; // @ts-ignore let publicDid: string; @@ -24,8 +24,8 @@ describe('wallet', () => { const genesisPath = path.join(__dirname, '/../../resources/localhost.txn'); await openMainPool({ genesis_path: genesisPath }); const pwInfo = await createAndStoreDid(seed); - expect(pwInfo.pw_did).equal('FhrSrYtQcw3p9xwf7NYemf'); - expect(pwInfo.pw_vk).equal('91qMFrZjXDoi2Vc8Mm14Ys112tEZdDegBZZoembFEATE'); + expect(pwInfo.pw_did).equal('WSM9V5siLPUbx4BNnLH6Fj'); + expect(pwInfo.pw_vk).equal('H3Zchkv2nSYRZTVL66rhLnwJF9veJCYAm5eeBPoANyTH'); }); it('write new endorser did', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-connection.test.ts b/wrappers/node/test/suite1/ariesvcx-connection.test.ts index bb78907e48..25f7b290af 100644 --- a/wrappers/node/test/suite1/ariesvcx-connection.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-connection.test.ts @@ -1,17 +1,17 @@ import '../module-resolver-helper'; import { assert, expect } from 'chai'; -import { initVcxTestMode } from 'helpers/utils'; -import {ConnectionStateType } from 'src'; +import { initVcx } from 'helpers/utils'; +import { ConnectionStateType } from 'src'; import { NonmediatedConnection } from 'src'; describe('Nonmediated connection:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create invitation:', () => { it('success', async () => { const serviceEndpoint = 'http://localhost:8080/'; - const routingKeys = [ 'routingKey' ]; + const routingKeys = ['routingKey']; const connection = await NonmediatedConnection.createInviter(); assert.equal(connection.getState(), ConnectionStateType.Initial); diff --git a/wrappers/node/test/suite1/ariesvcx-credential-def.test.ts b/wrappers/node/test/suite1/ariesvcx-credential-def.test.ts index 2fab19ec15..6eeb3824f1 100644 --- a/wrappers/node/test/suite1/ariesvcx-credential-def.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-credential-def.test.ts @@ -2,11 +2,11 @@ import '../module-resolver-helper'; import { assert } from 'chai'; import { credentialDefCreate } from 'helpers/entities'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { CredentialDef, VCXCode } from 'src'; describe('CredentialDef:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('success', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-credential.test.ts b/wrappers/node/test/suite1/ariesvcx-credential.test.ts index 9d4a32b7d4..a08933670d 100644 --- a/wrappers/node/test/suite1/ariesvcx-credential.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-credential.test.ts @@ -6,11 +6,11 @@ import { credentialCreateWithOffer, 
dataCredentialCreateWithOffer, } from 'helpers/entities'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { Credential, HolderStateType, VCXCode } from 'src'; describe('Credential:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('success', async () => { @@ -79,7 +79,7 @@ describe('Credential:', () => { const credential = await Credential.create(data); assert.equal(await credential.getState(), HolderStateType.OfferReceived); await credential.sendRequest({ connection }); - assert.equal(await credential.getState(), HolderStateType.RequestSent); + assert.equal(await credential.getState(), HolderStateType.RequestSet); }); }); @@ -88,7 +88,7 @@ describe('Credential:', () => { const data = await dataCredentialCreateWithOffer(); const credential = await credentialCreateWithOffer(data); await credential.sendRequest({ connection: data.connection }); - assert.equal(await credential.getState(), HolderStateType.RequestSent); + assert.equal(await credential.getState(), HolderStateType.RequestSet); }); }); diff --git a/wrappers/node/test/suite1/ariesvcx-disclosed-proof.test.ts b/wrappers/node/test/suite1/ariesvcx-disclosed-proof.test.ts index 5f8f7a3d82..24c3ce1990 100644 --- a/wrappers/node/test/suite1/ariesvcx-disclosed-proof.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-disclosed-proof.test.ts @@ -6,11 +6,11 @@ import { dataDisclosedProofCreateWithRequest, disclosedProofCreateWithRequest, } from 'helpers/entities'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { DisclosedProof, ProverStateType, VCXCode } from 'src'; describe('DisclosedProof', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('success1', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-issuer-credential.test.ts b/wrappers/node/test/suite1/ariesvcx-issuer-credential.test.ts index 3630100142..8d2e677b56 100644 --- a/wrappers/node/test/suite1/ariesvcx-issuer-credential.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-issuer-credential.test.ts @@ -7,11 +7,11 @@ import { dataCredentialCreateWithOffer, issuerCredentialCreate, } from 'helpers/entities'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { Connection, Credential, IssuerCredential, IssuerStateType, VCXCode } from 'src'; describe('IssuerCredential:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('success', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-mediated-connection.test.ts b/wrappers/node/test/suite1/ariesvcx-mediated-connection.test.ts index 6658a12b9e..ff0a27e231 100644 --- a/wrappers/node/test/suite1/ariesvcx-mediated-connection.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-mediated-connection.test.ts @@ -8,12 +8,12 @@ import { createConnectionInviterRequested, dataConnectionCreate, } from 'helpers/entities'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { Connection, MediatedConnectionStateType, VCXCode } from 'src'; import { ARIES_PING, ARIES_PING_RESPONSE } from '../helpers/mockdata'; describe('Connection:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('success', async () => { diff --git 
a/wrappers/node/test/suite1/ariesvcx-oob.test.ts b/wrappers/node/test/suite1/ariesvcx-oob.test.ts index 6b6a5cc0bd..d87f671d08 100644 --- a/wrappers/node/test/suite1/ariesvcx-oob.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-oob.test.ts @@ -1,6 +1,6 @@ import '../module-resolver-helper'; -import { initVcxTestMode } from 'helpers/utils'; +import { initVcx } from 'helpers/utils'; import { GoalCode, OutOfBandSender, OutOfBandReceiver, HandshakeProtocol } from 'src'; import { assert } from 'chai'; @@ -28,7 +28,7 @@ const credentialOffer = { }; describe('Out of Band:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('success', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-proof.test.ts b/wrappers/node/test/suite1/ariesvcx-proof.test.ts index 9c1916b832..507bd81cf1 100644 --- a/wrappers/node/test/suite1/ariesvcx-proof.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-proof.test.ts @@ -7,11 +7,11 @@ import { dataProofCreateLegacy, dataProofCreate, proofCreate, } from 'helpers/entities'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { Proof, VerifierStateType, VCXCode } from 'src'; describe('Proof:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('lgeacy success', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-schema.test.ts b/wrappers/node/test/suite1/ariesvcx-schema.test.ts index 12f5419ca9..ab7d723e87 100644 --- a/wrappers/node/test/suite1/ariesvcx-schema.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-schema.test.ts @@ -2,11 +2,11 @@ import '../module-resolver-helper'; import { assert } from 'chai'; import { dataSchemaCreate, schemaCreate } from 'helpers/entities'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { Schema, SchemaState, VCXCode } from 'src'; describe('Schema:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('create:', () => { it('success', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-utils.test.ts b/wrappers/node/test/suite1/ariesvcx-utils.test.ts index fee3cd59ca..5e1ab1cc75 100644 --- a/wrappers/node/test/suite1/ariesvcx-utils.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-utils.test.ts @@ -1,7 +1,7 @@ import '../module-resolver-helper'; import { assert } from 'chai'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { getLedgerAuthorAgreement, getVersion, @@ -11,7 +11,7 @@ import { } from 'src'; describe('utils:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('provisionAgent:', () => { it('success', async () => { diff --git a/wrappers/node/test/suite1/ariesvcx-wallet.test.ts b/wrappers/node/test/suite1/ariesvcx-wallet.test.ts index c8a0e3bb4b..853c6d5a3b 100644 --- a/wrappers/node/test/suite1/ariesvcx-wallet.test.ts +++ b/wrappers/node/test/suite1/ariesvcx-wallet.test.ts @@ -1,10 +1,10 @@ import '../module-resolver-helper'; import { assert } from 'chai'; -import { initVcxTestMode, shouldThrow } from 'helpers/utils'; +import { initVcx, shouldThrow } from 'helpers/utils'; import { shutdownVcx, VCXCode, Wallet } from 'src'; describe('Wallet:', () => { - before(() => initVcxTestMode()); + before(() => initVcx()); describe('import:', () => { it('throws: libindy error', async () => { diff --git a/wrappers/vcx-napi-rs/Cargo.toml 
b/wrappers/vcx-napi-rs/Cargo.toml index bfe079bb51..fe14f1a632 100644 --- a/wrappers/vcx-napi-rs/Cargo.toml +++ b/wrappers/vcx-napi-rs/Cargo.toml @@ -11,14 +11,9 @@ path = "src/lib.rs" crate-type = ["cdylib"] doctest = false -[features] -default = ["anoncreds_vdrtools"] - -anoncreds_credx = ["libvcx_core/anoncreds_credx"] -anoncreds_vdrtools = ["libvcx_core/anoncreds_vdrtools"] - [dependencies] libvcx_core = { path = "../../libvcx_core" } +wallet_migrator = { path = "../../wallet_migrator" } log = "0.4.16" napi = { version = "2.10.14", default-features = false, features = [ "async" ] } napi-derive = { version = "2.10.1" } diff --git a/wrappers/vcx-napi-rs/index.d.ts b/wrappers/vcx-napi-rs/index.d.ts index 97496b9c71..80d8f0cc6d 100644 --- a/wrappers/vcx-napi-rs/index.d.ts +++ b/wrappers/vcx-napi-rs/index.d.ts @@ -46,7 +46,7 @@ export function credentialGetTailsLocation(handle: number): string export function credentialGetTailsHash(handle: number): string export function credentialGetRevRegId(handle: number): string export function credentialGetThreadId(handle: number): string -export function credentialdefCreateV2(sourceId: string, schemaId: string, tag: string, supportRevocation: boolean): Promise +export function credentialdefCreateV2(issuerDid: string, sourceId: string, schemaId: string, tag: string, supportRevocation: boolean): Promise export function credentialdefPublish(handle: number): Promise export function credentialdefDeserialize(serialized: string): number export function credentialdefRelease(handle: number): void @@ -166,7 +166,7 @@ export function proofGetThreadId(handle: number): string export function markPresentationRequestMsgSent(handle: number): void export function revocationRegistryCreate(config: string): Promise export function revocationRegistryPublish(handle: number, tailsUrl: string): Promise -export function revocationRegistryPublishRevocations(handle: number): Promise +export function revocationRegistryPublishRevocations(handle: number, submitterDid: string): Promise export function revocationRegistryGetRevRegId(handle: number): string export function revocationRegistryGetTailsHash(handle: number): string export function revocationRegistrySerialize(handle: number): string @@ -174,14 +174,13 @@ export function revocationRegistryDeserialize(data: string): number export function revocationRegistryRelease(handle: number): void export function schemaGetAttributes(sourceId: string, schemaId: string): void export function schemaPrepareForEndorser(): void -export function schemaCreate(sourceId: string, name: string, version: string, data: string): Promise +export function schemaCreate(issuerDid: string, sourceId: string, name: string, version: string, data: string): Promise export function schemaGetSchemaId(handleSchema: number): string export function schemaDeserialize(serialized: string): number export function schemaSerialize(handleSchema: number): string export function schemaRelease(handleSchema: number): void export function schemaUpdateState(handleSchema: number): Promise export function schemaGetState(handleSchema: number): number -export function enableMocks(): void export function trustpingBuildResponseMsg(ping: string): string export function trustpingBuildPing(requestResponse: boolean, comment?: string | undefined | null): string export function shutdown(deleteAll?: boolean | undefined | null): void @@ -189,12 +188,13 @@ export function getVersion(): string export function walletOpenAsMain(walletConfig: string): Promise export function 
walletCreateMain(walletConfig: string): Promise export function walletCloseMain(): Promise -export function vcxInitIssuerConfig(config: string): Promise export function configureIssuerWallet(enterpriseSeed: string): Promise export function unpack(data: Buffer): Promise export function createAndStoreDid(seed?: string | undefined | null): Promise export function walletImport(config: string): Promise export function walletExport(path: string, backupKey: string): Promise +export function walletMigrate(walletConfig: string): Promise +export function walletDelete(walletConfig: string): Promise export function getVerkeyFromWallet(did: string): Promise export function rotateVerkey(did: string): Promise export function rotateVerkeyStart(did: string): Promise diff --git a/wrappers/vcx-napi-rs/index.js b/wrappers/vcx-napi-rs/index.js index 3f9ec9a979..33378a515c 100644 --- a/wrappers/vcx-napi-rs/index.js +++ b/wrappers/vcx-napi-rs/index.js @@ -252,7 +252,7 @@ if (!nativeBinding) { throw new Error(`Failed to load native binding`) } -const { updateWebhookUrl, createAgencyClientForMainWallet, provisionCloudAgent, messagesUpdateStatus, generatePublicInvitation, connectionCreateInviter, connectionCreateInvitee, connectionGetThreadId, connectionGetPairwiseInfo, connectionGetRemoteDid, connectionGetRemoteVk, connectionGetState, connectionGetInvitation, connectionProcessInvite, connectionProcessRequest, connectionProcessResponse, connectionProcessAck, connectionProcessProblemReport, connectionSendResponse, connectionSendRequest, connectionSendAck, connectionSendGenericMessage, connectionSendAriesMessage, connectionCreateInvite, connectionSerialize, connectionDeserialize, connectionRelease, credentialCreateWithOffer, credentialRelease, credentialSendRequest, credentialDeclineOffer, credentialSerialize, credentialDeserialize, v2CredentialUpdateStateWithMessage, v2CredentialUpdateState, credentialGetState, credentialGetOffers, credentialGetAttributes, credentialGetAttachment, credentialGetTailsLocation, credentialGetTailsHash, credentialGetRevRegId, credentialGetThreadId, credentialdefCreateV2, credentialdefPublish, credentialdefDeserialize, credentialdefRelease, credentialdefSerialize, credentialdefGetCredDefId, credentialdefUpdateState, credentialdefGetState, disclosedProofCreateWithRequest, disclosedProofRelease, disclosedProofSendProof, disclosedProofRejectProof, disclosedProofGetProofMsg, disclosedProofSerialize, disclosedProofDeserialize, v2DisclosedProofUpdateState, v2DisclosedProofUpdateStateWithMessage, disclosedProofGetState, disclosedProofGetRequests, disclosedProofRetrieveCredentials, disclosedProofGetProofRequestAttachment, disclosedProofGenerateProof, disclosedProofDeclinePresentationRequest, disclosedProofGetThreadId, issuerCredentialDeserialize, issuerCredentialSerialize, issuerCredentialUpdateStateV2, issuerCredentialUpdateStateWithMessageV2, issuerCredentialUpdateStateWithMessageNonmediated, issuerCredentialGetState, issuerCredentialGetRevRegId, issuerCredentialCreate, issuerCredentialRevokeLocal, issuerCredentialIsRevokable, issuerCredentialGetRevocationId, issuerCredentialSendCredential, issuerCredentialSendCredentialNonmediated, issuerCredentialSendOfferV2, issuerCredentialSendOfferNonmediated, issuerCredentialBuildOfferMsgV2, issuerCredentialGetOfferMsg, issuerCredentialRelease, issuerCredentialGetThreadId, getLedgerAuthorAgreement, setActiveTxnAuthorAgreementMeta, createService, createServiceV2, getServiceFromLedger, getAttrFromLedger, clearAttrFromLedger, writeEndorserDid, 
getVerkeyFromLedger, getLedgerTxn, initDefaultLogger, mediatedConnectionGeneratePublicInvite, mediatedConnectionGetPwDid, mediatedConnectionGetTheirPwDid, mediatedConnectionGetThreadId, mediatedConnectionGetState, mediatedConnectionGetSourceId, mediatedConnectionCreate, mediatedConnectionCreateWithInvite, mediatedConnectionSendMessage, mediatedConnectionCreateWithConnectionRequestV2, mediatedConnectionSendHandshakeReuse, mediatedConnectionUpdateStateWithMessage, mediatedConnectionHandleMessage, mediatedConnectionUpdateState, mediatedConnectionDeleteConnection, mediatedConnectionConnect, mediatedConnectionSerialize, mediatedConnectionDeserialize, mediatedConnectionRelease, mediatedConnectionInviteDetails, mediatedConnectionSendPing, mediatedConnectionSendDiscoveryFeatures, mediatedConnectionInfo, mediatedConnectionMessagesDownload, mediatedConnectionSignData, mediatedConnectionVerifySignature, outOfBandBuildHandshakeReuseAcceptedMsg, outOfBandReceiverCreate, outOfBandReceiverExtractMessage, outOfBandReceiverConnectionExists, outOfBandReceiverNonmediatedConnectionExists, outOfBandReceiverBuildConnection, outOfBandReceiverGetThreadId, outOfBandReceiverSerialize, outOfBandReceiverDeserialize, outOfBandReceiverRelease, outOfBandSenderCreate, outOfBandSenderAppendMessage, outOfBandSenderAppendService, outOfBandSenderAppendServiceDid, outOfBandSenderToMessage, outOfBandSenderGetThreadId, outOfBandSenderSerialize, outOfBandSenderDeserialize, outOfBandSenderRelease, openMainPool, closeMainPool, proofCreate, proofGetPresentationMsg, proofGetPresentationRequestAttachment, proofGetPresentationAttachment, proofRelease, proofSendRequest, proofSendRequestNonmediated, proofGetRequestMsg, proofSerialize, proofDeserialize, v2ProofUpdateState, v2ProofUpdateStateWithMessage, proofUpdateStateWithMessageNonmediated, proofGetState, proofGetVerificationStatus, proofGetThreadId, markPresentationRequestMsgSent, revocationRegistryCreate, revocationRegistryPublish, revocationRegistryPublishRevocations, revocationRegistryGetRevRegId, revocationRegistryGetTailsHash, revocationRegistrySerialize, revocationRegistryDeserialize, revocationRegistryRelease, schemaGetAttributes, schemaPrepareForEndorser, schemaCreate, schemaGetSchemaId, schemaDeserialize, schemaSerialize, schemaRelease, schemaUpdateState, schemaGetState, enableMocks, trustpingBuildResponseMsg, trustpingBuildPing, shutdown, getVersion, walletOpenAsMain, walletCreateMain, walletCloseMain, vcxInitIssuerConfig, configureIssuerWallet, unpack, createAndStoreDid, walletImport, walletExport, getVerkeyFromWallet, rotateVerkey, rotateVerkeyStart, rotateVerkeyApply } = nativeBinding +const { updateWebhookUrl, createAgencyClientForMainWallet, provisionCloudAgent, messagesUpdateStatus, generatePublicInvitation, connectionCreateInviter, connectionCreateInvitee, connectionGetThreadId, connectionGetPairwiseInfo, connectionGetRemoteDid, connectionGetRemoteVk, connectionGetState, connectionGetInvitation, connectionProcessInvite, connectionProcessRequest, connectionProcessResponse, connectionProcessAck, connectionProcessProblemReport, connectionSendResponse, connectionSendRequest, connectionSendAck, connectionSendGenericMessage, connectionSendAriesMessage, connectionCreateInvite, connectionSerialize, connectionDeserialize, connectionRelease, credentialCreateWithOffer, credentialRelease, credentialSendRequest, credentialDeclineOffer, credentialSerialize, credentialDeserialize, v2CredentialUpdateStateWithMessage, v2CredentialUpdateState, credentialGetState, credentialGetOffers, 
credentialGetAttributes, credentialGetAttachment, credentialGetTailsLocation, credentialGetTailsHash, credentialGetRevRegId, credentialGetThreadId, credentialdefCreateV2, credentialdefPublish, credentialdefDeserialize, credentialdefRelease, credentialdefSerialize, credentialdefGetCredDefId, credentialdefUpdateState, credentialdefGetState, disclosedProofCreateWithRequest, disclosedProofRelease, disclosedProofSendProof, disclosedProofRejectProof, disclosedProofGetProofMsg, disclosedProofSerialize, disclosedProofDeserialize, v2DisclosedProofUpdateState, v2DisclosedProofUpdateStateWithMessage, disclosedProofGetState, disclosedProofGetRequests, disclosedProofRetrieveCredentials, disclosedProofGetProofRequestAttachment, disclosedProofGenerateProof, disclosedProofDeclinePresentationRequest, disclosedProofGetThreadId, issuerCredentialDeserialize, issuerCredentialSerialize, issuerCredentialUpdateStateV2, issuerCredentialUpdateStateWithMessageV2, issuerCredentialUpdateStateWithMessageNonmediated, issuerCredentialGetState, issuerCredentialGetRevRegId, issuerCredentialCreate, issuerCredentialRevokeLocal, issuerCredentialIsRevokable, issuerCredentialGetRevocationId, issuerCredentialSendCredential, issuerCredentialSendCredentialNonmediated, issuerCredentialSendOfferV2, issuerCredentialSendOfferNonmediated, issuerCredentialBuildOfferMsgV2, issuerCredentialGetOfferMsg, issuerCredentialRelease, issuerCredentialGetThreadId, getLedgerAuthorAgreement, setActiveTxnAuthorAgreementMeta, createService, createServiceV2, getServiceFromLedger, getAttrFromLedger, clearAttrFromLedger, writeEndorserDid, getVerkeyFromLedger, getLedgerTxn, initDefaultLogger, mediatedConnectionGeneratePublicInvite, mediatedConnectionGetPwDid, mediatedConnectionGetTheirPwDid, mediatedConnectionGetThreadId, mediatedConnectionGetState, mediatedConnectionGetSourceId, mediatedConnectionCreate, mediatedConnectionCreateWithInvite, mediatedConnectionSendMessage, mediatedConnectionCreateWithConnectionRequestV2, mediatedConnectionSendHandshakeReuse, mediatedConnectionUpdateStateWithMessage, mediatedConnectionHandleMessage, mediatedConnectionUpdateState, mediatedConnectionDeleteConnection, mediatedConnectionConnect, mediatedConnectionSerialize, mediatedConnectionDeserialize, mediatedConnectionRelease, mediatedConnectionInviteDetails, mediatedConnectionSendPing, mediatedConnectionSendDiscoveryFeatures, mediatedConnectionInfo, mediatedConnectionMessagesDownload, mediatedConnectionSignData, mediatedConnectionVerifySignature, outOfBandBuildHandshakeReuseAcceptedMsg, outOfBandReceiverCreate, outOfBandReceiverExtractMessage, outOfBandReceiverConnectionExists, outOfBandReceiverNonmediatedConnectionExists, outOfBandReceiverBuildConnection, outOfBandReceiverGetThreadId, outOfBandReceiverSerialize, outOfBandReceiverDeserialize, outOfBandReceiverRelease, outOfBandSenderCreate, outOfBandSenderAppendMessage, outOfBandSenderAppendService, outOfBandSenderAppendServiceDid, outOfBandSenderToMessage, outOfBandSenderGetThreadId, outOfBandSenderSerialize, outOfBandSenderDeserialize, outOfBandSenderRelease, openMainPool, closeMainPool, proofCreate, proofGetPresentationMsg, proofGetPresentationRequestAttachment, proofGetPresentationAttachment, proofRelease, proofSendRequest, proofSendRequestNonmediated, proofGetRequestMsg, proofSerialize, proofDeserialize, v2ProofUpdateState, v2ProofUpdateStateWithMessage, proofUpdateStateWithMessageNonmediated, proofGetState, proofGetVerificationStatus, proofGetThreadId, markPresentationRequestMsgSent, revocationRegistryCreate, 
revocationRegistryPublish, revocationRegistryPublishRevocations, revocationRegistryGetRevRegId, revocationRegistryGetTailsHash, revocationRegistrySerialize, revocationRegistryDeserialize, revocationRegistryRelease, schemaGetAttributes, schemaPrepareForEndorser, schemaCreate, schemaGetSchemaId, schemaDeserialize, schemaSerialize, schemaRelease, schemaUpdateState, schemaGetState, trustpingBuildResponseMsg, trustpingBuildPing, shutdown, getVersion, walletOpenAsMain, walletCreateMain, walletCloseMain, configureIssuerWallet, unpack, createAndStoreDid, walletImport, walletExport, walletMigrate, walletDelete, getVerkeyFromWallet, rotateVerkey, rotateVerkeyStart, rotateVerkeyApply } = nativeBinding
 
 module.exports.updateWebhookUrl = updateWebhookUrl
 module.exports.createAgencyClientForMainWallet = createAgencyClientForMainWallet
@@ -432,7 +432,6 @@ module.exports.schemaSerialize = schemaSerialize
 module.exports.schemaRelease = schemaRelease
 module.exports.schemaUpdateState = schemaUpdateState
 module.exports.schemaGetState = schemaGetState
-module.exports.enableMocks = enableMocks
 module.exports.trustpingBuildResponseMsg = trustpingBuildResponseMsg
 module.exports.trustpingBuildPing = trustpingBuildPing
 module.exports.shutdown = shutdown
@@ -440,12 +439,13 @@ module.exports.getVersion = getVersion
 module.exports.walletOpenAsMain = walletOpenAsMain
 module.exports.walletCreateMain = walletCreateMain
 module.exports.walletCloseMain = walletCloseMain
-module.exports.vcxInitIssuerConfig = vcxInitIssuerConfig
 module.exports.configureIssuerWallet = configureIssuerWallet
 module.exports.unpack = unpack
 module.exports.createAndStoreDid = createAndStoreDid
 module.exports.walletImport = walletImport
 module.exports.walletExport = walletExport
+module.exports.walletMigrate = walletMigrate
+module.exports.walletDelete = walletDelete
 module.exports.getVerkeyFromWallet = getVerkeyFromWallet
 module.exports.rotateVerkey = rotateVerkey
 module.exports.rotateVerkeyStart = rotateVerkeyStart
diff --git a/wrappers/vcx-napi-rs/src/api/credential_definition.rs b/wrappers/vcx-napi-rs/src/api/credential_definition.rs
index b841cc6837..42d5a0a8e6 100644
--- a/wrappers/vcx-napi-rs/src/api/credential_definition.rs
+++ b/wrappers/vcx-napi-rs/src/api/credential_definition.rs
@@ -5,12 +5,13 @@ use crate::error::to_napi_err;
 
 #[napi]
 async fn credentialdef_create_v2_(
+    issuer_did: String,
     source_id: String,
     schema_id: String,
     tag: String,
     support_revocation: bool,
 ) -> napi::Result {
-    credential_def::create(source_id, schema_id, tag, support_revocation)
+    credential_def::create(issuer_did, source_id, schema_id, tag, support_revocation)
         .await
         .map_err(to_napi_err)
 }
diff --git a/wrappers/vcx-napi-rs/src/api/mod.rs b/wrappers/vcx-napi-rs/src/api/mod.rs
index bb94976c21..741d61ee88 100644
--- a/wrappers/vcx-napi-rs/src/api/mod.rs
+++ b/wrappers/vcx-napi-rs/src/api/mod.rs
@@ -15,7 +15,6 @@ pub mod pool;
 pub mod proof;
 pub mod revocation_registry;
 pub mod schema;
-pub mod testing;
 pub mod trustping;
 pub mod utils;
 pub mod wallet;
diff --git a/wrappers/vcx-napi-rs/src/api/revocation_registry.rs b/wrappers/vcx-napi-rs/src/api/revocation_registry.rs
index dfd9522eed..2ef9d8c0ef 100644
--- a/wrappers/vcx-napi-rs/src/api/revocation_registry.rs
+++ b/wrappers/vcx-napi-rs/src/api/revocation_registry.rs
@@ -30,8 +30,11 @@ async fn revocation_registry_publish(handle: u32, tails_url: String) -> napi::Result
 }
 
 #[napi]
-async fn revocation_registry_publish_revocations(handle: u32) -> napi::Result<()> {
-    revocation_registry::publish_revocations(handle)
+async fn revocation_registry_publish_revocations(
+    handle: u32,
+    submitter_did: String,
+) -> napi::Result<()> {
+    revocation_registry::publish_revocations(handle, &submitter_did)
         .await
         .map_err(to_napi_err)
 }
diff --git a/wrappers/vcx-napi-rs/src/api/schema.rs b/wrappers/vcx-napi-rs/src/api/schema.rs
index 2d93a3f2f3..02949df21a 100644
--- a/wrappers/vcx-napi-rs/src/api/schema.rs
+++ b/wrappers/vcx-napi-rs/src/api/schema.rs
@@ -15,12 +15,13 @@ fn schema_prepare_for_endorser() -> napi::Result<()> {
 
 #[napi]
 async fn schema_create(
+    issuer_did: String,
     source_id: String,
     name: String,
     version: String,
     data: String,
 ) -> napi::Result {
-    schema::create_and_publish_schema(&source_id, name, version, data)
+    schema::create_and_publish_schema(&issuer_did, &source_id, name, version, data)
         .await
         .map_err(to_napi_err)
 }
diff --git a/wrappers/vcx-napi-rs/src/api/testing.rs b/wrappers/vcx-napi-rs/src/api/testing.rs
deleted file mode 100644
index 59b148cab2..0000000000
--- a/wrappers/vcx-napi-rs/src/api/testing.rs
+++ /dev/null
@@ -1,10 +0,0 @@
-use libvcx_core::api_vcx::api_global::settings;
-use napi_derive::napi;
-
-use crate::error::to_napi_err;
-
-#[napi]
-pub fn enable_mocks() -> ::napi::Result<()> {
-    settings::enable_mocks().map_err(to_napi_err)?;
-    Ok(())
-}
diff --git a/wrappers/vcx-napi-rs/src/api/wallet.rs b/wrappers/vcx-napi-rs/src/api/wallet.rs
index 238cb4efef..d089b46ada 100644
--- a/wrappers/vcx-napi-rs/src/api/wallet.rs
+++ b/wrappers/vcx-napi-rs/src/api/wallet.rs
@@ -1,6 +1,8 @@
 use libvcx_core::{
-    api_vcx::api_global::{ledger, settings::settings_init_issuer_config, wallet},
-    aries_vcx::aries_vcx_core::wallet::indy::{IssuerConfig, RestoreWalletConfigs, WalletConfig},
+    api_vcx::api_global::{ledger, wallet},
+    aries_vcx::aries_vcx_core::wallet::indy::{
+        wallet::delete_wallet, RestoreWalletConfigs, WalletConfig,
+    },
     errors::error::{LibvcxError, LibvcxErrorKind},
     serde_json,
     serde_json::json,
@@ -46,19 +48,6 @@ pub async fn wallet_close_main() -> napi::Result<()> {
     wallet::close_main_wallet().await.map_err(to_napi_err)
 }
 
-#[napi]
-pub async fn vcx_init_issuer_config(config: String) -> napi::Result<()> {
-    let config = serde_json::from_str::(&config)
-        .map_err(|err| {
-            LibvcxError::from_msg(
-                LibvcxErrorKind::InvalidConfiguration,
-                format!("Serialization error: {:?}", err),
-            )
-        })
-        .map_err(to_napi_err)?;
-    settings_init_issuer_config(&config).map_err(to_napi_err)
-}
-
 #[napi]
 pub async fn configure_issuer_wallet(enterprise_seed: String) -> napi::Result {
     let res = wallet::wallet_configure_issuer(&enterprise_seed)
@@ -70,9 +59,10 @@ pub async fn configure_issuer_wallet(enterprise_seed: String) -> napi::Result
 pub async fn unpack(data: Buffer) -> napi::Result {
     let data = data.as_ref();
-    wallet::wallet_unpack_message_to_string(data)
+    let unpacked = wallet::wallet_unpack_message(data)
         .await
-        .map_err(to_napi_err)
+        .map_err(to_napi_err)?;
+    serde_json::to_string(&unpacked).map_err(|err| napi::Error::from_reason(err.to_string()))
 }
@@ -103,6 +93,38 @@ pub async fn wallet_export(path: String, backup_key: String) -> napi::Result<()>
         .map_err(to_napi_err)
 }
 
+#[napi]
+pub async fn wallet_migrate(wallet_config: String) -> napi::Result<()> {
+    let wallet_config = serde_json::from_str(&wallet_config)
+        .map_err(|err| {
+            LibvcxError::from_msg(
+                LibvcxErrorKind::InvalidConfiguration,
+                format!("Serialization error: {:?}", err),
+            )
+        })
+        .map_err(to_napi_err)?;
+
+    wallet::wallet_migrate(&wallet_config)
+        .await
+        .map_err(|e| napi::Error::from_reason(e.to_string()))
+}
+
+#[napi]
+pub async fn wallet_delete(wallet_config: String) -> napi::Result<()> {
+    let wallet_config = serde_json::from_str(&wallet_config)
+        .map_err(|err| {
+            LibvcxError::from_msg(
+                LibvcxErrorKind::InvalidConfiguration,
+                format!("Serialization error: {:?}", err),
+            )
+        })
+        .map_err(to_napi_err)?;
+
+    delete_wallet(&wallet_config)
+        .await
+        .map_err(|e| napi::Error::from_reason(e.to_string()))
+}
+
 #[napi]
 pub async fn get_verkey_from_wallet(did: String) -> napi::Result {
     wallet::key_for_local_did(&did).await.map_err(to_napi_err)
 }
diff --git a/wrappers/vcx-napi-rs/tsconfig.json b/wrappers/vcx-napi-rs/tsconfig.json
index 605f4bd4c7..716c67db71 100644
--- a/wrappers/vcx-napi-rs/tsconfig.json
+++ b/wrappers/vcx-napi-rs/tsconfig.json
@@ -12,23 +12,29 @@
     "noUnusedLocals": true,
     "noUnusedParameters": true,
     "strict": true,
-    "skipLibCheck": true,
-    "suppressImplicitAnyIndexErrors": true,
-    "suppressExcessPropertyErrors": true,
     "forceConsistentCasingInFileNames": true,
     "preserveSymlinks": true,
-    "target": "ES2015",
     "sourceMap": true,
     "esModuleInterop": true,
     "stripInternal": true,
     "resolveJsonModule": true,
     "importsNotUsedAsValues": "remove",
-    "outDir": "scripts",
-    "lib": ["dom", "DOM.Iterable", "ES2019", "ES2020", "esnext"],
+    "lib": [
+      "dom",
+      "DOM.Iterable",
+      "ES2019",
+      "ES2020",
+      "esnext"
+    ],
     "outDir": "./dist",
     "target": "ES2018",
     "skipLibCheck": false
   },
-  "include": ["."],
-  "exclude": ["dist", "node_modules"]
-}
+  "include": [
+    "."
+  ],
+  "exclude": [
+    "dist",
+    "node_modules"
+  ]
+}
\ No newline at end of file
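
Usage sketch (not part of the patch): the calls below only exercise the signatures declared in the index.d.ts hunks above. The require path, the DID value, the schema attributes, and the wallet-config field names are illustrative assumptions, not values defined anywhere in this diff.

// sketch.js — assumes the built binding is loadable from the wrapper directory
const vcx = require('./wrappers/vcx-napi-rs/index.js')

const issuerDid = 'V4SGRU86Z58d6TV7PBUe6f' // placeholder DID
const walletConfig = JSON.stringify({
  wallet_name: 'demo-wallet', // assumed WalletConfig field names
  wallet_key: 'demo-key',
  wallet_key_derivation: 'RAW',
})

async function demo(revRegHandle) {
  // issuer/submitter DIDs are now explicit per-call arguments
  // instead of coming from the removed vcxInitIssuerConfig global:
  const schemaHandle = await vcx.schemaCreate(issuerDid, 'source-1', 'degree', '1.0', JSON.stringify(['name', 'year']))
  await vcx.credentialdefCreateV2(issuerDid, 'source-2', vcx.schemaGetSchemaId(schemaHandle), 'tag1', true)
  await vcx.revocationRegistryPublishRevocations(revRegHandle, issuerDid)

  // new wallet helpers exposed by this change:
  await vcx.walletMigrate(walletConfig)
  await vcx.walletDelete(walletConfig)
}

The design change the sketch reflects: issuer and submitter DIDs travel as explicit arguments on the schema, credential-definition and revocation-publishing calls, and wallet migration/deletion are exposed directly on the napi surface.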