diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..cd4692b79f --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ + # For details on how this file works refer to: + # - https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file +version: 2 +updates: + # Maintain dependencies for GitHub Actions + # - Check for updates once a week + # - Group all updates into a single PR + - package-ecosystem: github-actions + directory: / + schedule: + interval: weekly + groups: + all-actions: + patterns: [ "*" ] \ No newline at end of file diff --git a/.github/workflows/blackformat.yml b/.github/workflows/blackformat.yml index 4474603ea5..39f2345bf3 100644 --- a/.github/workflows/blackformat.yml +++ b/.github/workflows/blackformat.yml @@ -10,11 +10,11 @@ jobs: name: lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: "3.9" - name: Black Code Formatter Check # The version of black should be adjusted at the same time dev # dependencies are updated. - uses: psf/black@24.1.1 + uses: psf/black@24.2.0 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index e6f15917a0..e77074d757 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -17,13 +17,13 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/integrationtests.yml b/.github/workflows/integrationtests.yml index e7c5a23275..6d2fca9349 100644 --- a/.github/workflows/integrationtests.yml +++ b/.github/workflows/integrationtests.yml @@ -20,7 +20,7 @@ jobs: if: (github.event_name == 'pull_request' && github.repository == 'hyperledger/aries-cloudagent-python') || (github.event_name != 'pull_request') steps: - name: checkout-acapy - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: acapy #- name: run-von-network diff --git a/.github/workflows/nigthly.yml b/.github/workflows/nigthly.yml index 7af06f95a4..c6e01b95ce 100644 --- a/.github/workflows/nigthly.yml +++ b/.github/workflows/nigthly.yml @@ -26,7 +26,7 @@ jobs: commits_today: ${{ steps.commits.outputs.commits_today }} date: ${{ steps.date.outputs.date }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: print latest_commit run: echo ${{ github.sha }} - name: Get new commits diff --git a/.github/workflows/pip-audit.yml b/.github/workflows/pip-audit.yml index 486a36e0fb..5fa3be6f7e 100644 --- a/.github/workflows/pip-audit.yml +++ b/.github/workflows/pip-audit.yml @@ -11,14 +11,14 @@ jobs: runs-on: ubuntu-latest if: (github.event_name == 'pull_request' && github.repository == 'hyperledger/aries-cloudagent-python') || (github.event_name != 'pull_request') steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: install run: | python -m venv env/ source env/bin/activate python -m pip install --upgrade pip python -m pip install . 
- - uses: pypa/gh-action-pip-audit@v1.0.0 + - uses: pypa/gh-action-pip-audit@v1.0.8 with: virtual-environment: env/ local: true diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml new file mode 100644 index 0000000000..82cbdd3554 --- /dev/null +++ b/.github/workflows/publish-docs.yml @@ -0,0 +1,52 @@ +name: publish-docs + +on: + push: + # Publish `main` as latest + branches: + - main + + # Publish `v1.2.3` tags as releases + tags: + - v* + +permissions: + contents: write + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 # fetch all commits/branches + - uses: actions/setup-python@v5 + with: + python-version: 3.x + - uses: actions/cache@v4 + with: + key: ${{ github.ref }} + path: .cache + - name: Install Python dependencies + run: pip install -r ./mkdocs-requirements.txt + - name: Configure git user + run: | + git config --local user.email "github-actions[bot]@users.noreply.github.com" + git config --local user.name "github-actions[bot]" + + - name: Deploy docs + run: | + # Strip git ref prefix from version + echo "${{ github.ref }}" + VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') + # Strip "v" prefix from tag name + [[ "${{ github.ref }}" == "refs/tags/"* ]] && ALIAS=$(echo $VERSION | sed -e 's/^v//') + # Copy all of the root level md files into the docs folder for deployment, tweaking the relative paths + for i in *.md; do sed -e "s#docs/#./#g" $i >docs/$i; done + # Populate overrides for the current version, and then remove to not apply if VERSION is main branch + echo -e "{% extends \"base.html\" %}\n\n{% block outdated %}\n You are viewing the documentation for ACA-Py Release $VERSION.\n{% endblock %}" >overrides/base.html + # If building from main, use latest as ALIAS + [ "$VERSION" == "main" ] && ALIAS=latest && rm overrides/base.html + echo $VERSION $ALIAS + mike deploy --push --update-aliases $VERSION $ALIAS + mike set-default latest diff --git 
a/.github/workflows/publish-indy.yml b/.github/workflows/publish-indy.yml index 17f479c8e6..3d95cc6d84 100644 --- a/.github/workflows/publish-indy.yml +++ b/.github/workflows/publish-indy.yml @@ -51,7 +51,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ inputs.ref || '' }} @@ -61,7 +61,7 @@ jobs: echo "repo-owner=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_OUTPUT - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} @@ -69,10 +69,10 @@ jobs: ${{ runner.os }}-buildx- - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Log in to the GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -80,7 +80,7 @@ jobs: - name: Setup Image Metadata id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ghcr.io/${{ steps.info.outputs.repo-owner }}/aries-cloudagent-python @@ -88,7 +88,7 @@ jobs: type=raw,value=py${{ matrix.python-version }}-indy-${{ env.INDY_VERSION }}-${{ inputs.tag || github.event.release.tag_name }} - name: Build and Push Image to ghcr.io - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: push: true context: . 
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 937c2e1bbd..8195dc1b7c 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -48,7 +48,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ inputs.ref || '' }} @@ -58,7 +58,7 @@ jobs: echo "repo-owner=${GITHUB_REPOSITORY_OWNER,,}" >> $GITHUB_OUTPUT - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache key: ${{ runner.os }}-buildx-${{ github.sha }} @@ -66,10 +66,10 @@ jobs: ${{ runner.os }}-buildx- - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Log in to the GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -77,7 +77,7 @@ jobs: - name: Setup Image Metadata id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: | ghcr.io/${{ steps.info.outputs.repo-owner }}/aries-cloudagent-python @@ -85,7 +85,7 @@ jobs: type=raw,value=py${{ matrix.python-version }}-${{ inputs.tag || github.event.release.tag_name }} - name: Build and Push Image to ghcr.io - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 with: push: true context: . 
diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 5e7ebfb330..8211541fbe 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -8,9 +8,9 @@ jobs: deploy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install dependencies diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index 7160212071..30d997d594 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest if: ${{ github.repository_owner == 'hyperledger' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Build a Docker image run: docker build -t aries-cloudagent -f docker/Dockerfile . - name: Run Snyk to check Docker image for vulnerabilities @@ -28,6 +28,6 @@ jobs: image: aries-cloudagent args: --file=docker/Dockerfile - name: Upload result to GitHub Code Scanning - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: snyk.sarif diff --git a/.github/workflows/tests-indy.yml b/.github/workflows/tests-indy.yml index 7e69e76b30..8b7651a39f 100644 --- a/.github/workflows/tests-indy.yml +++ b/.github/workflows/tests-indy.yml @@ -18,10 +18,10 @@ jobs: name: Test Python ${{ inputs.python-version }} on Indy ${{ inputs.indy-version }} runs-on: ${{ inputs.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Cache image layers - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: /tmp/.buildx-cache-test key: ${{ runner.os }}-buildx-test-${{ github.sha }} @@ -29,10 +29,10 @@ jobs: ${{ runner.os }}-buildx-test- - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build test image - uses: docker/build-push-action@v3 + uses: 
docker/build-push-action@v5 with: load: true context: . diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5fb610580b..62699408a3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -15,9 +15,9 @@ jobs: name: Test Python ${{ inputs.python-version }} runs-on: ${{ inputs.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ inputs.python-version }} cache: 'pip' diff --git a/CHANGELOG.md b/CHANGELOG.md index 5a0f30a8ec..45c584ede3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,24 +1,34 @@ # Aries Cloud Agent Python Changelog -## 0.12.0rc0 +## 0.12.0rc1 -### January 23, 2024 +### February 17, 2024 -Release 0.12.0 is a relative large release (53 PRs and counting...) but currently with no breaking changes. We expect there will be breaking changes (at least in the handling of endorsement) before the 0.12.0 release is finalized, hence the minor version update. +Release 0.12.0 is a relatively large release but currently with no breaking changes. We expect there will be breaking changes (at least in the handling of endorsement) before the 0.12.0 release is finalized, hence the minor version update. + +The first release candidate, `rc0`, introduced a regression via [PR \#2705] that has been reverted in [PR \#2789]. Further investigation is needed to determine how to accomplish the goal of [PR \#2705] ("feat: inject profile") without the regression. + +[PR \#2705]: https://github.com/hyperledger/aries-cloudagent-python/pull/2705 +[PR \#2789]: https://github.com/hyperledger/aries-cloudagent-python/pull/2789 Much progress was made on `did:peer` support in this release, with the handling of inbound [DID Peer] 1 added, and inbound and outbound support for DID Peer 2 and 4. The goal of that work is to eliminate the remaining places where "unqualified" DIDs remain. 
Work continues in supporting ledger agnostic [AnonCreds], and the new [Hyperledger AnonCreds Rust] library. Attention was also given in the release to the handling of JSON-LD [Data Integrity Verifiable Credentials], with more expected before the release is finalized. In addition to those updates, there were fixes and improvements across the codebase. +The most visible change in this release is the re-organization of the ACA-Py documentation, moving the vast majority of the documents to the folders within the `docs` folder -- a long overdue change that will allow us to soon publish the documents on [https://aca-py.org](https://aca-py.org) directly from the ACA-Py repository, rather than from the separate [aries-acapy-docs](https://github.com/hyperledger/aries-acapy-docs) currently being used. + +A big developer improvement is a revamping of the test handling to eliminate ~2500 warnings that were previously generated in the test suite. Nice job [@ff137](https://github.com/ff137)! + [DID Peer]: https://identity.foundation/peer-did-method-spec/ [AnonCreds]: https://www.hyperledger.org/projects/anoncreds [Hyperledger AnonCreds Rust]: https://github.com/hyperledger/anoncreds-rs [Data Integrity Verifiable Credentials]: https://www.w3.org/TR/vc-data-integrity/ -### 0.12.0rc0 Breaking Changes +### 0.12.0rc1 Breaking Changes -There are no breaking changes in 0.12.0rc0. +There are no breaking changes in 0.12.0rc1. + -#### 0.12.0rc0 Categorized List of Pull Requests +#### 0.12.0rc1 Categorized List of Pull Requests - DID Handling and Connection Establishment Updates/Fixes - fix: save multi_use to the DB for OOB invitations [\#2694](https://github.com/hyperledger/aries-cloudagent-python/pull/2694) [frostyfrog](https://github.com/frostyfrog) 
- feat: support resolving did:peer:1 received in did exchange [\#2611](https://github.com/hyperledger/aries-cloudagent-python/pull/2611) [dbluhm](https://github.com/dbluhm) - Ledger Agnostic AnonCreds RS Changes + - Anoncreds revoke and publish-revocations endorsement [\#2782](https://github.com/hyperledger/aries-cloudagent-python/pull/2782) [jamshale](https://github.com/jamshale) + - Upgrade anoncreds to version 0.2.0-dev11 [\#2763](https://github.com/hyperledger/aries-cloudagent-python/pull/2763) [jamshale](https://github.com/jamshale) + - Update anoncreds to 0.2.0-dev10 [\#2758](https://github.com/hyperledger/aries-cloudagent-python/pull/2758) [jamshale](https://github.com/jamshale) + - Anoncreds - Cred Def and Revocation Endorsement [\#2752](https://github.com/hyperledger/aries-cloudagent-python/pull/2752) [jamshale](https://github.com/jamshale) + - Upgrade anoncreds to 0.2.0-dev9 [\#2741](https://github.com/hyperledger/aries-cloudagent-python/pull/2741) [jamshale](https://github.com/jamshale) + - Upgrade anoncred-rs to version 0.2.0-dev8 [\#2734](https://github.com/hyperledger/aries-cloudagent-python/pull/2734) [jamshale](https://github.com/jamshale) - Upgrade anoncreds to 0.2.0.dev7 [\#2719](https://github.com/hyperledger/aries-cloudagent-python/pull/2719) [jamshale](https://github.com/jamshale) - Improve api documentation and error handling [\#2690](https://github.com/hyperledger/aries-cloudagent-python/pull/2690) [jamshale](https://github.com/jamshale) - Add unit tests for anoncreds revocation [\#2688](https://github.com/hyperledger/aries-cloudagent-python/pull/2688) [jamshale](https://github.com/jamshale) @@ -47,11 +63,15 @@ There are no breaking changes in 0.12.0rc0. 
- Initial code migration from anoncreds-rs branch AnonCreds [\#2596](https://github.com/hyperledger/aries-cloudagent-python/pull/2596) [ianco](https://github.com/ianco) - Hyperledger Indy ledger related updates and fixes + - Add known issues section to Multiledger.md documentation [\#2788](https://github.com/hyperledger/aries-cloudagent-python/pull/2788) [esune](https://github.com/esune) - fix: update constants in TransactionRecord [\#2698](https://github.com/hyperledger/aries-cloudagent-python/pull/2698) [amanji](https://github.com/amanji) - Cache TAA by wallet name [\#2676](https://github.com/hyperledger/aries-cloudagent-python/pull/2676) [jamshale](https://github.com/jamshale) - Fix: RevRegEntry Transaction Endorsement 0.11.0 [\#2558](https://github.com/hyperledger/aries-cloudagent-python/pull/2558) [shaangill025](https://github.com/shaangill025) - JSON-LD Verifiable Credential/DIF Presentation Exchange updates + - Revert profile injection for VcLdpManager on vc-api endpoints [\#2794](https://github.com/hyperledger/aries-cloudagent-python/pull/2794) [PatStLouis](https://github.com/PatStLouis) + - Add cached copy of BBS v1 context [\#2749](https://github.com/hyperledger/aries-cloudagent-python/pull/2749) [andrewwhitehead](https://github.com/andrewwhitehead) + - Update BBS+ context to bypass redirections [\#2739](https://github.com/hyperledger/aries-cloudagent-python/pull/2739) [swcurran](https://github.com/swcurran) - feat: make VcLdpManager pluggable [\#2706](https://github.com/hyperledger/aries-cloudagent-python/pull/2706) [dbluhm](https://github.com/dbluhm) - fix: minor type hint corrections for VcLdpManager [\#2704](https://github.com/hyperledger/aries-cloudagent-python/pull/2704) [dbluhm](https://github.com/dbluhm) - Remove if condition which checks if the credential.type array is equal to 1 [\#2670](https://github.com/hyperledger/aries-cloudagent-python/pull/2670) [PatStLouis](https://github.com/PatStLouis) @@ -65,9 +85,13 @@ There are no breaking changes 
in 0.12.0rc0. - Add ConnectionProblemReport handler [\#2600](https://github.com/hyperledger/aries-cloudagent-python/pull/2600) [usingtechnology](https://github.com/usingtechnology) - Multitenancy Updates and Fixes + - feature/per tenant settings [\#2790](https://github.com/hyperledger/aries-cloudagent-python/pull/2790) [amanji](https://github.com/amanji) - Improve Per Tenant Logging: Fix issues around default log file path [\#2659](https://github.com/hyperledger/aries-cloudagent-python/pull/2659) [shaangill025](https://github.com/shaangill025) - Other Fixes, Demo, DevContainer and Documentation Fixes + - Demo description of reuse in establishing a connection [\#2787](https://github.com/hyperledger/aries-cloudagent-python/pull/2787) [swcurran](https://github.com/swcurran) + - Reorganize the ACA-Py Documentation Files [\#2765](https://github.com/hyperledger/aries-cloudagent-python/pull/2765) [swcurran](https://github.com/swcurran) + - Tweaks to MD files to enable aca-py.org publishing [\#2771](https://github.com/hyperledger/aries-cloudagent-python/pull/2771) [swcurran](https://github.com/swcurran) - Update devcontainer documentation [\#2729](https://github.com/hyperledger/aries-cloudagent-python/pull/2729) [jamshale](https://github.com/jamshale) - Update the SupportedRFCs Document to be up to date [\#2722](https://github.com/hyperledger/aries-cloudagent-python/pull/2722) [swcurran](https://github.com/swcurran) - Fix incorrect Sphinx search library version reference [\#2716](https://github.com/hyperledger/aries-cloudagent-python/pull/2716) [swcurran](https://github.com/swcurran) @@ -78,6 +102,13 @@ There are no breaking changes in 0.12.0rc0. 
- Update the ReadTheDocs config in case we do another 0.10.x release [\#2629](https://github.com/hyperledger/aries-cloudagent-python/pull/2629) [swcurran](https://github.com/swcurran) - Dependencies and Internal Updates + - Change middleware registration order [\#2796](https://github.com/hyperledger/aries-cloudagent-python/pull/2796) [PatStLouis](https://github.com/PatStLouis) + - Bump pyld version to 2.0.4 [\#2795](https://github.com/hyperledger/aries-cloudagent-python/pull/2795) [PatStLouis](https://github.com/PatStLouis) + - Revert profile inject [\#2789](https://github.com/hyperledger/aries-cloudagent-python/pull/2789) [jamshale](https://github.com/jamshale) + - Move emit events to profile and delay sending until after commit [\#2760](https://github.com/hyperledger/aries-cloudagent-python/pull/2760) [ianco](https://github.com/ianco) + - fix: partial revert of ConnRecord schema change 1.0.0 [\#2746](https://github.com/hyperledger/aries-cloudagent-python/pull/2746) [dbluhm](https://github.com/dbluhm) + - chore(deps): Bump aiohttp from 3.9.1 to 3.9.2 dependencies [\#2745](https://github.com/hyperledger/aries-cloudagent-python/pull/2745) [dependabot bot](https://github.com/dependabot bot) + - bump pydid to v 0.4.3 [\#2737](https://github.com/hyperledger/aries-cloudagent-python/pull/2737) [PatStLouis](https://github.com/PatStLouis) - Fix subwallet record removal [\#2721](https://github.com/hyperledger/aries-cloudagent-python/pull/2721) [andrewwhitehead](https://github.com/andrewwhitehead) - chore(deps): Bump jinja2 from 3.1.2 to 3.1.3 dependencies [\#2707](https://github.com/hyperledger/aries-cloudagent-python/pull/2707) [dependabot bot](https://github.com/dependabot bot) - feat: inject profile [\#2705](https://github.com/hyperledger/aries-cloudagent-python/pull/2705) [dbluhm](https://github.com/dbluhm) @@ -90,10 +121,21 @@ There are no breaking changes in 0.12.0rc0. 
- Bump aiohttp from 3.8.6 to 3.9.0 dependencies [\#2635](https://github.com/hyperledger/aries-cloudagent-python/pull/2635) [dependabot bot](https://github.com/dependabot bot) - CI/CD, Testing, and Developer Tools/Productivity Updates + - Fix deprecation warnings [\#2756](https://github.com/hyperledger/aries-cloudagent-python/pull/2756) [ff137](https://github.com/ff137) + - chore(deps): Bump the all-actions group with 10 updates dependencies [\#2784](https://github.com/hyperledger/aries-cloudagent-python/pull/2784) [dependabot bot](https://github.com/dependabot bot) + - Add Dependabot configuration [\#2783](https://github.com/hyperledger/aries-cloudagent-python/pull/2783) [WadeBarnes](https://github.com/WadeBarnes) + - Implement B006 rule [\#2775](https://github.com/hyperledger/aries-cloudagent-python/pull/2775) [jamshale](https://github.com/jamshale) + - ⬆️ Upgrade pytest to 8.0 [\#2773](https://github.com/hyperledger/aries-cloudagent-python/pull/2773) [ff137](https://github.com/ff137) + - ⬆️ Update pytest-asyncio to 0.23.4 [\#2764](https://github.com/hyperledger/aries-cloudagent-python/pull/2764) [ff137](https://github.com/ff137) + - Remove asynctest dependency and fix "coroutine not awaited" warnings [\#2755](https://github.com/hyperledger/aries-cloudagent-python/pull/2755) [ff137](https://github.com/ff137) + - Fix pytest collection errors when anoncreds package is not installed [\#2750](https://github.com/hyperledger/aries-cloudagent-python/pull/2750) [andrewwhitehead](https://github.com/andrewwhitehead) + - chore: pin black version [\#2747](https://github.com/hyperledger/aries-cloudagent-python/pull/2747) [dbluhm](https://github.com/dbluhm) - Tweak scope of GHA integration tests [\#2662](https://github.com/hyperledger/aries-cloudagent-python/pull/2662) [ianco](https://github.com/ianco) - Update snyk workflow to execute on Pull Request [\#2658](https://github.com/hyperledger/aries-cloudagent-python/pull/2658) [usingtechnology](https://github.com/usingtechnology) 
- Release management pull requests + - 0.12.0rc1 [\#2800](https://github.com/hyperledger/aries-cloudagent-python/pull/2800) [swcurran](https://github.com/swcurran) + - 0.12.0rc1 [\#2799](https://github.com/hyperledger/aries-cloudagent-python/pull/2799) [swcurran](https://github.com/swcurran) - 0.12.0rc0 [\#2732](https://github.com/hyperledger/aries-cloudagent-python/pull/2732) [swcurran](https://github.com/swcurran) ## 0.11.0 diff --git a/PUBLISHING.md b/PUBLISHING.md index e44c2dfdee..4aa7c91003 100644 --- a/PUBLISHING.md +++ b/PUBLISHING.md @@ -91,7 +91,9 @@ Once you have the list of PRs: - Organize the list into suitable categories, update (if necessary) the PR description and add notes to clarify the changes. See previous release entries to understand the style -- a format that should help developers. - Add a narrative about the release above the PR that highlights what has gone into the release. -4. Update the ReadTheDocs in the `/docs` folder by following the instructions in +4. Check to see if there are any other PRs that should be included in the release. + +5. Update the ReadTheDocs in the `/docs` folder by following the instructions in the `docs/README.md` file. That will likely add a number of new and modified files to the PR. Eliminate all of the errors in the generation process, either by mocking external dependencies or by fixing ACA-Py code. If @@ -99,44 +101,36 @@ Once you have the list of PRs: developer. Experience has demonstrated to use that documentation generation errors should be fixed in the code. -5. Regenerate openapi.json and swagger.json by running - `./scripts/generate-open-api-spec`. - -6. Update the version number listed in - [pyproject.toml](https://github.com/hyperledger/aries-cloudagent-python/tree/main/pyproject.toml) and, prefixed with - a "v" in [open-api/openapi.json](https://github.com/hyperledger/open-api/tree/main/openapi.json) and - [open-api/swagger.json](https://github.com/hyperledger/open-api/tree/main/swagger.json) (e.g. 
"0.7.2" in the - pyproject.toml file and "v0.7.2" in the openapi.json file). The incremented - version number should adhere to the [Semantic Versioning +6. Search across the repository for the previous version number and update it + everywhere that makes sense. The CHANGELOG.md is a likely exception, and the + `pyproject.toml` in the root is **MUST**. You can skip (although it won't + hurt) to update the files in the `open-api` folder as they will be + automagically updated by the next step in publishing. The incremented version + number **MUST** adhere to the [Semantic Versioning Specification](https://semver.org/#semantic-versioning-specification-semver) based on the changes since the last published release. For Release - Candidates, the form of the tag is "0.11.0rc2". As of release `0.11.0` - we have dropped the previously used `-` in the release candidate version - string to better follow the semver rules. - -7. An extra search of the repo for the existing tag is recommended to see if - there are any other instances of the tag in the repo. If any are found to be - required (other than in CHANGELOG.md and the examples in this file, of - course), finding a way to not need them is best, but if they are needed, - please update this document to note where the tag can be found. - -8. Check to see if there are any other PRs that should be included in the release. - -9. Double check all of these steps above, and then submit a PR from the branch. + Candidates, the form of the tag is "0.11.0rc2". As of release `0.11.0` we + have dropped the previously used `-` in the release candidate version string + to better follow the semver rules. + +7. Regenerate openapi.json and swagger.json by running + `./scripts/generate-open-api-spec`. + +8. Double check all of these steps above, and then submit a PR from the branch. Add this new PR to CHANGELOG.md so that all the PRs are included. 
If there are still further changes to be merged, mark the PR as "Draft", repeat **ALL** of the steps again, and then mark this PR as ready and then wait until it is merged. It's embarrassing when you have to do a whole new release just because you missed something silly...I know! -10. Immediately after it is merged, create a new GitHub tag representing the +9. Immediately after it is merged, create a new GitHub tag representing the version. The tag name and title of the release should be the same as the version in [pyproject.toml](https://github.com/hyperledger/aries-cloudagent-python/tree/main/pyproject.toml). Use the "Generate Release Notes" capability to get a sequential listing of the PRs in the release, to complement the manually curated Changelog. Verify on PyPi that the version is published. -11. New images for the release are automatically published by the GitHubAction +10. New images for the release are automatically published by the GitHubAction Workflows: [publish.yml] and [publish-indy.yml]. The actions are triggered when a release is tagged, so no manual action is needed. The images are published in the [Hyperledger Package Repository under @@ -150,11 +144,11 @@ Once you have the list of PRs: [publish.yml]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/.github/workflows/publish.yml [publish-indy.yml]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/.github/workflows/publish-indy.yml -12. Update the ACA-Py Read The Docs site by building the new "latest" (main +11. Update the ACA-Py Read The Docs site by building the new "latest" (main branch) and activating and building the new release. Appropriate permissions are required to publish the new documentation version. -13. Update the [https://aca-py.org] website with the latest documentation by +12. Update the [https://aca-py.org] website with the latest documentation by creating a PR and tag of the latest documentation from this site. 
Details are provided in the [aries-acapy-docs] repository. diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py index f500a9d4c4..946cb0baf2 100644 --- a/aries_cloudagent/admin/server.py +++ b/aries_cloudagent/admin/server.py @@ -299,7 +299,7 @@ def _matches_additional_routes(self, path: str) -> bool: async def make_application(self) -> web.Application: """Get the aiohttp application instance.""" - middlewares = [ready_middleware, debug_middleware, validation_middleware] + middlewares = [ready_middleware, debug_middleware] # admin-token and admin-token are mutually exclusive and required. # This should be enforced during parameter parsing but to be sure, @@ -453,6 +453,9 @@ async def setup_context(request: web.Request, handler): middlewares.append(setup_context) + # Register validation_middleware last avoiding unauthorized validations + middlewares.append(validation_middleware) + app = web.Application( middlewares=middlewares, client_max_size=( diff --git a/aries_cloudagent/admin/tests/test_admin_server.py b/aries_cloudagent/admin/tests/test_admin_server.py index 7eae850288..300e82f758 100644 --- a/aries_cloudagent/admin/tests/test_admin_server.py +++ b/aries_cloudagent/admin/tests/test_admin_server.py @@ -20,6 +20,11 @@ from ..server import AdminServer, AdminSetupError +# Ignore Marshmallow warning, as well as 'NotAppKeyWarning' coming from apispec packages +@pytest.mark.filterwarnings( + "ignore:The 'missing' attribute of fields is deprecated. 
Use 'load_default' instead.", + "ignore:It is recommended to use web.AppKey instances for keys.", +) class TestAdminServer(IsolatedAsyncioTestCase): async def asyncSetUp(self): self.message_results = [] @@ -507,5 +512,6 @@ def _smaller_scope(): with pytest.raises(RuntimeError): await responder.send_outbound(None) - with pytest.raises(RuntimeError): - await responder.send_webhook("test", {}) + with pytest.deprecated_call(): + with pytest.raises(RuntimeError): + await responder.send_webhook("test", {}) diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py index 3be3cab976..c302f2348d 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py @@ -796,8 +796,8 @@ async def _revoc_reg_entry_with_fix( # Ledger rejected transaction request: client request invalid: # InvalidClientRequest(...) # In this scenario we try to post a correction - LOGGER.warn("Retry ledger update/fix due to error") - LOGGER.warn(err) + LOGGER.warning("Retry ledger update/fix due to error") + LOGGER.warning(err) (_, _, rev_entry_res) = await self.fix_ledger_entry( profile, rev_list, @@ -806,7 +806,7 @@ async def _revoc_reg_entry_with_fix( write_ledger, endorser_did, ) - LOGGER.warn("Ledger update/fix applied") + LOGGER.warning("Ledger update/fix applied") elif "InvalidClientTaaAcceptanceError" in err.roll_up: # if no write access (with "InvalidClientTaaAcceptanceError") # e.g. 
aries_cloudagent.ledger.error.LedgerTransactionError: diff --git a/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py b/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py index b9e038ea9d..0630114bcc 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_cred_def.py @@ -16,7 +16,10 @@ NUM_STR_WHOLE_VALIDATE, ) -NUM_STR_WHOLE = {"validate": NUM_STR_WHOLE_VALIDATE, "example": NUM_STR_WHOLE_EXAMPLE} +NUM_STR_WHOLE = { + "validate": NUM_STR_WHOLE_VALIDATE, + "metadata": {"example": NUM_STR_WHOLE_EXAMPLE}, +} class CredDefValuePrimary(BaseModel): @@ -126,17 +129,27 @@ class Meta: model_class = CredDefValueRevocation unknown = EXCLUDE - g = fields.Str(example="1 1F14F&ECB578F 2 095E45DDF417D") - g_dash = fields.Str(example="1 1D64716fCDC00C 1 0C781960FA66E3D3 2 095E45DDF417D") - h = fields.Str(example="1 16675DAE54BFAE8 2 095E45DD417D") - h0 = fields.Str(example="1 21E5EF9476EAF18 2 095E45DDF417D") - h1 = fields.Str(example="1 236D1D99236090 2 095E45DDF417D") - h2 = fields.Str(example="1 1C3AE8D1F1E277 2 095E45DDF417D") - htilde = fields.Str(example="1 1D8549E8C0F8 2 095E45DDF417D") - h_cap = fields.Str(example="1 1B2A32CF3167 1 2490FEBF6EE55 1 0000000000000000") - u = fields.Str(example="1 0C430AAB2B4710 1 1CB3A0932EE7E 1 0000000000000000") - pk = fields.Str(example="1 142CD5E5A7DC 1 153885BD903312 2 095E45DDF417D") - y = fields.Str(example="1 153558BD903312 2 095E45DDF417D 1 0000000000000000") + g = fields.Str(metadata={"example": "1 1F14F&ECB578F 2 095E45DDF417D"}) + g_dash = fields.Str( + metadata={"example": "1 1D64716fCDC00C 1 0C781960FA66E3D3 2 095E45DDF417D"} + ) + h = fields.Str(metadata={"example": "1 16675DAE54BFAE8 2 095E45DD417D"}) + h0 = fields.Str(metadata={"example": "1 21E5EF9476EAF18 2 095E45DDF417D"}) + h1 = fields.Str(metadata={"example": "1 236D1D99236090 2 095E45DDF417D"}) + h2 = fields.Str(metadata={"example": "1 1C3AE8D1F1E277 2 095E45DDF417D"}) + htilde = 
fields.Str(metadata={"example": "1 1D8549E8C0F8 2 095E45DDF417D"}) + h_cap = fields.Str( + metadata={"example": "1 1B2A32CF3167 1 2490FEBF6EE55 1 0000000000000000"} + ) + u = fields.Str( + metadata={"example": "1 0C430AAB2B4710 1 1CB3A0932EE7E 1 0000000000000000"} + ) + pk = fields.Str( + metadata={"example": "1 142CD5E5A7DC 1 153885BD903312 2 095E45DDF417D"} + ) + y = fields.Str( + metadata={"example": "1 153558BD903312 2 095E45DDF417D 1 0000000000000000"} + ) class CredDefValue(BaseModel): @@ -178,11 +191,11 @@ class Meta: primary = fields.Nested( CredDefValuePrimarySchema(), - description="Primary value for credential definition", + metadata={"description": "Primary value for credential definition"}, ) revocation = fields.Nested( CredDefValueRevocationSchema(), - description="Revocation value for credential definition", + metadata={"description": "Revocation value for credential definition"}, required=False, ) @@ -243,20 +256,26 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) schema_id = fields.Str( data_key="schemaId", - description="Schema identifier", - example=INDY_SCHEMA_ID_EXAMPLE, + metadata={ + "description": "Schema identifier", + "example": INDY_SCHEMA_ID_EXAMPLE, + }, ) type = fields.Str(validate=OneOf(["CL"])) tag = fields.Str( - description="""The tag value passed in by the Issuer to + metadata={ + "description": """The tag value passed in by the Issuer to an AnonCred's Credential Definition create and store implementation.""", - example="default", + "example": "default", + } ) value = fields.Nested(CredDefValueSchema()) @@ -315,12 +334,14 @@ class Meta: ) ) credential_definition_id = fields.Str( - description="credential definition id", + metadata={ + "description": "credential 
definition id", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, allow_none=True, - example=INDY_CRED_DEF_ID_EXAMPLE, ) credential_definition = fields.Nested( - CredDefSchema(), description="credential definition" + CredDefSchema(), metadata={"description": "credential definition"} ) @@ -418,11 +439,13 @@ class Meta: unknown = EXCLUDE credential_definition_id = fields.Str( - description="credential definition id", - example=INDY_CRED_DEF_ID_EXAMPLE, + metadata={ + "description": "credential definition id", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, ) credential_definition = fields.Nested( - CredDefSchema(), description="credential definition" + CredDefSchema(), metadata={"description": "credential definition"} ) resolution_metadata = fields.Dict() credential_definitions_metadata = fields.Dict() diff --git a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py index b60eefafa2..5fe66d8f0e 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py @@ -62,15 +62,18 @@ class Meta: unknown = EXCLUDE public_keys = fields.Dict( - data_key="publicKeys", example=INDY_RAW_PUBLIC_KEY_EXAMPLE + data_key="publicKeys", metadata={"example": INDY_RAW_PUBLIC_KEY_EXAMPLE} ) - max_cred_num = fields.Int(data_key="maxCredNum", example=666) + max_cred_num = fields.Int(data_key="maxCredNum", metadata={"example": 777}) tails_location = fields.Str( data_key="tailsLocation", - example="https://tails-server.com/hash/7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P", + metadata={ + "example": "https://tails-server.com/hash/7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P" + }, ) tails_hash = fields.Str( - data_key="tailsHash", example="7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P" + data_key="tailsHash", + metadata={"example": "7Qen9RDyemMuV7xGQvp7NjwMSpyHieJyBakycxN7dX7P"}, ) @@ -130,18 +133,25 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - 
description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) type = fields.Str(data_key="revocDefType") cred_def_id = fields.Str( - description="Credential definition identifier", + metadata={ + "description": "Credential definition identifier", + "example": INDY_CRED_DEF_ID_EXAMPLE, + }, data_key="credDefId", - example=INDY_CRED_DEF_ID_EXAMPLE, ) tag = fields.Str( - description="tag for the revocation registry definition", example="default" + metadata={ + "description": "tag for the revocation registry definition", + "example": "default", + } ) value = fields.Nested(RevRegDefValueSchema()) @@ -204,11 +214,13 @@ class Meta: ) ) revocation_registry_definition_id = fields.Str( - description="revocation registry definition id", - example=INDY_REV_REG_ID_EXAMPLE, + metadata={ + "description": "revocation registry definition id", + "example": INDY_REV_REG_ID_EXAMPLE, + } ) revocation_registry_definition = fields.Nested( - RevRegDefSchema(), description="revocation registry definition" + RevRegDefSchema(), metadata={"description": "revocation registry definition"} ) @@ -381,30 +393,40 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) rev_reg_def_id = fields.Str( - description="The ID of the revocation registry definition", + metadata={ + "description": "The ID of the revocation registry definition", + "example": INDY_REV_REG_ID_EXAMPLE, + }, data_key="revRegDefId", - example=INDY_REV_REG_ID_EXAMPLE, ) revocation_list = fields.List( fields.Int(), - description="Bit list representing revoked credentials", + 
metadata={ + "description": "Bit list representing revoked credentials", + "example": [0, 1, 1, 0], + }, data_key="revocationList", - example=[0, 1, 1, 0], ) current_accumulator = fields.Str( - description="The current accumalator value", - example="21 118...1FB", + metadata={ + "description": "The current accumalator value", + "example": "21 118...1FB", + }, data_key="currentAccumulator", ) timestamp = fields.Int( - description="Timestamp at which revocation list is applicable", + metadata={ + "description": "Timestamp at which revocation list is applicable", + "example": INDY_ISO8601_DATETIME_EXAMPLE, + }, required=False, - example=INDY_ISO8601_DATETIME_EXAMPLE, ) @@ -458,7 +480,9 @@ class Meta: ] ) ) - revocation_list = fields.Nested(RevListSchema(), description="revocation list") + revocation_list = fields.Nested( + RevListSchema(), metadata={"description": "revocation list"} + ) class RevListResult(BaseModel): diff --git a/aries_cloudagent/anoncreds/models/anoncreds_schema.py b/aries_cloudagent/anoncreds/models/anoncreds_schema.py index 58cb4a06f2..c9190239fa 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_schema.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_schema.py @@ -61,20 +61,26 @@ class Meta: unknown = EXCLUDE issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) attr_names = fields.List( fields.Str( - description="Attribute name", - example="score", + metadata={ + "description": "Attribute name", + "example": "score", + } ), - description="Schema attribute names", + metadata={"description": "Schema attribute names"}, data_key="attrNames", ) - name = fields.Str(description="Schema name", example="Example schema") - version = fields.Str(description="Schema version", example="1.0") + name = fields.Str( + 
metadata={"description": "Schema name", "example": "Example schema"} + ) + version = fields.Str(metadata={"description": "Schema version", "example": "1.0"}) class GetSchemaResult(BaseModel): @@ -130,7 +136,7 @@ class Meta: schema_value = fields.Nested(AnonCredsSchemaSchema(), data_key="schema") schema_id = fields.Str( - description="Schema identifier", example=INDY_SCHEMA_ID_EXAMPLE + metadata={"description": "Schema identifier", "example": INDY_SCHEMA_ID_EXAMPLE} ) resolution_metadata = fields.Dict() schema_metadata = fields.Dict() @@ -184,8 +190,10 @@ class Meta: ) ) schema_id = fields.Str( - description="Schema identifier", - example=INDY_SCHEMA_ID_EXAMPLE, + metadata={ + "description": "Schema identifier", + "example": INDY_SCHEMA_ID_EXAMPLE, + } ) schema_value = fields.Nested(AnonCredsSchemaSchema(), data_key="schema") diff --git a/aries_cloudagent/anoncreds/revocation.py b/aries_cloudagent/anoncreds/revocation.py index c596c8a0e9..70cea45e64 100644 --- a/aries_cloudagent/anoncreds/revocation.py +++ b/aries_cloudagent/anoncreds/revocation.py @@ -483,7 +483,7 @@ async def store_revocation_registry_list(self, result: RevListResult): value_json={ "rev_list": rev_list.serialize(), "pending": None, - # TODO THIS IS A HACK; this fixes ACA-Py expecting 1-based indexes + # TODO THIS IS A HACK; this fixes ACA-Py expecting 1-based indexes # noqa: E501 "next_index": 1, }, tags={ @@ -505,16 +505,21 @@ async def store_revocation_registry_list(self, result: RevListResult): async def finish_revocation_list(self, job_id: str, rev_reg_def_id: str): """Mark a revocation list as finished.""" async with self.profile.transaction() as txn: - await self._finish_registration( - txn, + # Finish the registration if the list is new, otherwise already updated + existing_list = await txn.handle.fetch( CATEGORY_REV_LIST, - job_id, rev_reg_def_id, - state=STATE_FINISHED, ) - await txn.commit() - - await self.notify(RevListFinishedEvent.with_payload(rev_reg_def_id)) + if not existing_list: 
+ await self._finish_registration( + txn, + CATEGORY_REV_LIST, + job_id, + rev_reg_def_id, + state=STATE_FINISHED, + ) + await txn.commit() + await self.notify(RevListFinishedEvent.with_payload(rev_reg_def_id)) async def update_revocation_list( self, @@ -566,22 +571,21 @@ async def update_revocation_list( self.profile, rev_reg_def, prev, curr, revoked, options ) - # TODO Handle `failed` state - + # # TODO Handle `failed` state try: async with self.profile.session() as session: rev_list_entry_upd = await session.handle.fetch( - CATEGORY_REV_LIST, rev_reg_def_id, for_update=True + CATEGORY_REV_LIST, result.rev_reg_def_id, for_update=True ) if not rev_list_entry_upd: raise AnonCredsRevocationError( - "Revocation list not found for id {rev_reg_def_id}" + f"Revocation list not found for id {rev_reg_def_id}" ) tags = rev_list_entry_upd.tags tags["state"] = result.revocation_list_state.state await session.handle.replace( CATEGORY_REV_LIST, - rev_reg_def_id, + result.rev_reg_def_id, value=rev_list_entry_upd.value, tags=tags, ) @@ -1205,7 +1209,7 @@ async def revoke_pending_credentials( ) failed_crids.add(rev_id) elif rev_id >= rev_info["next_index"]: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s not yet issued", revoc_reg_id, @@ -1213,7 +1217,7 @@ async def revoke_pending_credentials( ) failed_crids.add(rev_id) elif rev_list.revocation_list[rev_id] == 1: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s already revoked", revoc_reg_id, @@ -1255,7 +1259,7 @@ async def revoke_pending_credentials( CATEGORY_REV_LIST, revoc_reg_id, for_update=True ) if not rev_info_upd: - LOGGER.warn( + LOGGER.warning( "Revocation registry missing, skipping update: {}", revoc_reg_id, ) diff --git a/aries_cloudagent/anoncreds/revocation_setup.py b/aries_cloudagent/anoncreds/revocation_setup.py index 4e0fd96b85..8f16b382c9 100644 --- 
a/aries_cloudagent/anoncreds/revocation_setup.py +++ b/aries_cloudagent/anoncreds/revocation_setup.py @@ -1,5 +1,6 @@ """Automated setup process for AnonCreds credential definitions with revocation.""" +import logging from abc import ABC, abstractmethod from aries_cloudagent.protocols.endorse_transaction.v1_0.util import is_author_role @@ -16,6 +17,8 @@ RevRegDefFinishedEvent, ) +LOGGER = logging.getLogger(__name__) + class AnonCredsRevocationSetupManager(ABC): """Base class for automated setup of revocation.""" @@ -102,3 +105,4 @@ async def on_rev_reg_def(self, profile: Profile, event: RevRegDefFinishedEvent): async def on_rev_list(self, profile: Profile, event: RevListFinishedEvent): """Handle rev list finished.""" + LOGGER.debug("Revocation list finished: %s", event.payload.rev_reg_def_id) diff --git a/aries_cloudagent/anoncreds/routes.py b/aries_cloudagent/anoncreds/routes.py index 2931c18807..a0e9bf47fb 100644 --- a/aries_cloudagent/anoncreds/routes.py +++ b/aries_cloudagent/anoncreds/routes.py @@ -25,16 +25,12 @@ INDY_SCHEMA_ID_EXAMPLE, UUIDFour, ) -from ..revocation.error import RevocationError, RevocationNotSupportedError -from ..revocation_anoncreds.manager import RevocationManager, RevocationManagerError -from ..revocation_anoncreds.routes import ( - PublishRevocationsSchema, +from ..revocation.error import RevocationNotSupportedError +from ..revocation.routes import ( RevocationModuleResponseSchema, - RevokeRequestSchema, RevRegIdMatchInfoSchema, - TxnOrPublishRevocationsResultSchema, ) -from ..storage.error import StorageError, StorageNotFoundError +from ..storage.error import StorageNotFoundError from .base import ( AnonCredsObjectNotFound, AnonCredsRegistrationError, @@ -86,17 +82,17 @@ class SchemaPostOptionSchema(OpenAPISchema): endorser_connection_id = fields.Str( metadata={ "description": endorser_connection_id_description, - "required": False, "example": UUIDFour.EXAMPLE, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( 
metadata={ "description": create_transaction_for_endorser_description, - "required": False, "example": False, - } + }, + required=False, ) @@ -267,8 +263,8 @@ class CredIdMatchInfo(OpenAPISchema): metadata={ "description": "Credential definition identifier", "example": INDY_CRED_DEF_ID_EXAMPLE, - "required": True, - } + }, + required=True, ) @@ -279,23 +275,23 @@ class InnerCredDefSchema(OpenAPISchema): metadata={ "description": "Credential definition tag", "example": "default", - "required": True, - } + }, + required=True, ) schema_id = fields.Str( metadata={ "description": "Schema identifier", "example": INDY_SCHEMA_ID_EXAMPLE, - "required": True, }, + required=True, data_key="schemaId", ) issuer_id = fields.Str( metadata={ "description": "Issuer Identifier of the credential definition", "example": INDY_OR_KEY_DID_EXAMPLE, - "required": True, }, + required=True, data_key="issuerId", ) @@ -307,28 +303,27 @@ class CredDefPostOptionsSchema(OpenAPISchema): metadata={ "description": endorser_connection_id_description, "example": UUIDFour.EXAMPLE, - "required": False, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( metadata={ "description": create_transaction_for_endorser_description, "example": False, - "required": False, - } + }, + required=False, ) support_revocation = fields.Bool( metadata={ "description": "Support credential revocation", - "required": False, - } + }, + required=False, ) revocation_registry_size = fields.Int( metadata={ "description": "Maximum number of credential revocations per registry", - "example": 666, - "required": False, - } + }, + required=False, ) @@ -447,8 +442,10 @@ class GetCredDefsResponseSchema(OpenAPISchema): credential_definition_ids = fields.List( fields.Str( - description="credential definition identifiers", - example="GvLGiRogTJubmj5B36qhYz:3:CL:8:faber.agent.degree_schema", + metadata={ + "description": "credential definition identifiers", + "example": 
"GvLGiRogTJubmj5B36qhYz:3:CL:8:faber.agent.degree_schema", + } ) ) @@ -482,20 +479,28 @@ class InnerRevRegDefSchema(OpenAPISchema): """Request schema for revocation registry creation request.""" issuer_id = fields.Str( - description="Issuer Identifier of the credential definition or schema", + metadata={ + "description": "Issuer Identifier of the credential definition or schema", + "example": INDY_OR_KEY_DID_EXAMPLE, + }, data_key="issuerId", - example=INDY_OR_KEY_DID_EXAMPLE, ) cred_def_id = fields.Str( - description="Credential definition identifier", + metadata={ + "description": "Credential definition identifier", + "example": INDY_SCHEMA_ID_EXAMPLE, + }, data_key="credDefId", - example=INDY_SCHEMA_ID_EXAMPLE, ) - tag = fields.Str(description="tag for revocation registry", example="default") + tag = fields.Str( + metadata={"description": "tag for revocation registry", "example": "default"} + ) max_cred_num = fields.Int( - description="Maximum number of credential revocations per registry", + metadata={ + "description": "Maximum number of credential revocations per registry", + "example": 777, + }, data_key="maxCredNum", - example=666, ) @@ -506,15 +511,15 @@ class RevRegDefOptionsSchema(OpenAPISchema): metadata={ "description": endorser_connection_id_description, "example": UUIDFour.EXAMPLE, - "required": False, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( metadata={ "description": create_transaction_for_endorser_description, "example": False, - "required": False, - } + }, + required=False, ) @@ -581,15 +586,15 @@ class RevListOptionsSchema(OpenAPISchema): metadata={ "description": endorser_connection_id_description, "example": UUIDFour.EXAMPLE, - "required": False, - } + }, + required=False, ) create_transaction_for_endorser = fields.Bool( metadata={ "description": create_transaction_for_endorser_description, "example": False, - "required": False, - } + }, + required=False, ) @@ -597,8 +602,10 @@ class 
RevListCreateRequestSchema(OpenAPISchema): """Request schema for revocation registry creation request.""" rev_reg_def_id = fields.Str( - description="Revocation registry definition identifier", - example=INDY_REV_REG_ID_EXAMPLE, + metadata={ + "description": "Revocation registry definition identifier", + "example": INDY_REV_REG_ID_EXAMPLE, + } ) options = fields.Nested(RevListOptionsSchema) @@ -685,91 +692,6 @@ async def set_active_registry(request: web.BaseRequest): raise web.HTTPInternalServerError(reason=str(e)) from e -@docs( - tags=["anoncreds"], - summary="Revoke an issued credential", -) -@request_schema(RevokeRequestSchema()) -@response_schema(RevocationModuleResponseSchema(), description="") -async def revoke(request: web.BaseRequest): - """Request handler for storing a credential revocation. - - Args: - request: aiohttp request object - - Returns: - The credential revocation details. - - """ - context: AdminRequestContext = request["context"] - body = await request.json() - cred_ex_id = body.get("cred_ex_id") - body["notify"] = body.get("notify", context.settings.get("revocation.notify")) - notify = body.get("notify") - connection_id = body.get("connection_id") - body["notify_version"] = body.get("notify_version", "v1_0") - notify_version = body["notify_version"] - - if notify and not connection_id: - raise web.HTTPBadRequest(reason="connection_id must be set when notify is true") - if notify and not notify_version: - raise web.HTTPBadRequest( - reason="Request must specify notify_version if notify is true" - ) - - rev_manager = RevocationManager(context.profile) - try: - if cred_ex_id: - # rev_reg_id and cred_rev_id should not be present so we can - # safely splat the body - await rev_manager.revoke_credential_by_cred_ex_id(**body) - else: - # no cred_ex_id so we can safely splat the body - await rev_manager.revoke_credential(**body) - return web.json_response({}) - except ( - RevocationManagerError, - AnonCredsRevocationError, - StorageError, - 
AnonCredsIssuerError, - AnonCredsRegistrationError, - ) as err: - raise web.HTTPBadRequest(reason=err.roll_up) from err - - -@docs(tags=["revocation"], summary="Publish pending revocations to ledger") -@request_schema(PublishRevocationsSchema()) -@response_schema(TxnOrPublishRevocationsResultSchema(), 200, description="") -async def publish_revocations(request: web.BaseRequest): - """Request handler for publishing pending revocations to the ledger. - - Args: - request: aiohttp request object - - Returns: - Credential revocation ids published as revoked by revocation registry id. - - """ - context: AdminRequestContext = request["context"] - body = await request.json() - rrid2crid = body.get("rrid2crid") - - rev_manager = RevocationManager(context.profile) - - try: - rev_reg_resp = await rev_manager.publish_pending_revocations( - rrid2crid, - ) - return web.json_response({"rrid2crid": rev_reg_resp}) - except ( - RevocationError, - StorageError, - AnonCredsIssuerError, - AnonCredsRevocationError, - ) as err: - raise web.HTTPBadRequest(reason=err.roll_up) from err - - def register_events(event_bus: EventBus): """Register events.""" # TODO Make this pluggable? 
@@ -800,8 +722,6 @@ async def register(app: web.Application): web.post("/anoncreds/revocation-list", rev_list_post), web.put("/anoncreds/registry/{rev_reg_id}/tails-file", upload_tails_file), web.put("/anoncreds/registry/{rev_reg_id}/active", set_active_registry), - web.post("/anoncreds/revoke", revoke), - web.post("/anoncreds/publish-revocations", publish_revocations), ] ) diff --git a/aries_cloudagent/anoncreds/tests/test_revocation.py b/aries_cloudagent/anoncreds/tests/test_revocation.py index 2ae61afc09..4e82db18c1 100644 --- a/aries_cloudagent/anoncreds/tests/test_revocation.py +++ b/aries_cloudagent/anoncreds/tests/test_revocation.py @@ -591,12 +591,23 @@ async def test_create_and_register_revocation_list( @mock.patch.object(test_module.AnonCredsRevocation, "_finish_registration") async def test_finish_revocation_list(self, mock_finish, mock_handle): self.profile.context.injector.bind_instance(EventBus, MockEventBus()) + + mock_handle.fetch = mock.CoroutineMock(side_effect=[None, MockEntry()]) + + # Fetch doesn't find list then it should be created await self.revocation.finish_revocation_list( job_id="test-job-id", rev_reg_def_id="test-rev-reg-def-id", ) assert mock_finish.called + # Fetch finds list then there's nothing to do, it's already finished and updated + await self.revocation.finish_revocation_list( + job_id="test-job-id", + rev_reg_def_id="test-rev-reg-def-id", + ) + assert mock_finish.call_count == 1 + @mock.patch.object(InMemoryProfileSession, "handle") async def test_update_revocation_list_get_rev_reg_errors(self, mock_handle): mock_handle.fetch = mock.CoroutineMock( diff --git a/aries_cloudagent/anoncreds/tests/test_routes.py b/aries_cloudagent/anoncreds/tests/test_routes.py index ecfa238495..b2ecda8c5d 100644 --- a/aries_cloudagent/anoncreds/tests/test_routes.py +++ b/aries_cloudagent/anoncreds/tests/test_routes.py @@ -19,7 +19,6 @@ from aries_cloudagent.core.in_memory.profile import ( InMemoryProfile, ) -from 
aries_cloudagent.revocation_anoncreds.manager import RevocationManager from aries_cloudagent.tests import mock from .. import routes as test_module @@ -338,45 +337,6 @@ async def test_set_active_registry(self, mock_set): with self.assertRaises(KeyError): await test_module.set_active_registry(self.request) - async def test_revoke_notify_without_connection_throws_x(self): - self.request.json = mock.CoroutineMock(return_value={"notify": True}) - with self.assertRaises(web.HTTPBadRequest): - await test_module.revoke(self.request) - - @mock.patch.object( - RevocationManager, - "revoke_credential_by_cred_ex_id", - return_value=None, - ) - @mock.patch.object( - RevocationManager, - "revoke_credential", - return_value=None, - ) - async def test_revoke(self, mock_revoke, mock_revoke_by_id): - self.request.json = mock.CoroutineMock( - return_value={"cred_ex_id": "cred_ex_id"} - ) - await test_module.revoke(self.request) - assert mock_revoke_by_id.call_count == 1 - assert mock_revoke.call_count == 0 - - self.request.json = mock.CoroutineMock(return_value={}) - await test_module.revoke(self.request) - assert mock_revoke.call_count == 1 - - @mock.patch.object( - RevocationManager, - "publish_pending_revocations", - return_value="test-rrid", - ) - async def test_publish_revocations(self, mock_publish): - self.request.json = mock.CoroutineMock(return_value={"rrid2crid": "rrid2crid"}) - result = await test_module.publish_revocations(self.request) - - assert json.loads(result.body)["rrid2crid"] == "test-rrid" - assert mock_publish.call_count == 1 - @mock.patch.object(DefaultRevocationSetup, "register_events") async def test_register_events(self, mock_revocation_setup_listeners): mock_event_bus = MockEventBus() diff --git a/aries_cloudagent/config/logging.py b/aries_cloudagent/config/logging.py index fa1a0db419..149d5456b5 100644 --- a/aries_cloudagent/config/logging.py +++ b/aries_cloudagent/config/logging.py @@ -8,7 +8,7 @@ import sys import yaml import time as mod_time -import 
pkg_resources +from importlib import resources from contextvars import ContextVar from datetime import datetime, timedelta @@ -21,7 +21,6 @@ ) from logging.handlers import BaseRotatingHandler from random import randint -from typing import TextIO from portalocker import LOCK_EX, lock, unlock from pythonjsonlogger import jsonlogger @@ -58,7 +57,7 @@ def filter(self, record): return True -def load_resource(path: str, encoding: str = None) -> TextIO: +def load_resource(path: str, encoding: str = None): """Open a resource file located in a python package or the local filesystem. Args: @@ -69,9 +68,12 @@ def load_resource(path: str, encoding: str = None) -> TextIO: components = path.rsplit(":", 1) try: if len(components) == 1: + # Local filesystem resource return open(components[0], encoding=encoding) else: - bstream = pkg_resources.resource_stream(components[0], components[1]) + # Package resource + package, resource = components + bstream = resources.open_binary(package, resource) if encoding: return io.TextIOWrapper(bstream, encoding=encoding) return bstream diff --git a/aries_cloudagent/config/provider.py b/aries_cloudagent/config/provider.py index c97c300cec..17a4e22b70 100644 --- a/aries_cloudagent/config/provider.py +++ b/aries_cloudagent/config/provider.py @@ -1,14 +1,12 @@ """Service provider implementations.""" import hashlib - from typing import Optional, Sequence, Union from weakref import ReferenceType from ..utils.classloader import DeferLoad from ..utils.stats import Collector - -from .base import BaseProvider, BaseSettings, BaseInjector, InjectionError +from .base import BaseInjector, BaseProvider, BaseSettings, InjectionError class InstanceProvider(BaseProvider): @@ -52,10 +50,8 @@ def __init__( self._ctor_kwargs = ctor_kwargs self._init_method = init_method if isinstance(instance_cls, str): - cls = DeferLoad(instance_cls) - else: - cls = instance_cls - self._instance_cls: Union[type, DeferLoad] = cls + instance_cls = DeferLoad(instance_cls) + 
self._instance_cls = instance_cls def provide(self, config: BaseSettings, injector: BaseInjector): """Provide the object instance given a config and injector.""" diff --git a/aries_cloudagent/config/tests/test_logging.py b/aries_cloudagent/config/tests/test_logging.py index 7d8d220a0c..8ae25bad2b 100644 --- a/aries_cloudagent/config/tests/test_logging.py +++ b/aries_cloudagent/config/tests/test_logging.py @@ -136,19 +136,26 @@ def test_banner_did(self): assert test_did in output def test_load_resource(self): + # Testing local file access with mock.patch("builtins.open", mock.MagicMock()) as mock_open: test_module.load_resource("abc", encoding="utf-8") mock_open.side_effect = IOError("insufficient privilege") + # load_resource should absorb IOError test_module.load_resource("abc", encoding="utf-8") - with mock.patch.object( - test_module.pkg_resources, "resource_stream", mock.MagicMock() - ) as mock_res_stream, mock.patch.object( - test_module.io, "TextIOWrapper", mock.MagicMock() + # Testing package resource access with encoding (text mode) + with mock.patch( + "importlib.resources.open_binary", mock.MagicMock() + ) as mock_open_binary, mock.patch( + "io.TextIOWrapper", mock.MagicMock() ) as mock_text_io_wrapper: test_module.load_resource("abc:def", encoding="utf-8") + mock_open_binary.assert_called_once_with("abc", "def") + mock_text_io_wrapper.assert_called_once() - with mock.patch.object( - test_module.pkg_resources, "resource_stream", mock.MagicMock() - ) as mock_res_stream: + # Testing package resource access without encoding (binary mode) + with mock.patch( + "importlib.resources.open_binary", mock.MagicMock() + ) as mock_open_binary: test_module.load_resource("abc:def", encoding=None) + mock_open_binary.assert_called_once_with("abc", "def") diff --git a/aries_cloudagent/connections/models/conn_record.py b/aries_cloudagent/connections/models/conn_record.py index 99eeaac55f..6415542aca 100644 --- a/aries_cloudagent/connections/models/conn_record.py +++ 
b/aries_cloudagent/connections/models/conn_record.py @@ -41,7 +41,7 @@ class ConnRecord(BaseRecord): class Meta: """ConnRecord metadata.""" - schema_class = "ConnRecordSchema" + schema_class = "MaybeStoredConnRecordSchema" class Protocol(Enum): """Supported Protocols for Connection.""" @@ -637,11 +637,11 @@ def __eq__(self, other: Any) -> bool: return super().__eq__(other) -class ConnRecordSchema(BaseRecordSchema): +class MaybeStoredConnRecordSchema(BaseRecordSchema): """Schema to allow serialization/deserialization of connection records.""" class Meta: - """ConnRecordSchema metadata.""" + """MaybeStoredConnRecordSchema metadata.""" model_class = ConnRecord @@ -763,7 +763,7 @@ class Meta: ) -class StoredConnRecordSchema(ConnRecordSchema): +class ConnRecordSchema(MaybeStoredConnRecordSchema): """Schema representing stored ConnRecords.""" class Meta: diff --git a/aries_cloudagent/core/profile.py b/aries_cloudagent/core/profile.py index 395e320d23..7b2b2f50da 100644 --- a/aries_cloudagent/core/profile.py +++ b/aries_cloudagent/core/profile.py @@ -1,20 +1,17 @@ """Classes for managing profile information within a request context.""" import logging - from abc import ABC, abstractmethod from typing import Any, Mapping, Optional, Type -from weakref import ref -from .event_bus import EventBus, Event from ..config.base import InjectionError -from ..config.injector import BaseInjector, InjectType from ..config.injection_context import InjectionContext +from ..config.injector import BaseInjector, InjectType from ..config.provider import BaseProvider from ..config.settings import BaseSettings from ..utils.classloader import ClassLoader, ClassNotFoundError - from .error import ProfileSessionInactiveError +from .event_bus import Event, EventBus LOGGER = logging.getLogger(__name__) @@ -33,14 +30,9 @@ def __init__( created: bool = False, ): """Initialize a base profile.""" - context = context or InjectionContext() - scope = "profile" - if name: - scope += ":" + name - 
self._context = context.start_scope(scope) + self._context = context or InjectionContext() self._created = created self._name = name or Profile.DEFAULT_NAME - self._context.injector.bind_instance(Profile, ref(self)) @property def backend(self) -> str: diff --git a/aries_cloudagent/core/tests/test_dispatcher.py b/aries_cloudagent/core/tests/test_dispatcher.py index adceb7d5ec..0644c6d3fa 100644 --- a/aries_cloudagent/core/tests/test_dispatcher.py +++ b/aries_cloudagent/core/tests/test_dispatcher.py @@ -590,8 +590,9 @@ def _smaller_scope(): with self.assertRaises(RuntimeError): await responder.send_outbound(None) - with self.assertRaises(RuntimeError): - await responder.send_webhook("test", {}) + with pytest.deprecated_call(): + with self.assertRaises(RuntimeError): + await responder.send_webhook("test", {}) # async def test_dispatch_version_with_degraded_features(self): # profile = make_profile() diff --git a/aries_cloudagent/indy/credx/issuer.py b/aries_cloudagent/indy/credx/issuer.py index 6150d9d9ca..26843c0428 100644 --- a/aries_cloudagent/indy/credx/issuer.py +++ b/aries_cloudagent/indy/credx/issuer.py @@ -456,7 +456,7 @@ async def revoke_credentials( ) failed_crids.add(rev_id) elif rev_id > rev_info["curr_id"]: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s not yet issued", revoc_reg_id, @@ -464,7 +464,7 @@ async def revoke_credentials( ) failed_crids.add(rev_id) elif rev_id in used_ids: - LOGGER.warn( + LOGGER.warning( "Skipping requested credential revocation" "on rev reg id %s, cred rev id=%s already revoked", revoc_reg_id, @@ -500,7 +500,7 @@ async def revoke_credentials( CATEGORY_REV_REG_INFO, revoc_reg_id, for_update=True ) if not rev_reg_upd or not rev_reg_info: - LOGGER.warn( + LOGGER.warning( "Revocation registry missing, skipping update: {}", revoc_reg_id, ) diff --git a/aries_cloudagent/multitenant/admin/routes.py b/aries_cloudagent/multitenant/admin/routes.py index 
eb5f439eea..d4c0546779 100644 --- a/aries_cloudagent/multitenant/admin/routes.py +++ b/aries_cloudagent/multitenant/admin/routes.py @@ -8,7 +8,6 @@ request_schema, response_schema, ) - from marshmallow import ValidationError, fields, validate, validates_schema from ...admin.request_context import AdminRequestContext @@ -19,46 +18,63 @@ from ...messaging.valid import UUID4_EXAMPLE, JSONWebToken from ...multitenant.base import BaseMultitenantManager from ...storage.error import StorageError, StorageNotFoundError +from ...utils.endorsement_setup import attempt_auto_author_with_endorser_setup from ...wallet.error import WalletSettingsError from ...wallet.models.wallet_record import WalletRecord, WalletRecordSchema from ..error import WalletKeyMissingError ACAPY_LIFECYCLE_CONFIG_FLAG_MAP = { - "ACAPY_LOG_LEVEL": "log.level", - "ACAPY_INVITE_PUBLIC": "debug.invite_public", - "ACAPY_PUBLIC_INVITES": "public_invites", "ACAPY_AUTO_ACCEPT_INVITES": "debug.auto_accept_invites", "ACAPY_AUTO_ACCEPT_REQUESTS": "debug.auto_accept_requests", "ACAPY_AUTO_PING_CONNECTION": "auto_ping_connection", - "ACAPY_MONITOR_PING": "debug.monitor_ping", - "ACAPY_AUTO_RESPOND_MESSAGES": "debug.auto_respond_messages", + "ACAPY_AUTO_PROMOTE_AUTHOR_DID": "endorser.auto_promote_author_did", + "ACAPY_AUTO_REQUEST_ENDORSEMENT": "endorser.auto_request", "ACAPY_AUTO_RESPOND_CREDENTIAL_OFFER": "debug.auto_respond_credential_offer", "ACAPY_AUTO_RESPOND_CREDENTIAL_REQUEST": "debug.auto_respond_credential_request", + "ACAPY_AUTO_RESPOND_MESSAGES": "debug.auto_respond_messages", "ACAPY_AUTO_VERIFY_PRESENTATION": "debug.auto_verify_presentation", - "ACAPY_NOTIFY_REVOCATION": "revocation.notify", - "ACAPY_AUTO_REQUEST_ENDORSEMENT": "endorser.auto_request", "ACAPY_AUTO_WRITE_TRANSACTIONS": "endorser.auto_write", "ACAPY_CREATE_REVOCATION_TRANSACTIONS": "endorser.auto_create_rev_reg", + "ACAPY_EMIT_DID_PEER_2": "emit_did_peer_2", + "ACAPY_EMIT_DID_PEER_4": "emit_did_peer_4", + "ACAPY_ENDORSER_ALIAS": 
"endorser.endorser_alias", + "ACAPY_ENDORSER_INVITATION": "endorser.endorser_invitation", + "ACAPY_ENDORSER_PUBLIC_DID": "endorser.endorser_public_did", "ACAPY_ENDORSER_ROLE": "endorser.protocol_role", + "ACAPY_INVITE_PUBLIC": "debug.invite_public", + "ACAPY_LOG_LEVEL": "log.level", + "ACAPY_MONITOR_PING": "debug.monitor_ping", + "ACAPY_NOTIFY_REVOCATION": "revocation.notify", + "ACAPY_PRESERVE_EXCHANGE_RECORDS": "preserve_exchange_records", + "ACAPY_PUBLIC_INVITES": "public_invites", + "ACAPY_REQUESTS_THROUGH_PUBLIC_DID": "requests_through_public_did", } ACAPY_LIFECYCLE_CONFIG_FLAG_ARGS_MAP = { - "log-level": "log.level", - "invite-public": "debug.invite_public", - "public-invites": "public_invites", "auto-accept-invites": "debug.auto_accept_invites", "auto-accept-requests": "debug.auto_accept_requests", + "auto-create-revocation-transactions": "endorser.auto_create_rev_reg", "auto-ping-connection": "auto_ping_connection", - "monitor-ping": "debug.monitor_ping", - "auto-respond-messages": "debug.auto_respond_messages", + "auto-promote-author-did": "endorser.auto_promote_author_did", + "auto-request-endorsement": "endorser.auto_request", "auto-respond-credential-offer": "debug.auto_respond_credential_offer", "auto-respond-credential-request": "debug.auto_respond_credential_request", + "auto-respond-messages": "debug.auto_respond_messages", "auto-verify-presentation": "debug.auto_verify_presentation", - "notify-revocation": "revocation.notify", - "auto-request-endorsement": "endorser.auto_request", "auto-write-transactions": "endorser.auto_write", - "auto-create-revocation-transactions": "endorser.auto_create_rev_reg", + "emit-did-peer-2": "emit_did_peer_2", + "emit-did-peer-4": "emit_did_peer_4", + "endorser-alias": "endorser.endorser_alias", + "endorser-invitation": "endorser.endorser_invitation", "endorser-protocol-role": "endorser.protocol_role", + "endorser-public-did": "endorser.endorser_public_did", + "invite-public": "debug.invite_public", + "log-level": 
"log.level", + "monitor-ping": "debug.monitor_ping", + "notify-revocation": "revocation.notify", + "preserve-exchange-records": "preserve_exchange_records", + "public-invites": "public_invites", + "requests-through-public-did": "requests_through_public_did", } ACAPY_ENDORSER_FLAGS_DEPENDENT_ON_AUTHOR_ROLE = [ @@ -452,6 +468,11 @@ async def wallet_create(request: web.BaseRequest): ) token = await multitenant_mgr.create_auth_token(wallet_record, wallet_key) + + wallet_profile = await multitenant_mgr.get_wallet_profile( + context, wallet_record, extra_settings=settings + ) + await attempt_auto_author_with_endorser_setup(wallet_profile) except BaseError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err diff --git a/aries_cloudagent/multitenant/admin/tests/test_routes.py b/aries_cloudagent/multitenant/admin/tests/test_routes.py index 2d24d902ee..cc549ade09 100644 --- a/aries_cloudagent/multitenant/admin/tests/test_routes.py +++ b/aries_cloudagent/multitenant/admin/tests/test_routes.py @@ -1,13 +1,15 @@ from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock + +import pytest from marshmallow.exceptions import ValidationError -from ...base import BaseMultitenantManager, MultitenantManagerError +from aries_cloudagent.tests import mock + from ....admin.request_context import AdminRequestContext -from ....wallet.models.wallet_record import WalletRecord from ....messaging.models.base import BaseModelError from ....storage.error import StorageError, StorageNotFoundError - +from ....wallet.models.wallet_record import WalletRecord +from ...base import BaseMultitenantManager, MultitenantManagerError from .. 
import routes as test_module @@ -139,6 +141,7 @@ async def test_wallets_list_query(self): } ) + @pytest.mark.asyncio(scope="module") async def test_wallet_create_tenant_settings(self): body = { "wallet_name": "test", @@ -156,6 +159,8 @@ async def test_wallet_create_tenant_settings(self): } self.request.json = mock.CoroutineMock(return_value=body) + test_module.attempt_auto_author_with_endorser_setup = mock.CoroutineMock() + with mock.patch.object(test_module.web, "json_response") as mock_response: wallet_mock = mock.MagicMock( serialize=mock.MagicMock( @@ -173,7 +178,10 @@ async def test_wallet_create_tenant_settings(self): self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock( return_value="test_token" ) - print(self.request["context"]) + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) + await test_module.wallet_create(self.request) self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -195,6 +203,8 @@ async def test_wallet_create_tenant_settings(self): mock_response.assert_called_once_with( {**test_module.format_wallet_record(wallet_mock), "token": "test_token"} ) + assert self.mock_multitenant_mgr.get_wallet_profile.called + assert test_module.attempt_auto_author_with_endorser_setup.called async def test_wallet_create(self): body = { @@ -207,6 +217,7 @@ async def test_wallet_create(self): "wallet_dispatch_type": "base", } self.request.json = mock.CoroutineMock(return_value=body) + test_module.attempt_auto_author_with_endorser_setup = mock.CoroutineMock() with mock.patch.object(test_module.web, "json_response") as mock_response: wallet_mock = mock.MagicMock( @@ -225,7 +236,10 @@ async def test_wallet_create(self): self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock( return_value="test_token" ) - print(self.request["context"]) + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) + await 
test_module.wallet_create(self.request) self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -242,8 +256,13 @@ async def test_wallet_create(self): wallet_mock, body["wallet_key"] ) mock_response.assert_called_once_with( - {**test_module.format_wallet_record(wallet_mock), "token": "test_token"} + { + **test_module.format_wallet_record(wallet_mock), + "token": "test_token", + } ) + assert self.mock_multitenant_mgr.get_wallet_profile.called + assert test_module.attempt_auto_author_with_endorser_setup.called async def test_wallet_create_x(self): body = {} @@ -277,6 +296,9 @@ async def test_wallet_create_optional_default_fields(self): return_value=mock.MagicMock() ) self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock() + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) await test_module.wallet_create(self.request) self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -292,6 +314,7 @@ async def test_wallet_create_optional_default_fields(self): }, WalletRecord.MODE_MANAGED, ) + assert self.mock_multitenant_mgr.get_wallet_profile.called async def test_wallet_create_raw_key_derivation(self): body = { @@ -306,6 +329,9 @@ async def test_wallet_create_raw_key_derivation(self): return_value=mock.MagicMock() ) self.mock_multitenant_mgr.create_auth_token = mock.CoroutineMock() + self.mock_multitenant_mgr.get_wallet_profile = mock.CoroutineMock( + return_value=mock.MagicMock() + ) await test_module.wallet_create(self.request) self.mock_multitenant_mgr.create_wallet.assert_called_once_with( @@ -319,6 +345,7 @@ async def test_wallet_create_raw_key_derivation(self): }, WalletRecord.MODE_MANAGED, ) + assert self.mock_multitenant_mgr.get_wallet_profile.called async def test_wallet_update_tenant_settings(self): self.request.match_info = {"wallet_id": "test-wallet-id"} @@ -651,6 +678,7 @@ async def test_wallet_create_token_x(self): ) await test_module.wallet_create_token(self.request) + 
@pytest.mark.asyncio(scope="module") async def test_wallet_remove_managed(self): self.request.has_body = False self.request.match_info = {"wallet_id": "dummy"} @@ -662,12 +690,13 @@ async def test_wallet_remove_managed(self): ): self.mock_multitenant_mgr.remove_wallet = mock.CoroutineMock() - await test_module.wallet_remove(self.request) + result = await test_module.wallet_remove(self.request) self.mock_multitenant_mgr.remove_wallet.assert_called_once_with( "dummy", None ) mock_response.assert_called_once_with({}) + assert result == mock_response.return_value async def test_wallet_remove_unmanaged(self): self.request.match_info = {"wallet_id": "dummy"} @@ -680,12 +709,13 @@ async def test_wallet_remove_unmanaged(self): ): self.mock_multitenant_mgr.remove_wallet = mock.CoroutineMock() - await test_module.wallet_remove(self.request) + result = await test_module.wallet_remove(self.request) self.mock_multitenant_mgr.remove_wallet.assert_called_once_with( "dummy", "dummy_key" ) mock_response.assert_called_once_with({}) + assert result == mock_response.return_value async def test_wallet_remove_managed_wallet_key_provided_throws(self): self.request.match_info = {"wallet_id": "dummy"} diff --git a/aries_cloudagent/multitenant/askar_profile_manager.py b/aries_cloudagent/multitenant/askar_profile_manager.py index 93f2456609..22a118cf24 100644 --- a/aries_cloudagent/multitenant/askar_profile_manager.py +++ b/aries_cloudagent/multitenant/askar_profile_manager.py @@ -2,6 +2,7 @@ from typing import Iterable, Optional, cast +from ..askar.profile_anon import AskarAnoncredsProfile from ..askar.profile import AskarProfile from ..config.injection_context import InjectionContext from ..config.wallet import wallet_config @@ -104,6 +105,14 @@ async def get_wallet_profile( assert self._multitenant_profile.opened + # return anoncreds profile if explicitly set as wallet type + if profile_context.settings.get("wallet.type") == "askar-anoncreds": + return AskarAnoncredsProfile( + 
self._multitenant_profile.opened, + profile_context, + profile_id=wallet_record.wallet_id, + ) + return AskarProfile( self._multitenant_profile.opened, profile_context, diff --git a/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py b/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py index 2070f835ab..30892c1b2b 100644 --- a/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py +++ b/aries_cloudagent/multitenant/tests/test_askar_profile_manager.py @@ -99,6 +99,42 @@ def side_effect(context, provision): == wallet_record.wallet_id ) + async def test_get_anoncreds_wallet_profile_should_open_store_and_return_anoncreds_profile( + self, + ): + askar_profile_mock_name = "AskarProfile" + wallet_record = WalletRecord( + wallet_id="test", + settings={ + "wallet.recreate": True, + "wallet.seed": "test_seed", + "wallet.name": "test_name", + "wallet.type": "askar-anoncreds", + "wallet.rekey": "test_rekey", + }, + ) + + with mock.patch( + "aries_cloudagent.multitenant.askar_profile_manager.wallet_config" + ) as wallet_config, mock.patch( + "aries_cloudagent.multitenant.askar_profile_manager.AskarAnoncredsProfile", + ) as AskarAnoncredsProfile: + sub_wallet_profile_context = InjectionContext() + sub_wallet_profile = AskarAnoncredsProfile(None, None) + sub_wallet_profile.context.copy.return_value = sub_wallet_profile_context + + def side_effect(context, provision): + sub_wallet_profile.name = askar_profile_mock_name + return sub_wallet_profile, None + + wallet_config.side_effect = side_effect + + await self.manager.get_wallet_profile(self.profile.context, wallet_record) + + AskarAnoncredsProfile.assert_called_with( + sub_wallet_profile.opened, sub_wallet_profile_context, profile_id="test" + ) + async def test_get_wallet_profile_should_create_profile(self): wallet_record = WalletRecord(wallet_id="test", settings={}) create_profile_stub = asyncio.Future() diff --git a/aries_cloudagent/protocols/connections/v1_0/routes.py 
b/aries_cloudagent/protocols/connections/v1_0/routes.py index c779955b01..ad0503efa7 100644 --- a/aries_cloudagent/protocols/connections/v1_0/routes.py +++ b/aries_cloudagent/protocols/connections/v1_0/routes.py @@ -16,7 +16,7 @@ from ....admin.request_context import AdminRequestContext from ....cache.base import BaseCache -from ....connections.models.conn_record import ConnRecord, StoredConnRecordSchema +from ....connections.models.conn_record import ConnRecord, ConnRecordSchema from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema from ....messaging.valid import ( @@ -48,7 +48,7 @@ class ConnectionListSchema(OpenAPISchema): """Result schema for connection list.""" results = fields.List( - fields.Nested(StoredConnRecordSchema()), + fields.Nested(ConnRecordSchema()), required=True, metadata={"description": "List of connection records"}, ) @@ -233,7 +233,7 @@ class ConnectionStaticResultSchema(OpenAPISchema): "example": INDY_RAW_PUBLIC_KEY_EXAMPLE, }, ) - record = fields.Nested(StoredConnRecordSchema(), required=True) + record = fields.Nested(ConnRecordSchema(), required=True) class ConnectionsListQueryStringSchema(OpenAPISchema): @@ -485,7 +485,7 @@ async def connections_list(request: web.BaseRequest): @docs(tags=["connection"], summary="Fetch a single connection record") @match_info_schema(ConnectionsConnIdMatchInfoSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def connections_retrieve(request: web.BaseRequest): """Request handler for fetching a single connection record. 
@@ -670,7 +670,7 @@ async def connections_create_invitation(request: web.BaseRequest): ) @querystring_schema(ReceiveInvitationQueryStringSchema()) @request_schema(ReceiveInvitationRequestSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def connections_receive_invitation(request: web.BaseRequest): """Request handler for receiving a new connection invitation. @@ -711,7 +711,7 @@ async def connections_receive_invitation(request: web.BaseRequest): ) @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @querystring_schema(AcceptInvitationQueryStringSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def connections_accept_invitation(request: web.BaseRequest): """Request handler for accepting a stored connection invitation. @@ -761,7 +761,7 @@ async def connections_accept_invitation(request: web.BaseRequest): ) @match_info_schema(ConnectionsConnIdMatchInfoSchema()) @querystring_schema(AcceptRequestQueryStringSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def connections_accept_request(request: web.BaseRequest): """Request handler for accepting a stored connection request. 
diff --git a/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py b/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py index 0fd802b2b1..87369fbaf2 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/handlers/tests/test_response_handler.py @@ -77,7 +77,7 @@ async def asyncSetUp(self): did_doc_attach=self.did_doc_attach, ) - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") async def test_called(self, mock_didx_mgr): mock_didx_mgr.return_value.accept_response = mock.CoroutineMock() @@ -91,7 +91,7 @@ async def test_called(self, mock_didx_mgr): ) assert not responder.messages - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") async def test_called_auto_ping(self, mock_didx_mgr): self.ctx.update_settings({"auto_ping_connection": True}) @@ -109,7 +109,7 @@ async def test_called_auto_ping(self, mock_didx_mgr): result, target = messages[0] assert isinstance(result, Ping) - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") @mock.patch.object(connection_target, "ConnectionTarget") async def test_problem_report(self, mock_conn_target, mock_didx_mgr): @@ -146,7 +146,7 @@ async def test_problem_report(self, mock_conn_target, mock_didx_mgr): ) assert target == {"target_list": [mock_conn_target]} - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") @mock.patch.object(connection_target, "ConnectionTarget") async def test_problem_report_did_doc( @@ -193,7 +193,7 @@ async def test_problem_report_did_doc( ) assert target == {"target_list": [mock_conn_target]} - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") @mock.patch.object(test_module, "DIDXManager") @mock.patch.object(connection_target, 
"ConnectionTarget") async def test_problem_report_did_doc_no_conn_target( diff --git a/aries_cloudagent/protocols/didexchange/v1_0/manager.py b/aries_cloudagent/protocols/didexchange/v1_0/manager.py index 60bdbc8808..65d8051b06 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/manager.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/manager.py @@ -6,6 +6,7 @@ from did_peer_4 import LONG_PATTERN, long_to_short +from ....admin.server import AdminResponder from ....connections.base_manager import BaseConnectionManager from ....connections.models.conn_record import ConnRecord from ....connections.models.connection_target import ConnectionTarget @@ -150,7 +151,8 @@ async def receive_invitation( if conn_rec.accept == ConnRecord.ACCEPT_AUTO: request = await self.create_request(conn_rec, mediation_id=mediation_id) - responder = self.profile.inject_or(BaseResponder) + base_responder = self.profile.inject(BaseResponder) + responder = AdminResponder(self.profile, base_responder.send_fn) if responder: await responder.send_reply( request, diff --git a/aries_cloudagent/protocols/didexchange/v1_0/routes.py b/aries_cloudagent/protocols/didexchange/v1_0/routes.py index 5c4bb807fa..4c3197bd57 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/routes.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/routes.py @@ -14,7 +14,7 @@ from marshmallow import fields from ....admin.request_context import AdminRequestContext -from ....connections.models.conn_record import ConnRecord, StoredConnRecordSchema +from ....connections.models.conn_record import ConnRecord, ConnRecordSchema from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema from ....messaging.valid import ( @@ -185,9 +185,11 @@ class DIDXRejectRequestSchema(OpenAPISchema): """Parameters and validators for reject-request request string.""" reason = fields.Str( - description="Reason for rejecting the DID Exchange", + metadata={ + "description": "Reason for 
rejecting the DID Exchange", + "example": "Request rejected", + }, required=False, - example="Request rejected", ) @@ -197,7 +199,7 @@ class DIDXRejectRequestSchema(OpenAPISchema): ) @match_info_schema(DIDXConnIdMatchInfoSchema()) @querystring_schema(DIDXAcceptInvitationQueryStringSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def didx_accept_invitation(request: web.BaseRequest): """Request handler for accepting a stored connection invitation. @@ -243,7 +245,7 @@ async def didx_accept_invitation(request: web.BaseRequest): summary="Create and send a request against public DID's implicit invitation", ) @querystring_schema(DIDXCreateRequestImplicitQueryStringSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def didx_create_request_implicit(request: web.BaseRequest): """Request handler for creating and sending a request to an implicit invitation. @@ -294,7 +296,7 @@ async def didx_create_request_implicit(request: web.BaseRequest): ) @querystring_schema(DIDXReceiveRequestImplicitQueryStringSchema()) @request_schema(DIDXRequestSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def didx_receive_request_implicit(request: web.BaseRequest): """Request handler for receiving a request against public DID's implicit invitation. @@ -340,7 +342,7 @@ async def didx_receive_request_implicit(request: web.BaseRequest): ) @match_info_schema(DIDXConnIdMatchInfoSchema()) @querystring_schema(DIDXAcceptRequestQueryStringSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def didx_accept_request(request: web.BaseRequest): """Request handler for accepting a stored connection request. 
@@ -385,7 +387,7 @@ async def didx_accept_request(request: web.BaseRequest): ) @match_info_schema(DIDXConnIdMatchInfoSchema()) @request_schema(DIDXRejectRequestSchema()) -@response_schema(StoredConnRecordSchema(), 200, description="") +@response_schema(ConnRecordSchema(), 200, description="") async def didx_reject(request: web.BaseRequest): """Abandon or reject a DID Exchange.""" context: AdminRequestContext = request["context"] diff --git a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py index 1c08be393d..5f70dbc2a8 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py @@ -5,6 +5,7 @@ from aries_cloudagent.tests import mock +from .....admin.server import AdminResponder from .....cache.base import BaseCache from .....cache.in_memory import InMemoryCache from .....connections.models.conn_record import ConnRecord @@ -72,7 +73,7 @@ def make_did_doc(self, did, verkey): class TestDidExchangeManager(IsolatedAsyncioTestCase, TestConfig): async def asyncSetUp(self): self.responder = MockResponder() - + self.responder.send_fn = mock.CoroutineMock() self.oob_mock = mock.MagicMock( clean_finished_oob_record=mock.CoroutineMock(return_value=None) ) @@ -181,7 +182,9 @@ async def test_receive_invitation(self): test_module, "AttachDecorator", autospec=True ) as mock_attach_deco, mock.patch.object( self.multitenant_mgr, "get_default_mediator" - ) as mock_get_default_mediator: + ) as mock_get_default_mediator, mock.patch.object( + AdminResponder, "send_reply" + ) as mock_send_reply: mock_get_default_mediator.return_value = mediation_record invi_rec = await self.oob_manager.create_invitation( my_endpoint="testendpoint", @@ -195,6 +198,7 @@ async def test_receive_invitation(self): ) invitee_record = await self.manager.receive_invitation(invi_msg) assert invitee_record.state == 
ConnRecord.State.REQUEST.rfc23 + assert mock_send_reply.called async def test_receive_invitation_oob_public_did(self): async with self.profile.session() as session: @@ -211,7 +215,9 @@ async def test_receive_invitation_oob_public_did(self): self.multitenant_mgr, "get_default_mediator" ) as mock_get_default_mediator, mock.patch.object( self.manager, "resolve_connection_targets", mock.CoroutineMock() - ) as mock_resolve_targets: + ) as mock_resolve_targets, mock.patch.object( + AdminResponder, "send_reply" + ) as mock_send_reply: mock_resolve_targets.return_value = [ mock.MagicMock(recipient_keys=["test"]) ] @@ -231,6 +237,7 @@ async def test_receive_invitation_oob_public_did(self): invi_msg, their_public_did=public_did_info.did ) assert invitee_record.state == ConnRecord.State.REQUEST.rfc23 + assert mock_send_reply.called async def test_receive_invitation_no_auto_accept(self): async with self.profile.session() as session: diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py index 1013320117..5631c161e2 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py @@ -10,7 +10,6 @@ request_schema, response_schema, ) - from marshmallow import fields, validate from ....admin.request_context import AdminRequestContext @@ -23,17 +22,11 @@ from ....messaging.models.base import BaseModelError from ....messaging.models.openapi import OpenAPISchema from ....messaging.valid import UUID4_EXAMPLE -from ....protocols.connections.v1_0.manager import ConnectionManager -from ....protocols.connections.v1_0.messages.connection_invitation import ( - ConnectionInvitation, -) -from ....protocols.out_of_band.v1_0.manager import OutOfBandManager -from ....protocols.out_of_band.v1_0.messages.invitation import InvitationMessage from ....storage.error import StorageError, StorageNotFoundError +from ....utils.endorsement_setup 
import attempt_auto_author_with_endorser_setup from .manager import TransactionManager, TransactionManagerError from .models.transaction_record import TransactionRecord, TransactionRecordSchema from .transaction_jobs import TransactionJob -from .util import get_endorser_connection_id, is_author_role LOGGER = logging.getLogger(__name__) @@ -707,85 +700,7 @@ def register_events(event_bus: EventBus): async def on_startup_event(profile: Profile, event: Event): """Handle any events we need to support.""" - # auto setup is only for authors - if not is_author_role(profile): - return - - # see if we have an invitation to connect to the endorser - endorser_invitation = profile.settings.get_value("endorser.endorser_invitation") - if not endorser_invitation: - # no invitation, we can't connect automatically - return - - # see if we need to initiate an endorser connection - endorser_alias = profile.settings.get_value("endorser.endorser_alias") - if not endorser_alias: - # no alias is specified for the endorser connection - # note that alias is required if invitation is specified - return - - connection_id = await get_endorser_connection_id(profile) - if connection_id: - # there is already a connection - return - - endorser_did = profile.settings.get_value("endorser.endorser_public_did") - if not endorser_did: - # no DID, we can connect but we can't properly setup the connection metadata - # note that DID is required if invitation is specified - return - - try: - # OK, we are an author, we have no endorser connection but we have enough info - # to automatically initiate the connection - invite = InvitationMessage.from_url(endorser_invitation) - if invite: - oob_mgr = OutOfBandManager(profile) - oob_record = await oob_mgr.receive_invitation( - invitation=invite, - auto_accept=True, - alias=endorser_alias, - ) - async with profile.session() as session: - conn_record = await ConnRecord.retrieve_by_id( - session, oob_record.connection_id - ) - else: - invite = 
ConnectionInvitation.from_url(endorser_invitation) - if invite: - conn_mgr = ConnectionManager(profile) - conn_record = await conn_mgr.receive_invitation( - invitation=invite, - auto_accept=True, - alias=endorser_alias, - ) - else: - raise Exception( - "Failed to establish endorser connection, invalid " - "invitation format." - ) - - # configure the connection role and info (don't need to wait for the connection) - transaction_mgr = TransactionManager(profile) - await transaction_mgr.set_transaction_my_job( - record=conn_record, - transaction_my_job=TransactionJob.TRANSACTION_AUTHOR.name, - ) - - async with profile.session() as session: - value = await conn_record.metadata_get(session, "endorser_info") - if value: - value["endorser_did"] = endorser_did - value["endorser_name"] = endorser_alias - else: - value = {"endorser_did": endorser_did, "endorser_name": endorser_alias} - await conn_record.metadata_set(session, key="endorser_info", value=value) - - except Exception: - # log the error, but continue - LOGGER.exception( - "Error accepting endorser invitation/configuring endorser connection: %s", - ) + await attempt_auto_author_with_endorser_setup(profile) async def on_shutdown_event(profile: Profile, event: Event): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py index c1949d2486..9640101e5a 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py @@ -132,7 +132,7 @@ async def create_proposal( self, cred_ex_record: V20CredExRecord, proposal_data: Mapping ) -> CredFormatAttachment: """Create linked data proof credential proposal.""" - manager = self.profile.inject(VcLdpManager) + manager = VcLdpManager(self.profile) detail = LDProofVCDetail.deserialize(proposal_data) assert detail.options and isinstance(detail.options, 
LDProofVCOptions) assert detail.credential and isinstance(detail.credential, VerifiableCredential) @@ -164,7 +164,7 @@ async def create_offer( # but also when we create an offer (manager does some weird stuff) offer_data = cred_proposal_message.attachment(LDProofCredFormatHandler.format) detail = LDProofVCDetail.deserialize(offer_data) - manager = self.profile.inject(VcLdpManager) + manager = VcLdpManager(self.profile) assert detail.options and isinstance(detail.options, LDProofVCOptions) assert detail.credential and isinstance(detail.credential, VerifiableCredential) try: @@ -224,7 +224,7 @@ async def create_request( ) detail = LDProofVCDetail.deserialize(request_data) - manager = self.profile.inject(VcLdpManager) + manager = VcLdpManager(self.profile) assert detail.options and isinstance(detail.options, LDProofVCOptions) assert detail.credential and isinstance(detail.credential, VerifiableCredential) try: @@ -290,7 +290,7 @@ async def issue_credential( LDProofCredFormatHandler.format ) detail = LDProofVCDetail.deserialize(detail_dict) - manager = self.profile.inject(VcLdpManager) + manager = VcLdpManager(self.profile) assert detail.options and isinstance(detail.options, LDProofVCOptions) assert detail.credential and isinstance(detail.credential, VerifiableCredential) try: @@ -381,7 +381,7 @@ async def store_credential( credential = VerifiableCredential.deserialize(cred_dict, unknown=INCLUDE) # Get signature suite, proof purpose and document loader - manager = self.profile.inject(VcLdpManager) + manager = VcLdpManager(self.profile) try: result = await manager.verify_credential(credential) except VcLdpManagerError as err: diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py index be1d5c86ec..3472f1f3f5 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py +++ 
b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py @@ -1,11 +1,11 @@ from copy import deepcopy -from .......vc.ld_proofs.error import LinkedDataProofException from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock from unittest.mock import patch + from marshmallow import ValidationError -from .. import handler as test_module +from aries_cloudagent.tests import mock + from .......core.in_memory import InMemoryProfile from .......messaging.decorators.attach_decorator import AttachDecorator from .......storage.vc_holder.base import VCHolder @@ -15,6 +15,7 @@ SECURITY_CONTEXT_BBS_URL, SECURITY_CONTEXT_ED25519_2020_URL, ) +from .......vc.ld_proofs.error import LinkedDataProofException from .......vc.tests.document_loader import custom_document_loader from .......vc.vc_ld.manager import VcLdpManager from .......vc.vc_ld.models.credential import VerifiableCredential @@ -39,8 +40,9 @@ from ....models.cred_ex_record import V20CredExRecord from ....models.detail.ld_proof import V20CredExRecordLDProof from ...handler import V20CredFormatError -from ..handler import LDProofCredFormatHandler +from .. 
import handler as test_module from ..handler import LOGGER as LD_PROOF_LOGGER +from ..handler import LDProofCredFormatHandler from ..models.cred_detail import LDProofVCDetail TEST_DID_SOV = "did:sov:LjgpST2rjsoxYegQDRm7EL" @@ -249,7 +251,7 @@ async def test_receive_proposal(self): async def test_create_offer(self): with mock.patch.object( - self.manager, + VcLdpManager, "assert_can_issue_with_id_and_proof_type", mock.CoroutineMock(), ) as mock_can_issue, patch.object( @@ -289,7 +291,7 @@ async def test_create_offer_adds_bbs_context(self): ) with mock.patch.object( - self.manager, + VcLdpManager, "assert_can_issue_with_id_and_proof_type", mock.CoroutineMock(), ), patch.object(test_module, "get_properties_without_context", return_value=[]): @@ -314,7 +316,7 @@ async def test_create_offer_adds_ed25519_2020_context(self): ) with mock.patch.object( - self.manager, + VcLdpManager, "assert_can_issue_with_id_and_proof_type", mock.CoroutineMock(), ), patch.object(test_module, "get_properties_without_context", return_value=[]): @@ -585,7 +587,7 @@ async def test_issue_credential(self): ) with mock.patch.object( - self.manager, + VcLdpManager, "issue", mock.CoroutineMock( return_value=VerifiableCredential.deserialize(LD_PROOF_VC) @@ -843,7 +845,7 @@ async def test_store_credential(self): self.holder.store_credential = mock.CoroutineMock() with mock.patch.object( - self.manager, + VcLdpManager, "verify_credential", mock.CoroutineMock(return_value=DocumentVerificationResult(verified=True)), ) as mock_verify_credential: diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py index 207cbca5e0..d7f2b0dd2c 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py @@ -446,7 +446,7 @@ async def verify_pres(self, pres_ex_record: V20PresExRecord) -> V20PresExRecord: pres_request = 
pres_ex_record.pres_request.attachment( DIFPresFormatHandler.format ) - manager = self.profile.inject(VcLdpManager) + manager = VcLdpManager(self.profile) options = LDProofVCOptions.deserialize(pres_request["options"]) if not options.challenge: diff --git a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py index a027010937..60c1ffd1fc 100644 --- a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py +++ b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py @@ -330,15 +330,15 @@ async def send_entry( # Ledger rejected transaction request: client request invalid: # InvalidClientRequest(...) # In this scenario we try to post a correction - LOGGER.warn("Retry ledger update/fix due to error") - LOGGER.warn(err) + LOGGER.warning("Retry ledger update/fix due to error") + LOGGER.warning(err) (_, _, res) = await self.fix_ledger_entry( profile, True, ledger.pool.genesis_txns, ) rev_entry_res = {"result": res} - LOGGER.warn("Ledger update/fix applied") + LOGGER.warning("Ledger update/fix applied") elif "InvalidClientTaaAcceptanceError" in err.roll_up: # if no write access (with "InvalidClientTaaAcceptanceError") # e.g. 
aries_cloudagent.ledger.error.LedgerTransactionError: diff --git a/aries_cloudagent/revocation/recover.py b/aries_cloudagent/revocation/recover.py index f2bf38267c..eca105a1b4 100644 --- a/aries_cloudagent/revocation/recover.py +++ b/aries_cloudagent/revocation/recover.py @@ -100,7 +100,7 @@ async def generate_ledger_rrrecovery_txn( set_revoked = set(set_revoked) mismatch = prev_revoked - set_revoked if mismatch: - LOGGER.warn( + LOGGER.warning( "Credential index(es) revoked on the ledger, but not in wallet: %s", mismatch, ) diff --git a/aries_cloudagent/revocation_anoncreds/manager.py b/aries_cloudagent/revocation_anoncreds/manager.py index a70a797998..a846396146 100644 --- a/aries_cloudagent/revocation_anoncreds/manager.py +++ b/aries_cloudagent/revocation_anoncreds/manager.py @@ -47,6 +47,7 @@ async def revoke_credential_by_cred_ex_id( thread_id: str = None, connection_id: str = None, comment: str = None, + options: Optional[dict] = None, ): """Revoke a credential by its credential exchange identifier at issue. @@ -79,6 +80,7 @@ async def revoke_credential_by_cred_ex_id( thread_id=thread_id, connection_id=connection_id, comment=comment, + options=options, ) async def revoke_credential( @@ -91,6 +93,7 @@ async def revoke_credential( thread_id: str = None, connection_id: str = None, comment: str = None, + options: Optional[dict] = None, ): """Revoke a credential. 
@@ -120,7 +123,11 @@ async def revoke_credential( if result.curr and result.revoked: await self.set_cred_revoked_state(rev_reg_id, result.revoked) await revoc.update_revocation_list( - rev_reg_id, result.prev, result.curr, result.revoked + rev_reg_id, + result.prev, + result.curr, + result.revoked, + options=options, ) await notify_revocation_published_event( self._profile, rev_reg_id, [cred_rev_id] @@ -128,7 +135,6 @@ async def revoke_credential( else: await revoc.mark_pending_revocations(rev_reg_id, int(cred_rev_id)) - if notify: thread_id = thread_id or f"indy::{rev_reg_id}::{cred_rev_id}" rev_notify_rec = RevNotificationRecord( @@ -185,6 +191,7 @@ async def update_rev_reg_revoked_state( async def publish_pending_revocations( self, rrid2crid: Optional[Mapping[Text, Sequence[Text]]] = None, + options: Optional[dict] = None, ) -> Mapping[Text, Sequence[Text]]: """Publish pending revocations to the ledger. @@ -208,6 +215,7 @@ async def publish_pending_revocations( Returns: mapping from each revocation registry id to its cred rev ids published. 
""" + options = options or {} published_crids = {} revoc = AnonCredsRevocation(self._profile) @@ -226,7 +234,7 @@ async def publish_pending_revocations( if result.curr and result.revoked: await self.set_cred_revoked_state(rrid, result.revoked) await revoc.update_revocation_list( - rrid, result.prev, result.curr, result.revoked + rrid, result.prev, result.curr, result.revoked, options ) published_crids[rrid] = sorted(result.revoked) await notify_revocation_published_event( diff --git a/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py b/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py index 49a68b9209..bab3909b83 100644 --- a/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py +++ b/aries_cloudagent/revocation_anoncreds/models/issuer_cred_rev_record.py @@ -154,15 +154,15 @@ class Meta: ) rev_reg_id = fields.Str( required=False, - description="Revocation registry identifier", + metadata={"description": "Revocation registry identifier"}, ) cred_def_id = fields.Str( required=False, - description="Credential definition identifier", + metadata={"description": "Credential definition identifier"}, ) cred_rev_id = fields.Str( required=False, - description="Credential revocation identifier", + metadata={"description": "Credential revocation identifier"}, ) cred_ex_version = fields.Str( required=False, metadata={"description": "Credential exchange version"} diff --git a/aries_cloudagent/revocation_anoncreds/recover.py b/aries_cloudagent/revocation_anoncreds/recover.py index 2a891d265a..2d9eca8755 100644 --- a/aries_cloudagent/revocation_anoncreds/recover.py +++ b/aries_cloudagent/revocation_anoncreds/recover.py @@ -102,7 +102,7 @@ async def generate_ledger_rrrecovery_txn( set_revoked = set(set_revoked) mismatch = prev_revoked - set_revoked if mismatch: - LOGGER.warn( + LOGGER.warning( "Credential index(es) revoked on the ledger, but not in wallet: %s", mismatch, ) diff --git 
a/aries_cloudagent/revocation_anoncreds/routes.py b/aries_cloudagent/revocation_anoncreds/routes.py index b3b5b9e52a..cac93d0104 100644 --- a/aries_cloudagent/revocation_anoncreds/routes.py +++ b/aries_cloudagent/revocation_anoncreds/routes.py @@ -12,28 +12,29 @@ request_schema, response_schema, ) - from marshmallow import fields, validate, validates_schema from marshmallow.exceptions import ValidationError -from ..anoncreds.models.anoncreds_revocation import RevRegDefState - -from ..anoncreds.default.legacy_indy.registry import LegacyIndyRegistry +from ..admin.request_context import AdminRequestContext from ..anoncreds.base import ( AnonCredsObjectNotFound, AnonCredsRegistrationError, AnonCredsResolutionError, ) +from ..anoncreds.default.legacy_indy.registry import LegacyIndyRegistry from ..anoncreds.issuer import AnonCredsIssuerError +from ..anoncreds.models.anoncreds_revocation import RevRegDefState from ..anoncreds.revocation import AnonCredsRevocation, AnonCredsRevocationError +from ..anoncreds.routes import ( + create_transaction_for_endorser_description, + endorser_connection_id_description, +) from ..askar.profile import AskarProfile -from ..indy.models.revocation import IndyRevRegDef - -from ..admin.request_context import AdminRequestContext from ..indy.issuer import IndyIssuerError +from ..indy.models.revocation import IndyRevRegDef from ..ledger.base import BaseLedger -from ..ledger.multiple_ledger.base_manager import BaseMultipleLedgerManager from ..ledger.error import LedgerError +from ..ledger.multiple_ledger.base_manager import BaseMultipleLedgerManager from ..messaging.models.openapi import OpenAPISchema from ..messaging.valid import ( INDY_CRED_DEF_ID_EXAMPLE, @@ -48,28 +49,27 @@ UUID4_VALIDATE, WHOLE_NUM_EXAMPLE, WHOLE_NUM_VALIDATE, + UUIDFour, ) from ..protocols.endorse_transaction.v1_0.models.transaction_record import ( TransactionRecordSchema, ) -from ..storage.error import StorageError, StorageNotFoundError - from ..revocation.error import 
RevocationError +from ..revocation.models.issuer_rev_reg_record import ( + IssuerRevRegRecord, + IssuerRevRegRecordSchema, +) +from ..storage.error import StorageError, StorageNotFoundError from .manager import RevocationManager, RevocationManagerError from .models.issuer_cred_rev_record import ( IssuerCredRevRecord, IssuerCredRevRecordSchema, ) -from ..revocation.models.issuer_rev_reg_record import ( - IssuerRevRegRecord, - IssuerRevRegRecordSchema, -) - LOGGER = logging.getLogger(__name__) -class RevocationModuleResponseSchema(OpenAPISchema): +class RevocationAnoncredsModuleResponseSchema(OpenAPISchema): """Response schema for Revocation Module.""" @@ -191,113 +191,6 @@ def validate_fields(self, data, **kwargs): ) -class RevokeRequestSchema(CredRevRecordQueryStringSchema): - """Parameters and validators for revocation request.""" - - @validates_schema - def validate_fields(self, data, **kwargs): - """Validate fields - connection_id and thread_id must be present if notify.""" - super().validate_fields(data, **kwargs) - - notify = data.get("notify") - connection_id = data.get("connection_id") - notify_version = data.get("notify_version", "v1_0") - - if notify and not connection_id: - raise ValidationError( - "Request must specify connection_id if notify is true" - ) - if notify and not notify_version: - raise ValidationError( - "Request must specify notify_version if notify is true" - ) - - publish = fields.Boolean( - required=False, - metadata={ - "description": ( - "(True) publish revocation to ledger immediately, or (default, False)" - " mark it pending" - ) - }, - ) - notify = fields.Boolean( - required=False, - metadata={"description": "Send a notification to the credential recipient"}, - ) - notify_version = fields.String( - validate=validate.OneOf(["v1_0", "v2_0"]), - required=False, - metadata={ - "description": ( - "Specify which version of the revocation notification should be sent" - ) - }, - ) - connection_id = fields.Str( - required=False, - 
validate=UUID4_VALIDATE, - metadata={ - "description": ( - "Connection ID to which the revocation notification will be sent;" - " required if notify is true" - ), - "example": UUID4_EXAMPLE, - }, - ) - thread_id = fields.Str( - required=False, - metadata={ - "description": ( - "Thread ID of the credential exchange message thread resulting in the" - " credential now being revoked; required if notify is true" - ) - }, - ) - comment = fields.Str( - required=False, - metadata={ - "description": "Optional comment to include in revocation notification" - }, - ) - - -class PublishRevocationsSchema(OpenAPISchema): - """Request and result schema for revocation publication API call.""" - - rrid2crid = fields.Dict( - required=False, - keys=fields.Str(metadata={"example": INDY_REV_REG_ID_EXAMPLE}), - values=fields.List( - fields.Str( - validate=INDY_CRED_REV_ID_VALIDATE, - metadata={ - "description": "Credential revocation identifier", - "example": INDY_CRED_REV_ID_EXAMPLE, - }, - ) - ), - metadata={"description": "Credential revocation ids by revocation registry id"}, - ) - - -class TxnOrPublishRevocationsResultSchema(OpenAPISchema): - """Result schema for credential definition send request.""" - - sent = fields.Nested( - PublishRevocationsSchema(), - required=False, - metadata={"definition": "Content sent"}, - ) - txn = fields.Nested( - TransactionRecordSchema(), - required=False, - metadata={ - "description": "Revocation registry revocations transaction to endorse" - }, - ) - - class ClearPendingRevocationsRequestSchema(OpenAPISchema): """Request schema for clear pending revocations API call.""" @@ -491,12 +384,143 @@ class RevRegConnIdMatchInfoSchema(OpenAPISchema): ) +class PublishRevocationsOptions(OpenAPISchema): + """Options for publishing revocations to ledger.""" + + endorser_connection_id = fields.Str( + metadata={ + "description": endorser_connection_id_description, # noqa: F821 + "required": False, + "example": UUIDFour.EXAMPLE, + } + ) + + 
create_transaction_for_endorser = fields.Bool( + metadata={ + "description": create_transaction_for_endorser_description, + "required": False, + "example": False, + } + ) + + +class PublishRevocationsSchema(OpenAPISchema): + """Request and result schema for revocation publication API call.""" + + rrid2crid = fields.Dict( + required=False, + keys=fields.Str(metadata={"example": INDY_REV_REG_ID_EXAMPLE}), + values=fields.List( + fields.Str( + validate=INDY_CRED_REV_ID_VALIDATE, + metadata={ + "description": "Credential revocation identifier", + "example": INDY_CRED_REV_ID_EXAMPLE, + }, + ) + ), + metadata={"description": "Credential revocation ids by revocation registry id"}, + ) + options = fields.Nested(PublishRevocationsOptions()) + + +class PublishRevocationsResultSchema(OpenAPISchema): + """Result schema for credential definition send request.""" + + rrid2crid = fields.Dict( + required=False, + keys=fields.Str(metadata={"example": INDY_REV_REG_ID_EXAMPLE}), + values=fields.List( + fields.Str( + validate=INDY_CRED_REV_ID_VALIDATE, + metadata={ + "description": "Credential revocation identifier", + "example": INDY_CRED_REV_ID_EXAMPLE, + }, + ) + ), + metadata={"description": "Credential revocation ids by revocation registry id"}, + ) + + +class RevokeRequestSchema(CredRevRecordQueryStringSchema): + """Parameters and validators for revocation request.""" + + @validates_schema + def validate_fields(self, data, **kwargs): + """Validate fields - connection_id and thread_id must be present if notify.""" + super().validate_fields(data, **kwargs) + + notify = data.get("notify") + connection_id = data.get("connection_id") + notify_version = data.get("notify_version", "v1_0") + + if notify and not connection_id: + raise ValidationError( + "Request must specify connection_id if notify is true" + ) + if notify and not notify_version: + raise ValidationError( + "Request must specify notify_version if notify is true" + ) + + publish = fields.Boolean( + required=False, + 
metadata={ + "description": ( + "(True) publish revocation to ledger immediately, or (default, False)" + " mark it pending" + ) + }, + ) + notify = fields.Boolean( + required=False, + metadata={"description": "Send a notification to the credential recipient"}, + ) + notify_version = fields.String( + validate=validate.OneOf(["v1_0", "v2_0"]), + required=False, + metadata={ + "description": ( + "Specify which version of the revocation notification should be sent" + ) + }, + ) + connection_id = fields.Str( + required=False, + validate=UUID4_VALIDATE, + metadata={ + "description": ( + "Connection ID to which the revocation notification will be sent;" + " required if notify is true" + ), + "example": UUID4_EXAMPLE, + }, + ) + thread_id = fields.Str( + required=False, + metadata={ + "description": ( + "Thread ID of the credential exchange message thread resulting in the" + " credential now being revoked; required if notify is true" + ) + }, + ) + comment = fields.Str( + required=False, + metadata={ + "description": "Optional comment to include in revocation notification" + }, + ) + options = PublishRevocationsOptions() + + @docs( tags=["revocation"], summary="Revoke an issued credential", ) @request_schema(RevokeRequestSchema()) -@response_schema(RevocationModuleResponseSchema(), description="") +@response_schema(RevocationAnoncredsModuleResponseSchema(), description="") async def revoke(request: web.BaseRequest): """Request handler for storing a credential revocation. @@ -507,12 +531,6 @@ async def revoke(request: web.BaseRequest): The credential revocation details. """ - # - # this is exactly what is in anoncreds /revocation/revoke. - # we cannot import the revoke function as it imports classes from here, - # so circular dependency. - # we will clean this up and DRY at some point. 
- # context: AdminRequestContext = request["context"] body = await request.json() cred_ex_id = body.get("cred_ex_id") @@ -538,6 +556,7 @@ async def revoke(request: web.BaseRequest): else: # no cred_ex_id so we can safely splat the body await rev_manager.revoke_credential(**body) + return web.json_response({}) except ( RevocationManagerError, AnonCredsRevocationError, @@ -547,12 +566,10 @@ async def revoke(request: web.BaseRequest): ) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err - return web.json_response({}) - @docs(tags=["revocation"], summary="Publish pending revocations to ledger") @request_schema(PublishRevocationsSchema()) -@response_schema(TxnOrPublishRevocationsResultSchema(), 200, description="") +@response_schema(PublishRevocationsResultSchema(), 200, description="") async def publish_revocations(request: web.BaseRequest): """Request handler for publishing pending revocations to the ledger. @@ -563,22 +580,16 @@ async def publish_revocations(request: web.BaseRequest): Credential revocation ids published as revoked by revocation registry id. """ - # - # this is exactly what is in anoncreds /revocation/publish-revocations. - # we cannot import the function as it imports classes from here, - # so circular dependency. - # we will clean this up and DRY at some point. 
- # context: AdminRequestContext = request["context"] body = await request.json() + options = body.get("options", {}) rrid2crid = body.get("rrid2crid") rev_manager = RevocationManager(context.profile) try: - rev_reg_resp = await rev_manager.publish_pending_revocations( - rrid2crid, - ) + rev_reg_resp = await rev_manager.publish_pending_revocations(rrid2crid, options) + return web.json_response({"rrid2crid": rev_reg_resp}) except ( RevocationError, StorageError, @@ -587,8 +598,6 @@ async def publish_revocations(request: web.BaseRequest): ) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err - return web.json_response({"rrid2crid": rev_reg_resp}) - @docs( tags=["revocation"], @@ -1004,7 +1013,7 @@ async def get_cred_rev_record(request: web.BaseRequest): produces=["application/octet-stream"], ) @match_info_schema(RevRegIdMatchInfoSchema()) -@response_schema(RevocationModuleResponseSchema, description="tails file") +@response_schema(RevocationAnoncredsModuleResponseSchema, description="tails file") async def get_tails_file(request: web.BaseRequest) -> web.FileResponse: """Request handler to download tails file for revocation registry. diff --git a/aries_cloudagent/revocation_anoncreds/tests/test_routes.py b/aries_cloudagent/revocation_anoncreds/tests/test_routes.py index 97f51e7339..624313b919 100644 --- a/aries_cloudagent/revocation_anoncreds/tests/test_routes.py +++ b/aries_cloudagent/revocation_anoncreds/tests/test_routes.py @@ -1,20 +1,18 @@ import os -import pytest import shutil +from unittest import IsolatedAsyncioTestCase +import pytest from aiohttp.web import HTTPNotFound -from unittest import IsolatedAsyncioTestCase -from aries_cloudagent.tests import mock +from aries_cloudagent.tests import mock from ...anoncreds.models.anoncreds_revocation import ( RevRegDef, RevRegDefValue, ) from ...askar.profile import AskarProfile - from ...core.in_memory import InMemoryProfile - from .. 
import routes as test_module diff --git a/aries_cloudagent/storage/tests/test_askar_storage.py b/aries_cloudagent/storage/tests/test_askar_storage.py index a7d9883273..f257c60180 100644 --- a/aries_cloudagent/storage/tests/test_askar_storage.py +++ b/aries_cloudagent/storage/tests/test_askar_storage.py @@ -358,7 +358,7 @@ async def test_postgres_wallet_storage_works(self): class TestAskarStorageSearchSession(IsolatedAsyncioTestCase): - @pytest.mark.asyncio + @pytest.mark.asyncio(scope="module") async def test_askar_storage_search_session(self): profile = "profileId" diff --git a/aries_cloudagent/utils/classloader.py b/aries_cloudagent/utils/classloader.py index b2a24e62a3..12a4d6fe01 100644 --- a/aries_cloudagent/utils/classloader.py +++ b/aries_cloudagent/utils/classloader.py @@ -1,7 +1,7 @@ """The classloader provides utilties to dynamically load classes and modules.""" import inspect -import pkg_resources +from importlib import resources import sys from importlib import import_module @@ -158,20 +158,25 @@ def load_subclass_of(cls, base_class: Type, mod_path: str, package: str = None): @classmethod def scan_subpackages(cls, package: str) -> Sequence[str]: """Return a list of sub-packages defined under a named package.""" - # FIXME use importlib.resources in python 3.7 if "." in package: package, sub_pkg = package.split(".", 1) else: sub_pkg = "." - if not pkg_resources.resource_isdir(package, sub_pkg): + + try: + package_path = resources.files(package) + except FileNotFoundError: raise ModuleLoadError(f"Undefined package {package}") + + if not (package_path / sub_pkg).is_dir(): + raise ModuleLoadError(f"Undefined package {package}") + found = [] joiner = "" if sub_pkg == "." else f"{sub_pkg}." 
- for sub_path in pkg_resources.resource_listdir(package, sub_pkg): - if pkg_resources.resource_exists( - package, f"{sub_pkg}/{sub_path}/__init__.py" - ): - found.append(f"{package}.{joiner}{sub_path}") + sub_path = package_path / sub_pkg + for item in sub_path.iterdir(): + if (item / "__init__.py").exists(): + found.append(f"{package}.{joiner}{item.name}") return found diff --git a/aries_cloudagent/utils/endorsement_setup.py b/aries_cloudagent/utils/endorsement_setup.py new file mode 100644 index 0000000000..69e7facedb --- /dev/null +++ b/aries_cloudagent/utils/endorsement_setup.py @@ -0,0 +1,104 @@ +"""Common endorsement utilities.""" + +import logging + +from ..connections.models.conn_record import ConnRecord +from ..core.profile import Profile +from ..protocols.connections.v1_0.manager import ConnectionManager +from ..protocols.connections.v1_0.messages.connection_invitation import ( + ConnectionInvitation, +) +from ..protocols.endorse_transaction.v1_0.manager import TransactionManager +from ..protocols.endorse_transaction.v1_0.transaction_jobs import TransactionJob +from ..protocols.endorse_transaction.v1_0.util import ( + get_endorser_connection_id, + is_author_role, +) +from ..protocols.out_of_band.v1_0.manager import OutOfBandManager +from ..protocols.out_of_band.v1_0.messages.invitation import InvitationMessage + +LOGGER = logging.getLogger(__name__) + + +async def attempt_auto_author_with_endorser_setup(profile: Profile): + """Automatically setup the author's endorser connection if possible.""" + + if not is_author_role(profile): + return + + endorser_invitation = profile.settings.get_value("endorser.endorser_invitation") + if not endorser_invitation: + LOGGER.info("No endorser invitation, can't connect automatically.") + return + + endorser_alias = profile.settings.get_value("endorser.endorser_alias") + if not endorser_alias: + LOGGER.info("No endorser alias, alias is required if invitation is specified.") + return + + connection_id = await 
get_endorser_connection_id(profile) + if connection_id: + LOGGER.info("Connected to endorser from previous connection.") + return + + endorser_did = profile.settings.get_value("endorser.endorser_public_did") + if not endorser_did: + LOGGER.info( + "No endorser DID, can connect, but can't setup connection metadata." + ) + return + + try: + # OK, we are an author, we have no endorser connection but we have enough info + # to automatically initiate the connection + invite = InvitationMessage.from_url(endorser_invitation) + if invite: + oob_mgr = OutOfBandManager(profile) + oob_record = await oob_mgr.receive_invitation( + invitation=invite, + auto_accept=True, + alias=endorser_alias, + ) + async with profile.session() as session: + conn_record = await ConnRecord.retrieve_by_id( + session, oob_record.connection_id + ) + else: + invite = ConnectionInvitation.from_url(endorser_invitation) + if invite: + conn_mgr = ConnectionManager(profile) + conn_record = await conn_mgr.receive_invitation( + invitation=invite, + auto_accept=True, + alias=endorser_alias, + ) + else: + raise Exception( + "Failed to establish endorser connection, invalid " + "invitation format." + ) + + # configure the connection role and info (don't need to wait for the connection) + transaction_mgr = TransactionManager(profile) + await transaction_mgr.set_transaction_my_job( + record=conn_record, + transaction_my_job=TransactionJob.TRANSACTION_AUTHOR.name, + ) + + async with profile.session() as session: + value = await conn_record.metadata_get(session, "endorser_info") + if value: + value["endorser_did"] = endorser_did + value["endorser_name"] = endorser_alias + else: + value = {"endorser_did": endorser_did, "endorser_name": endorser_alias} + await conn_record.metadata_set(session, key="endorser_info", value=value) + + LOGGER.info( + "Successfully connected to endorser from invitation, and setup connection metadata." 
# noqa: E501 + ) + + except Exception: + LOGGER.info( + "Error accepting endorser invitation/configuring endorser connection" + ) diff --git a/aries_cloudagent/utils/tests/test_endorsement_setup.py b/aries_cloudagent/utils/tests/test_endorsement_setup.py new file mode 100644 index 0000000000..3e4f17de5b --- /dev/null +++ b/aries_cloudagent/utils/tests/test_endorsement_setup.py @@ -0,0 +1,64 @@ +from unittest import IsolatedAsyncioTestCase + +from aries_cloudagent.tests import mock + +from ...connections.models.conn_record import ConnRecord +from ...core.in_memory.profile import InMemoryProfile +from .. import endorsement_setup +from ..endorsement_setup import attempt_auto_author_with_endorser_setup + +mock_invitation = "http://localhost:9030?oob=eyJAdHlwZSI6ICJodHRwczovL2RpZGNvbW0ub3JnL291dC1vZi1iYW5kLzEuMS9pbnZpdGF0aW9uIiwgIkBpZCI6ICI2MWU1MmYzZS1kNTliLTQ3OWYtYmYwNC04NjJlOTk1MmM4MDYiLCAibGFiZWwiOiAiZW5kb3JzZXIiLCAiaGFuZHNoYWtlX3Byb3RvY29scyI6IFsiaHR0cHM6Ly9kaWRjb21tLm9yZy9kaWRleGNoYW5nZS8xLjAiXSwgInNlcnZpY2VzIjogW3siaWQiOiAiI2lubGluZSIsICJ0eXBlIjogImRpZC1jb21tdW5pY2F0aW9uIiwgInJlY2lwaWVudEtleXMiOiBbImRpZDprZXk6ejZNa2VkRDMyZlZmOG5ReG5SS2QzUmQ5S1hZQnVETEJiOHUyM1JWMm1ReFlpanR2I3o2TWtlZEQzMmZWZjhuUXhuUktkM1JkOUtYWUJ1RExCYjh1MjNSVjJtUXhZaWp0diJdLCAic2VydmljZUVuZHBvaW50IjogImh0dHA6Ly9sb2NhbGhvc3Q6OTAzMCJ9XX0=" + + +class MockConnRecord: + connection_id = "test-connection-id" + + +class TestEndorsementSetupUtil(IsolatedAsyncioTestCase): + def setUp(self) -> None: + self.profile = InMemoryProfile.test_profile() + + @mock.patch.object(endorsement_setup.LOGGER, "info", return_value=mock.MagicMock()) + async def test_not_enough_configs_for_connection(self, mock_logger): + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + # No invitation + self.profile.settings.set_value("endorser.author", True) + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + # No endorser alias + 
self.profile.settings.set_value("endorser.endorser_invitation", mock_invitation) + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + # No endorser DID + self.profile.settings.set_value("endorser.endorser_alias", "test-alias") + await endorsement_setup.attempt_auto_author_with_endorser_setup(self.profile) + + assert mock_logger.call_count == 3 + for call in mock_logger.call_args_list: + assert "Error accepting endorser invitation" not in call[0][0] + + @mock.patch.object(endorsement_setup.LOGGER, "info", return_value=mock.MagicMock()) + @mock.patch.object(endorsement_setup, "OutOfBandManager") + @mock.patch.object( + ConnRecord, + "retrieve_by_id", + return_value=ConnRecord(connection_id="test-connection-id"), + ) + async def test_create_connection_with_valid_invitation( + self, mock_conn_record, mock_oob_manager, mock_logger + ): + mock_oob_manager.return_value.receive_invitation = mock.CoroutineMock( + return_value=MockConnRecord() + ) + self.profile.settings.set_value("endorser.author", True) + self.profile.settings.set_value("endorser.endorser_invitation", mock_invitation) + self.profile.settings.set_value("endorser.endorser_alias", "test-alias") + self.profile.settings.set_value("endorser.endorser_public_did", "test-did") + + await attempt_auto_author_with_endorser_setup(self.profile) + + for call in mock_logger.call_args_list: + assert "Error accepting endorser invitation" not in call[0][0] + + assert mock_conn_record.called diff --git a/aries_cloudagent/vc/routes.py b/aries_cloudagent/vc/routes.py index 0676fe9f92..8a021223ae 100644 --- a/aries_cloudagent/vc/routes.py +++ b/aries_cloudagent/vc/routes.py @@ -75,7 +75,7 @@ async def issue_credential_route(request: web.BaseRequest): """ body = await request.json() context: AdminRequestContext = request["context"] - manager = context.inject(VcLdpManager) + manager = VcLdpManager(context.profile) try: credential = body["credential"] options = {} if "options" not in body else 
body["options"] @@ -120,7 +120,7 @@ async def verify_credential_route(request: web.BaseRequest): """ body = await request.json() context: AdminRequestContext = request["context"] - manager = context.inject(VcLdpManager) + manager = VcLdpManager(context.profile) try: vc = VerifiableCredential.deserialize(body["verifiableCredential"]) result = await manager.verify_credential(vc) @@ -147,7 +147,7 @@ async def store_credential_route(request: web.BaseRequest): """ body = await request.json() context: AdminRequestContext = request["context"] - manager = context.inject(VcLdpManager) + manager = VcLdpManager(context.profile) try: vc = body["verifiableCredential"] @@ -183,7 +183,7 @@ async def prove_presentation_route(request: web.BaseRequest): """ context: AdminRequestContext = request["context"] - manager = context.inject(VcLdpManager) + manager = VcLdpManager(context.profile) body = await request.json() try: presentation = body["presentation"] @@ -225,7 +225,7 @@ async def verify_presentation_route(request: web.BaseRequest): """ context: AdminRequestContext = request["context"] - manager = context.inject(VcLdpManager) + manager = VcLdpManager(context.profile) body = await request.json() try: vp = VerifiablePresentation.deserialize(body["verifiablePresentation"]) diff --git a/demo/features/0586-sign-transaction.feature b/demo/features/0586-sign-transaction.feature index 97626d2766..3a57b76e08 100644 --- a/demo/features/0586-sign-transaction.feature +++ b/demo/features/0586-sign-transaction.feature @@ -36,12 +36,11 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions @WalletType_Askar_AnonCreds @GHA Examples: | Acme_capabilities | Bob_capabilities | Schema_name | - | --wallet-type askar-anoncreds | --wallet-type askar-anoncreds | anoncreds-testing | | --wallet-type askar-anoncreds | | driverslicense | | | --wallet-type askar-anoncreds | anoncreds-testing | - @T001.1-RFC0586 @GHA + @T001.1-RFC0586 Scenario Outline: endorse a transaction and write to the ledger 
Given we have "2" agents | name | role | capabilities | @@ -102,6 +101,7 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions And "Bob" authors a revocation registry entry publishing transaction Then "Acme" can verify the credential from "Bob" was revoked + @GHA Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --revocation --public-did --did-exchange | --revocation --did-exchange | driverslicense | Data_DL_NormalizedValues | @@ -114,12 +114,12 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | Acme_capabilities | Bob_capabilties | Schema_name | Credential_data | | --multitenant --multi-ledger --revocation --public-did | --multitenant --multi-ledger --revocation | driverslicense | Data_DL_NormalizedValues | - @WalletType_Askar_AnonCreds + @WalletType_Askar_AnonCreds @GHA Examples: | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | | --revocation --public-did --did-exchange | --revocation --did-exchange --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | - @T002.1-RFC0586 @GHA + @T002.1-RFC0586 Scenario Outline: endorse a schema and cred def transaction, write to the ledger, issue and revoke a credential, manually invoking each endorsement endpoint Given we have "2" agents | name | role | capabilities | @@ -196,6 +196,11 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | --endorser-role endorser --revocation --public-did --multitenant | --endorser-role author --revocation --multitenant | driverslicense | Data_DL_NormalizedValues | | --endorser-role endorser --revocation --public-did --mediation --multitenant | --endorser-role author --revocation --mediation --multitenant | driverslicense | Data_DL_NormalizedValues | + @WalletType_Askar_AnonCreds + Examples: + | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | + | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --wallet-type 
askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | + @T003.1-RFC0586 @GHA Scenario Outline: endorse a schema and cred def transaction, write to the ledger, issue and revoke a credential, with auto endorsing workflow Given we have "2" agents @@ -224,26 +229,8 @@ Feature: RFC 0586 Aries sign (endorse) transactions functions | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation | driverslicense | Data_DL_NormalizedValues | | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --multitenant | driverslicense | Data_DL_NormalizedValues | - @T003.2-RFC0586 @GHA - Scenario Outline: endorse a schema and cred def transaction, write to the ledger, and issue a credential, with auto endorsing workflow - Given we have "2" agents - | name | role | capabilities | - | Acme | endorser | | - | Bob | author | | - And "Acme" and "Bob" have an existing connection - When "Acme" has a DID with role "ENDORSER" - And "Acme" connection has job role "TRANSACTION_ENDORSER" - And "Bob" connection has job role "TRANSACTION_AUTHOR" - And "Bob" connection sets endorser info - And "Bob" has a DID with role "AUTHOR" - And "Bob" authors a schema transaction with - And "Bob" has written the schema to the ledger - And "Bob" authors a credential definition transaction with - And "Bob" has written the credential definition for to the ledger - And "Bob" has written the revocation registry definition to the ledger - And "Bob" has written the revocation registry entry transaction to the ledger - And "Acme" has an issued credential from "Bob" - + @WalletType_Askar_AnonCreds Examples: - | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | - | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | + | Acme_capabilities | Bob_capabilities | Schema_name | Credential_data | + | --endorser-role 
endorser --revocation --public-did | --endorser-role author --revocation --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | + | --endorser-role endorser --revocation --public-did | --endorser-role author --revocation --multitenant --wallet-type askar-anoncreds | anoncreds-testing | Data_AC_NormalizedValues | diff --git a/demo/features/steps/0586-sign-transaction.py b/demo/features/steps/0586-sign-transaction.py index c43d545062..e0786afc38 100644 --- a/demo/features/steps/0586-sign-transaction.py +++ b/demo/features/steps/0586-sign-transaction.py @@ -576,14 +576,13 @@ def step_impl(context, agent_name): ) context.cred_exchange = cred_exchange - # revoke the credential agent_container_POST( agent["agent"], "/revocation/revoke", data={ - "rev_reg_id": cred_exchange["indy"]["rev_reg_id"], "cred_rev_id": cred_exchange["indy"]["cred_rev_id"], "publish": False, + "rev_reg_id": cred_exchange["indy"]["rev_reg_id"], "connection_id": cred_exchange["cred_ex_record"]["connection_id"], }, ) @@ -611,16 +610,36 @@ def step_impl(context, agent_name): connection_id = agent["agent"].agent.connection_id # revoke the credential - agent_container_POST( - agent["agent"], - "/revocation/revoke", - data={ + if not is_anoncreds(agent): + data = { "rev_reg_id": cred_exchange["indy"]["rev_reg_id"], "cred_rev_id": cred_exchange["indy"]["cred_rev_id"], "publish": False, "connection_id": cred_exchange["cred_ex_record"]["connection_id"], - }, - params={"conn_id": connection_id, "create_transaction_for_endorser": "true"}, + } + params = { + "conn_id": connection_id, + "create_transaction_for_endorser": "true", + } + + else: + data = { + "cred_rev_id": cred_exchange["indy"]["cred_rev_id"], + "publish": False, + "rev_reg_id": cred_exchange["indy"]["rev_reg_id"], + "connection_id": cred_exchange["cred_ex_record"]["connection_id"], + "options": { + "endorser_connection_id": connection_id, + "create_transaction_for_endorser": "true", + }, + } + params = {} + + 
agent_container_POST( + agent["agent"], + "/revocation/revoke", + data=data, + params=params, ) # pause for a few seconds @@ -644,8 +663,8 @@ def step_impl(context, agent_name): ] } }, + params={}, ) - # check that rev reg entry was written assert "rrid2crid" in created_rev_reg @@ -662,21 +681,38 @@ def step_impl(context, agent_name): connection_id = agent["agent"].agent.connection_id # create rev_reg entry transaction - created_rev_reg = agent_container_POST( - agent["agent"], - "/revocation/publish-revocations", - data={ + if not is_anoncreds(agent): + data = { "rrid2crid": { context.cred_exchange["indy"]["rev_reg_id"]: [ context.cred_exchange["indy"]["cred_rev_id"] ] } - }, - params={"conn_id": connection_id, "create_transaction_for_endorser": "true"}, - ) + } + params = { + "conn_id": connection_id, + "create_transaction_for_endorser": "true", + } + else: + data = { + "rrid2crid": { + context.cred_exchange["indy"]["rev_reg_id"]: [ + context.cred_exchange["indy"]["cred_rev_id"] + ] + }, + "options": { + "endorser_connection_id": connection_id, + "create_transaction_for_endorser": "true", + }, + } + params = {} - # check that transaction request has been sent - assert created_rev_reg["txn"]["state"] == "request_sent" + agent_container_POST( + agent["agent"], + "/revocation/publish-revocations", + data=data, + params=params, + ) # pause for a few seconds async_sleep(3.0) diff --git a/docs/aca-py.org.md b/docs/aca-py.org.md new file mode 100644 index 0000000000..5c6c394712 --- /dev/null +++ b/docs/aca-py.org.md @@ -0,0 +1,29 @@ +# Welcome! + +![Hyperledger Aries](https://raw.githubusercontent.com/hyperledger/aries-acapy-docs/main/assets/Hyperledger_Aries_Logo_Color.png) + +Welcome to the Aries Cloud Agent Python documentation site. On this site you +will find documentation for recent releases of ACA-Py. You'll find a few of the +older versions of ACA-Py (pre-`0.8.0`), all versions since `0.8.0`, and the +`main` branch, which is the latest and greatest. 
+ +All of the documentation here is extracted from the [Aries Cloud Agent Python repository]. +If you want to contribute to the documentation, please start there. + +Ready to go? Scan the tabs in the page header to find the documentation you need now! + +## Code Internals Documentation + +In addition to this documentation site, the ACA-Py community also maintains an +ACA-Py internals documentation site. The internals documentation consists of the +`docstrings` extracted from the ACA-Py Python code and covers all of the +(non-test) modules in the codebase. Check it out on the [Aries Cloud +Agent-Python ReadTheDocs site](https://aries-cloud-agent-python.readthedocs.io/en/latest/). +As with this site, the ReadTheDocs documentation is version specific. + +Got questions? + +- Join us on the [Hyperledger Discord Server](https://chat.hyperledger.org), in the `#aries-cloudagent-python` channel. +- Add an issue in the [Aries Cloud Agent Python repository]. + +[Aries Cloud Agent Python repository]: https://github.com/hyperledger/aries-cloudagent-python diff --git a/docs/assets/aries-favicon.png b/docs/assets/aries-favicon.png new file mode 100644 index 0000000000..5b10050e05 --- /dev/null +++ b/docs/assets/aries-favicon.png @@ -0,0 +1,89 @@ + + + + + + Page not found · GitHub Pages + + + + +
+ +

404

+

File not found

+ +

+ The site configured at this address does not + contain the requested file. +

+ +

+ If this is your site, make sure that the filename case matches the URL + as well as any file permissions.
+ For root URLs (like http://example.com/) you must provide an + index.html file. +

+ +

+ Read the full documentation + for more information about using GitHub Pages. +

+ + + + + + +
+ + diff --git a/docs/conf.py b/docs/conf.py index 5199e18ab5..336d7e620d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -57,6 +57,9 @@ "pytz", "multiformats", "sd_jwt", + "anoncreds", + "did_peer_2", + "did_peer_4", ] # "aries_cloudagent.tests.test_conductor", @@ -101,7 +104,7 @@ # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -source_suffix = [".rst", ".md"] +source_suffix = [".rst"] # source_suffix = '.rst' # The master toctree document. diff --git a/docs/demo/ReusingAConnection.md b/docs/demo/ReusingAConnection.md new file mode 100644 index 0000000000..a28ba88c9b --- /dev/null +++ b/docs/demo/ReusingAConnection.md @@ -0,0 +1,139 @@ +# Reusing a Connection + +The Aries [RFC 0434 Out of Band] protocol enables the concept of reusing a +connection such that when using [RFC 0023 DID Exchange] to establish a +connection with an agent with which you already have a connection, you can reuse +the existing connection instead of creating a new one. This is something you +couldn't do a with the older [RFC 0160 Connection Protocol] that we used in the +early days of Aries. It was a pain, and made for a lousy user experience, as on +every visit to an existing contact, the invitee got a new connection. + +The requirements on your invitations (such as in the example below) are: + +- The invitation `services` item **MUST** be a resolvable DID. + - Or alternatively, the invitation `services` item **MUST NOT** be an `inline` service. +- The DID in the invitation `services` item is the same one in every invitation. 
+ +Example invitation: + +```jsonc +{ + "@type": "https://didcomm.org/out-of-band/1.1/invitation", + "@id": "77489d63-caff-41fe-a4c1-ec7e2ff00695", + "label": "faber.agent", + "handshake_protocols": [ + "https://didcomm.org/didexchange/1.0" + ], + "services": [ + "did:sov:4JiUsoK85pVkkB1bAPzFaP" + ] +} +``` + +[RFC 0434 Out of Band]: https://github.com/hyperledger/aries-rfcs/tree/main/features/0434-outofband +[RFC 0023 DID Exchange]: https://github.com/hyperledger/aries-rfcs/tree/main/features/0023-did-exchange +[RFC 0160 Connection Protocol]: https://github.com/hyperledger/aries-rfcs/tree/main/features/0160-connection-protocol +[RFC 0434 Out of Band invitation]: https://github.com/hyperledger/aries-rfcs/tree/main/features/0434-outofband#invitation-httpsdidcommorgout-of-bandverinvitation +[RFC 0023 DID Exchange request]: https://github.com/hyperledger/aries-rfcs/tree/main/features/0023-did-exchange#1-exchange-request +[RFC 0434 Out of Band reuse]: https://github.com/hyperledger/aries-rfcs/tree/main/features/0434-outofband#reuse-messages + +Here's the flow that demonstrates where reuse helps. For simplicity, we'll use the terms "Issuer" +and "Wallet" in this example, but it applies to any connection between any two +agents (the inviter and the invitee) that establish connections with one another. + +- The Wallet user is using a browser on the Issuers website and gets to the + point where they are going to be offered a credential. As part of that flow, + they are presented with a QR code that they scan with their wallet app. +- The QR contains an [RFC 0434 Out of Band invitation] to connect that the + Wallet processes as the *invitee*. +- The Wallet uses the information in the invitation to send an [RFC 0023 DID Exchange request] + DIDComm message back to the Issuer to initiate establishing a connection. +- The Issuer responds back to the `request` with a `response` message, and the + connection is established. 
+- Later, the Wallet user returns to the Issuer's website, and does something + (perhaps starts the process to get another credential) that results in the + same QR code being displayed, and again the users scans the QR code with their + Wallet app. +- The Wallet recognizes (based on the DID in the `services` item in the + invitation -- see example below) that it already has a connection to the + Issuer, so instead of sending a DID Exchange `request` message back to the + Issuer, they send an [RFC 0434 Out of Band reuse] DIDComm message, and both + parties know to use the existing connection. + - Had the Wallet used the DID Exchange `request` message, a new connection + would have been established. + +The [RFC 0434 Out of Band] protocol requirement enables `reuse` message by the +invitee (the Wallet in the flow above) is that the `service` in the invitation +**MUST** be a resolvable DID that is the same in all of the invitations. In the +example invitation above, the DID is a `did:sov` DID that is resolvable on a public +Hyperledger Indy network. The DID could also be a [Peer DID] of types 2 or 4, +which encode the entire DIDDoc contents into the DID identifier (thus they are +"resolvable DIDs"). What cannot be used is either the old "unqualified" DIDs +that were commonly used in Aries prior to 2024, and [Peer DID] type 1. Both of +those have DID types include both an identifier and a DIDDoc in the `services` +item of the Out of Band invitation. As noted in the Out of Band specification, +`reuse` cannot be used with such DID types even if the contents are the same. 
+ +[Peer DID]: https://identity.foundation/peer-did-method-spec/ + +Example invitation: + +```jsonc +{ + "@type": "https://didcomm.org/out-of-band/1.1/invitation", + "@id": "77489d63-caff-41fe-a4c1-ec7e2ff00695", + "label": "faber.agent", + "handshake_protocols": [ + "https://didcomm.org/didexchange/1.0" + ], + "services": [ + "did:sov:4JiUsoK85pVkkB1bAPzFaP" + ] +} +``` + +The use of conenction reuse can be demonstrated with the Alice / Faber demos as +follows. We assume you have already somewhat familiar with your options for +running the [Alice Faber Demo] (e.g. locally or in a browser). Follow those +instruction up to the point where you are about to start the Faber and Alice agents. + +[Alice Faber Demo]: ./README.md + +1. On a command line, run Faber with these parameters: `./run_demo faber + --reuse-connection --events`. +2. On a second command line, run Alice as normal, perhaps with the `events` + option: `./run_demo alice --events` +3. Copy the invitation from the Faber terminal and paste it into the Alice + terminal at the prompt. +4. Verify that the connection was established. + 1. If you want, go to the Alice OpenAPI screen (port `8031`, path + `api/docs`), and then use the `GET Connections` to see that Alice has one + connection to Faber. +5. In the Alice terminal, type `4` to get a prompt for a new connection, and + paste the same invitation as in Step 3 (above). +6. Note from the webhook events in the Faber terminal that the `reuse` message + is received from Alice, and as a result, no new connection was created. + 1. Execute again the `GET Connections` endpoint on the Alice OpenAPI screen + to confirm that there is still just one established connection. +7. In the Faber terminal, type `4` to get a new invitation, copy the invitation, + in the Alice terminal, type `4` to get prompted for an invitation, and paste + in the new invitation from Faber. Again, the `reuse` webhook event will be + visible in the Faber terminal. + 1. 
Execute again the `GET Connections` endpoint on the Alice OpenAPI screen + to confirm that there is still just one established connection. + 2. Notice that in the invitations in Step 3 and 7 both have the same DID in + the `services`. +8. Try running the demo again **without** the `--reuse-connection` parameter and + compare the `services` value in the new invitation vs. what was generated in + Steps 3 and 7. It is not a DID, but rather a one time use, inline DIDDoc + item. + +While in the demo Faber uses in the invitation the same DID they publish as an +issuer (and uses in creating the schema and Cred Def for the demo), Faber could +use any *resolvable* (not inline) DID, including DID Peer types 2 or 4 DIDs, as +long as the DID is the same in every invitation. It is the fact that the DID is +always the same that tells the invitee that they can reuse an existing connection. + +Note that the invitation does **NOT** have to be a multi-use invitation for +reuse to be useful, as long as the other requirements (at the top of this +document) are met. 
diff --git a/docs/features/DIDResolution.md b/docs/features/DIDResolution.md index cd84ecbc91..30e8f9210a 100644 --- a/docs/features/DIDResolution.md +++ b/docs/features/DIDResolution.md @@ -176,7 +176,7 @@ plugin: The following is a fully functional Dockerfile encapsulating this setup: ```dockerfile= -FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.0rc0 +FROM ghcr.io/hyperledger/aries-cloudagent-python:py3.9-0.12.0rc1 RUN pip3 install git+https://github.com/dbluhm/acapy-resolver-github CMD ["aca-py", "start", "-it", "http", "0.0.0.0", "3000", "-ot", "http", "-e", "http://localhost:3000", "--admin", "0.0.0.0", "3001", "--admin-insecure-mode", "--no-ledger", "--plugin", "acapy_resolver_github"] diff --git a/docs/features/Multiledger.md b/docs/features/Multiledger.md index db70a3e42b..8568dd24a7 100644 --- a/docs/features/Multiledger.md +++ b/docs/features/Multiledger.md @@ -16,6 +16,7 @@ More background information including problem statement, design (algorithm) and - [Write Requests](#write-requests) - [A Special Warning for TAA Acceptance](#a-special-warning-for-taa-acceptance) - [Impact on other ACA-Py function](#impact-on-other-aca-py-function) +- [Known Issues](#known-issues) ## Usage @@ -207,3 +208,8 @@ These changes are made here: - `./aries_cloudagent/protocols/trustping/v1_0/routes.py` - `./aries_cloudagent/resolver/routes.py` - `./aries_cloudagent/revocation/routes.py` + + +## Known Issues + +* When in multi-ledger mode and switching ledgers (e.g.: the agent is registered on Ledger A and has published its DID there, and now wants to "move" to Ledger B) there is an [issue](https://github.com/hyperledger/aries-cloudagent-python/issues/2473) that will cause the registration to the new ledger to fail. 
\ No newline at end of file diff --git a/docs/features/SupportedRFCs.md b/docs/features/SupportedRFCs.md index 245f881cea..022999fcf1 100644 --- a/docs/features/SupportedRFCs.md +++ b/docs/features/SupportedRFCs.md @@ -8,7 +8,7 @@ ACA-Py or the repository `main` branch. Reminders (and PRs!) to update this page welcome! If you have any questions, please contact us on the #aries channel on [Hyperledger Discord](https://discord.gg/hyperledger) or through an issue in this repo. -**Last Update**: 2024-01-17, Release 0.12.0rc0 +**Last Update**: 2024-02-17, Release 0.12.0rc1 > The checklist version of this document was created as a joint effort > between [Northern Block](https://northernblock.io/), [Animo Solutions](https://animo.id/) and the Ontario government, on behalf of the Ontario government. @@ -129,6 +129,8 @@ are fully supported in ACA-Py **EXCEPT** as noted in the table below. | [0587-encryption-envelope-v2](https://github.com/hyperledger/aries-rfcs/tree/b3a3942ef052039e73cd23d847f42947f8287da2/features/0587-encryption-envelope-v2) | :construction: | Supporting the DIDComm v2 encryption envelope does not make sense until DIDComm v2 is to be supported. | | [0317-please-ack](https://github.com/hyperledger/aries-rfcs/tree/main/features/0317-please-ack) | :x: | An investigation was done into supporting `please-ack` and a number of complications were found. As a result, we expect that `please-ack` will be dropped from AIP 2.0. It has not been implemented by any Aries frameworks or deployments. | +There is a [PR to the Aries RFCs repository](https://github.com/hyperledger/aries-rfcs/pull/814) to remove those RFCs from AIP 2.0. If that PR is removed, the RFCs will be removed from the table above. 
+ ### Other Supported RFCs | RFC | Supported | Notes | diff --git a/mkdocs-requirements.txt b/mkdocs-requirements.txt new file mode 100644 index 0000000000..a64b578cf6 --- /dev/null +++ b/mkdocs-requirements.txt @@ -0,0 +1,3 @@ + +mkdocs-material==9.5.10 +mike==2.0.0 diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000000..1c9a9b0667 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,150 @@ +site_name: Hyperledger Aries ACA-Py Docs +repo_name: hyperledger/aries-cloudagent-python +repo_url: https://github.com/hyperledger/aries-cloudagent-python +theme: + name: material + custom_dir: overrides + logo: https://raw.githubusercontent.com/hyperledger/aries-acapy-docs/main/assets/Hyperledger_Aries_Logo_White.png + favicon: https://raw.githubusercontent.com/hyperledger/aries-cloudagent-python/main/docs/assets/aries-favicon.png + icon: + repo: fontawesome/brands/github + palette: + # Palette toggle for light mode + - media: "(prefers-color-scheme: light)" + scheme: default + toggle: + icon: material/brightness-7 + name: Switch to dark mode + # Palette toggle for dark mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + toggle: + icon: material/brightness-4 + name: Switch to light mode + features: + - content.code.copy + - navigation.expand + - navigation.footer + - navigation.instant + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - navigation.tracking + - toc.follow +# - toc.integrate +markdown_extensions: + - abbr + - admonition + - attr_list + - def_list + - footnotes + - md_in_html + - toc: + permalink: true + toc_depth: 3 + - pymdownx.arithmatex: + generic: true + - pymdownx.betterem: + smart_enable: all + - pymdownx.caret + - pymdownx.details + - pymdownx.emoji: + emoji_generator: !!python/name:materialx.emoji.to_svg + emoji_index: !!python/name:materialx.emoji.twemoji + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.inlinehilite + - pymdownx.keys + - pymdownx.magiclink: + repo_url_shorthand: true + user: squidfunk 
+ repo: mkdocs-material + - pymdownx.mark + - pymdownx.smartsymbols + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true + - pymdownx.tasklist: + custom_checkbox: true + - pymdownx.tilde +plugins: + - search + - mike +extra: + version: + provider: mike +nav: +- Welcome!: + - Welcome: aca-py.org.md + - ACA-Py README: README.md + - Release Notes: CHANGELOG.md +- Features: + - Developer Introduction: features/DevReadMe.md + - DevContainer Support: features/devcontainer.md + - Supported Aries Interop Profiles and RFCs: features/SupportedRFCs.md + - The Admin API: features/AdminAPI.md + - ACA-Py Plugins: features/PlugIns.md + - Multitenant ACA-Py: features/Multitenancy.md + - DID Methods: features/DIDMethods.md + - DID Resolution: features/DIDResolution.md + - Configuring Multiple Indy Ledgers: features/Multiledger.md + - Automatically Endorsing Indy Transations: features/Endorser.md + - Using W3C JSON-LD Signed Credentials: features/JsonLdCredentials.md + - Using SD-JWTs: features/SelectiveDisclosureJWTs.md + - AnonCreds Presentation Validation: features/AnoncredsProofValidation.md + - Multiple Credential Types: features/Multicredentials.md + - Code Generation with the Open API: features/UsingOpenAPI.md + - ACA-Py as a DIDComm Mediator: features/Mediation.md +- Demos: + - The Alice-Faber Demo: demo/README.md + - Open API Tutorial: demo/AriesOpenAPIDemo.md + - Alice Gets a Phone: demo/AliceGetsAPhone.md + - Hyperledger Indy Endorser In Action: demo/Endorser.md + - Using W3C JSON-LD Credentials: demo/AliceWantsAJsonCredential.md + - DIY -- ACME Controller Workshop: demo/AcmeDemoWorkshop.md + - Aries Using Postman Demo: demo/AriesPostmanDemo.md +- Getting Started: + - Becoming an Indy/Aries Developer: gettingStarted/README.md + - Hyperledger Indy Basics: gettingStarted/IndyBasics.md + - Hyperledger Aries Basics: gettingStarted/AriesBasics.md 
+ - Decentralized Identity Demos: gettingStarted/DecentralizedIdentityDemos.md + - Aries - The Big Picture: gettingStarted/AriesBigPicture.md + - Aries Architecture: gettingStarted/AriesAgentArchitecture.md + - Aries Messaging: gettingStarted/AriesMessaging.md + - Aries Developer Demos: gettingStarted/AriesDeveloperDemos.md + - Agent Connections: gettingStarted/AgentConnections.md + - Issuing AnonCreds Credentials: gettingStarted/IssuingAnonCredsCredentials.md + - Presenting AnonCreds Proofs: gettingStarted/PresentingAnonCredsProofs.md + - Making Your Own ACA-Py Agent: gettingStarted/YourOwnAriesAgent.md + - Aries Developer Options: gettingStarted/IndyAriesDevOptions.md + - DIDComm Messaging: gettingStarted/DIDcommMsgs.md + - DIDComm Message Routing: gettingStarted/RoutingEncryption.md + - DIDComm Message Routing Example: gettingStarted/AriesRoutingExample.md + - TODO Connecting to an Indy Network: gettingStarted/ConnectIndyNetwork.md + - AnonCreds Credential Revocation: gettingStarted/CredentialRevocation.md +- Deploying: + - Deployment Model: deploying/deploymentModel.md + - Upgrading ACA-Py: deploying/UpgradingACA-Py.md + - Indy SDK to Askar Migration: deploying/IndySDKtoAskarMigration.md + - The Use of Poetry in ACA-Py: deploying/Poetry.md + - ACA-Py Container Images: deploying/ContainerImagesAndGithubActions.md + - Databases: deploying/Databases.md + - Persistent Queues and Caching: deploying/RedisPlugins.md + - The askar-anoncreds Wallet Type: deploying/AnonCredsWalletType.md +- Testing/Troubleshooting: + - Running and Creating Unit Tests: testing/UnitTests.md + - Managing Logging: testing/Logging.md + - ACA-Py Integration Tests: testing/INTEGRATION-TESTS.md + - Protocol Tracing: testing/AgentTracing.md + - Troubleshooting: testing/Troubleshooting.md +- Contributing: + - How to Contribute: CONTRIBUTING.md + - Maintainers: MAINTAINERS.md + - Hyperledger Code of Conduct: CODE_OF_CONDUCT.md + - Security Vulnerability Reporting: SECURITY.md + - Publishing an 
ACA-Py Release: PUBLISHING.md + - Updating the ACA-Py ReadTheDocs Site: UpdateRTD.md diff --git a/open-api/openapi.json b/open-api/openapi.json index 5840bb0df0..c83a1fe881 100644 --- a/open-api/openapi.json +++ b/open-api/openapi.json @@ -2,7 +2,7 @@ "openapi" : "3.0.1", "info" : { "title" : "Aries Cloud Agent", - "version" : "v0.12.0rc0" + "version" : "v0.12.0rc1" }, "servers" : [ { "url" : "/" @@ -89,20 +89,6 @@ "url" : "https://tools.ietf.org/html/rfc7515" }, "name" : "jsonld" - }, { - "description" : "Issue and verify LDP VCs and VPs", - "externalDocs" : { - "description" : "Specification", - "url" : "https://www.w3.org/TR/vc-data-model/" - }, - "name" : "ldp-vc" - }, { - "description" : "Manage credentials and presentations", - "externalDocs" : { - "description" : "Specification", - "url" : "https://w3c-ccg.github.io/vc-api" - }, - "name" : "vc-api" }, { "description" : "Interaction with ledger", "externalDocs" : { @@ -172,6 +158,13 @@ "url" : "https://github.com/hyperledger/aries-rfcs/tree/527849ec3aa2a8fd47a7bb6c57f918ff8bcb5e8c/features/0048-trust-ping" }, "name" : "trustping" + }, { + "description" : "Endpoints for managing w3c credentials and presentations", + "externalDocs" : { + "description" : "Specification", + "url" : "https://w3c-ccg.github.io/vc-api/" + }, + "name" : "vc-api" }, { "description" : "DID and tag policy management", "externalDocs" : { @@ -383,7 +376,7 @@ "in" : "query", "name" : "state", "schema" : { - "enum" : [ "request", "abandoned", "active", "init", "error", "invitation", "start", "response", "completed" ], + "enum" : [ "abandoned", "invitation", "active", "response", "request", "start", "error", "completed", "init" ], "type" : "string" } }, { @@ -554,7 +547,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -607,7 +600,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : 
"#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -657,7 +650,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -692,7 +685,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -1430,7 +1423,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -1489,7 +1482,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -1532,7 +1525,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -1582,7 +1575,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -1619,7 +1612,7 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" } } }, @@ -5656,13 +5649,6 @@ "schema" : { "type" : "string" } - }, { - "description" : "Endorser will write the transaction after endorsing it", - "in" : "query", - "name" : "endorser_write_txn", - "schema" : { - "type" : "boolean" - } } ], "requestBody" : { "content" : { @@ -5910,13 +5896,121 @@ "tags" : [ "endorse-transaction" ] } }, - "/vc/ldp/issue" : { + "/vc/credentials" : { + "get" : { + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/ListCredentialsResponse" + } + } + }, + "description" : "" + } + }, + "summary" : "List credentials", + "tags" : [ "vc-api" ] + } + }, + "/vc/credentials/issue" : { + "post" : { 
+ "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/IssueCredentialRequest" + } + } + }, + "required" : false + }, + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/IssueCredentialResponse" + } + } + }, + "description" : "" + } + }, + "summary" : "Issue a credential", + "tags" : [ "vc-api" ], + "x-codegen-request-body-name" : "body" + } + }, + "/vc/credentials/store" : { + "post" : { + "summary" : "Store a credential", + "tags" : [ "vc-api" ] + } + }, + "/vc/credentials/verify" : { + "post" : { + "requestBody" : { + "content" : { + "*/*" : { + "schema" : { + "$ref" : "#/components/schemas/VerifyCredentialRequest" + } + } + }, + "required" : false + }, + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/VerifyCredentialResponse" + } + } + }, + "description" : "" + } + }, + "summary" : "Verify a credential", + "tags" : [ "vc-api" ], + "x-codegen-request-body-name" : "body" + } + }, + "/vc/credentials/{credential_id}" : { + "get" : { + "parameters" : [ { + "in" : "path", + "name" : "credential_id", + "required" : true, + "schema" : { + "type" : "string" + } + } ], + "responses" : { + "200" : { + "content" : { + "application/json" : { + "schema" : { + "$ref" : "#/components/schemas/FetchCredentialResponse" + } + } + }, + "description" : "" + } + }, + "summary" : "Fetch credential by ID", + "tags" : [ "vc-api" ] + } + }, + "/vc/presentations/prove" : { "post" : { "requestBody" : { "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/LdpIssueRequest" + "$ref" : "#/components/schemas/ProvePresentationRequest" } } }, @@ -5927,25 +6021,25 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/LdpIssueResponse" + "$ref" : "#/components/schemas/ProvePresentationResponse" } } }, "description" : "" } }, - "summary" : "Sign an LDP VC.", - "tags" : [ 
"ldp_vc" ], + "summary" : "Prove a presentation", + "tags" : [ "vc-api" ], "x-codegen-request-body-name" : "body" } }, - "/vc/ldp/verify" : { + "/vc/presentations/verify" : { "post" : { "requestBody" : { "content" : { "*/*" : { "schema" : { - "$ref" : "#/components/schemas/LdpVerifyRequest" + "$ref" : "#/components/schemas/VerifyPresentationRequest" } } }, @@ -5956,15 +6050,15 @@ "content" : { "application/json" : { "schema" : { - "$ref" : "#/components/schemas/LdpVerifyResponse" + "$ref" : "#/components/schemas/VerifyPresentationResponse" } } }, "description" : "" } }, - "summary" : "Verify an LDP VC or VP.", - "tags" : [ "ldp_vc" ], + "summary" : "Verify a Presentation", + "tags" : [ "vc-api" ], "x-codegen-request-body-name" : "body" } }, @@ -6671,6 +6765,117 @@ }, "type" : "object" }, + "ConnRecord" : { + "properties" : { + "accept" : { + "description" : "Connection acceptance: manual or auto", + "enum" : [ "manual", "auto" ], + "example" : "auto", + "type" : "string" + }, + "alias" : { + "description" : "Optional alias to apply to connection for later use", + "example" : "Bob, providing quotes", + "type" : "string" + }, + "connection_id" : { + "description" : "Connection identifier", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "type" : "string" + }, + "connection_protocol" : { + "description" : "Connection protocol used", + "enum" : [ "connections/1.0", "didexchange/1.0" ], + "example" : "connections/1.0", + "type" : "string" + }, + "created_at" : { + "description" : "Time of record creation", + "example" : "2021-12-31T23:59:59Z", + "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$", + "type" : "string" + }, + "error_msg" : { + "description" : "Error message", + "example" : "No DIDDoc provided; cannot connect to public DID", + "type" : "string" + }, + "inbound_connection_id" : { + "description" : "Inbound routing connection id to use", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", 
+ "type" : "string" + }, + "invitation_key" : { + "description" : "Public key for connection", + "example" : "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV", + "pattern" : "^[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}$", + "type" : "string" + }, + "invitation_mode" : { + "description" : "Invitation mode", + "enum" : [ "once", "multi", "static" ], + "example" : "once", + "type" : "string" + }, + "invitation_msg_id" : { + "description" : "ID of out-of-band invitation message", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "type" : "string" + }, + "my_did" : { + "description" : "Our DID for connection", + "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", + "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$", + "type" : "string" + }, + "request_id" : { + "description" : "Connection request identifier", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "type" : "string" + }, + "rfc23_state" : { + "description" : "State per RFC 23", + "example" : "invitation-sent", + "readOnly" : true, + "type" : "string" + }, + "state" : { + "description" : "Current record state", + "example" : "active", + "type" : "string" + }, + "their_did" : { + "description" : "Their DID for connection", + "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", + "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$", + "type" : "string" + }, + "their_label" : { + "description" : "Their label for connection", + "example" : "Bob", + "type" : "string" + }, + "their_public_did" : { + "description" : "Other agent's public DID for connection", + "example" : "2cpBmR3FqGKWi5EyUbpRY8", + "type" : "string" + }, + "their_role" : { + 
"description" : "Their role in the connection protocol", + "enum" : [ "invitee", "requester", "inviter", "responder" ], + "example" : "requester", + "type" : "string" + }, + "updated_at" : { + "description" : "Time of last record update", + "example" : "2021-12-31T23:59:59Z", + "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$", + "type" : "string" + } + }, + "required" : [ "connection_id" ], + "type" : "object" + }, "ConnectionInvitation" : { "properties" : { "@id" : { @@ -6735,7 +6940,7 @@ "results" : { "description" : "List of connection records", "items" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" }, "type" : "array" } @@ -6831,7 +7036,7 @@ "type" : "string" }, "record" : { - "$ref" : "#/components/schemas/StoredConnRecord" + "$ref" : "#/components/schemas/ConnRecord" }, "their_did" : { "description" : "Remote DID", @@ -7228,6 +7433,10 @@ }, "type" : "array" }, + "credentialStatus" : { + "example" : "", + "type" : "object" + }, "credentialSubject" : { "example" : "", "type" : "object" @@ -7923,6 +8132,14 @@ }, "type" : "object" }, + "FetchCredentialResponse" : { + "properties" : { + "results" : { + "$ref" : "#/components/schemas/VerifiableCredential" + } + }, + "type" : "object" + }, "Filter" : { "properties" : { "const" : { @@ -8158,9 +8375,8 @@ "type" : "string" }, "prover_did" : { - "description" : "Prover DID", - "example" : "WgWxqztrNooG92RXvxSTWv", - "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$", + "description" : "Prover DID/Random String/UUID", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type" : "string" } }, @@ -9282,34 +9498,74 @@ "required" : [ "connection_id", "invitation", "invitation_url" ], "type" : "object" }, - "IssueCredentialModuleResponse" : { - "type" : "object" - }, - "IssuerCredRevRecord" : { + "IssuanceOptions" : { "properties" : { - "created_at" : { - 
"description" : "Time of record creation", - "example" : "2021-12-31T23:59:59Z", - "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$", + "challenge" : { + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type" : "string" }, - "cred_def_id" : { - "description" : "Credential definition identifier", - "example" : "WgWxqztrNooG92RXvxSTWv:3:CL:20:tag", - "pattern" : "^([123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}):3:CL:(([1-9][0-9]*)|([123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}:2:.+:[0-9.]+)):(.+)?$", + "created" : { + "example" : "2010-01-01T19:23:24Z", "type" : "string" }, - "cred_ex_id" : { - "description" : "Credential exchange record identifier at credential issue", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "domain" : { + "example" : "website.example", "type" : "string" }, - "cred_ex_version" : { - "description" : "Credential exchange version", + "type" : { + "example" : "Ed25519Signature2020", "type" : "string" - }, - "cred_rev_id" : { - "description" : "Credential revocation identifier", + } + }, + "type" : "object" + }, + "IssueCredentialModuleResponse" : { + "type" : "object" + }, + "IssueCredentialRequest" : { + "properties" : { + "credential" : { + "$ref" : "#/components/schemas/Credential" + }, + "options" : { + "$ref" : "#/components/schemas/IssuanceOptions" + } + }, + "type" : "object" + }, + "IssueCredentialResponse" : { + "properties" : { + "verifiableCredential" : { + "$ref" : "#/components/schemas/VerifiableCredential" + } + }, + "type" : "object" + }, + "IssuerCredRevRecord" : { + "properties" : { + "created_at" : { + "description" : "Time of record creation", + "example" : "2021-12-31T23:59:59Z", + "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$", + "type" : "string" + }, + "cred_def_id" : { + "description" : "Credential definition identifier", + "example" : 
"WgWxqztrNooG92RXvxSTWv:3:CL:20:tag", + "pattern" : "^([123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}):3:CL:(([1-9][0-9]*)|([123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}:2:.+:[0-9.]+)):(.+)?$", + "type" : "string" + }, + "cred_ex_id" : { + "description" : "Credential exchange record identifier at credential issue", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "type" : "string" + }, + "cred_ex_version" : { + "description" : "Credential exchange version", + "type" : "string" + }, + "cred_rev_id" : { + "description" : "Credential revocation identifier", "example" : "12345", "pattern" : "^[1-9][0-9]*$", "type" : "string" @@ -9665,63 +9921,6 @@ }, "type" : "object" }, - "LdpIssueRequest" : { - "properties" : { - "credential" : { - "$ref" : "#/components/schemas/Credential" - }, - "options" : { - "$ref" : "#/components/schemas/LDProofVCOptions" - } - }, - "type" : "object" - }, - "LdpIssueResponse" : { - "properties" : { - "vc" : { - "$ref" : "#/components/schemas/VerifiableCredential" - } - }, - "type" : "object" - }, - "LdpVerifyRequest" : { - "properties" : { - "options" : { - "$ref" : "#/components/schemas/LDProofVCOptions" - }, - "vc" : { - "$ref" : "#/components/schemas/VerifiableCredential" - }, - "vp" : { - "$ref" : "#/components/schemas/VerifiableCredential" - } - }, - "type" : "object" - }, - "LdpVerifyResponse" : { - "properties" : { - "credential_results" : { - "items" : { - "$ref" : "#/components/schemas/DocumentVerificationResult" - }, - "type" : "array" - }, - "errors" : { - "items" : { - "type" : "string" - }, - "type" : "array" - }, - "presentation_result" : { - "$ref" : "#/components/schemas/DocumentVerificationResult" - }, - "verified" : { - "type" : "boolean" - } - }, - "required" : [ "verified" ], - "type" : "object" - }, "LedgerConfigInstance" : { "properties" : { "genesis_file" : { @@ -9777,7 +9976,7 @@ }, "domain" : { "description" : "A string value specifying the restricted domain of the 
signature.", - "example" : "example.com", + "example" : "https://example.com", "pattern" : "\\w+:(\\/?\\/?)[^\\s]+", "type" : "string" }, @@ -9816,6 +10015,9 @@ "required" : [ "created", "proofPurpose", "type", "verificationMethod" ], "type" : "object" }, + "ListCredentialsResponse" : { + "type" : "object" + }, "MediationCreateRequest" : { "type" : "object" }, @@ -10130,6 +10332,11 @@ "invitation" : { "$ref" : "#/components/schemas/InvitationRecord_invitation" }, + "multi_use" : { + "description" : "Allow for multiple uses of the oobinvitation", + "example" : true, + "type" : "boolean" + }, "oob_id" : { "description" : "Oob record identifier", "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", @@ -10206,6 +10413,48 @@ }, "type" : "object" }, + "Presentation" : { + "properties" : { + "@context" : { + "description" : "The JSON-LD context of the presentation", + "example" : [ "https://www.w3.org/2018/credentials/v1" ], + "items" : { + "type" : "object" + }, + "type" : "array" + }, + "holder" : { + "description" : "The JSON-LD Verifiable Credential Holder. 
Either string of object with id field.", + "example" : "did:key:z6MkpTHR8VNsBxYAAWHut2Geadd9jSwuBV8xRoAnwWsdvktH", + "type" : "object" + }, + "id" : { + "example" : "http://example.edu/presentations/1872", + "pattern" : "\\w+:(\\/?\\/?)[^\\s]+", + "type" : "string" + }, + "proof" : { + "$ref" : "#/components/schemas/Presentation_proof" + }, + "type" : { + "description" : "The JSON-LD type of the presentation", + "example" : [ "VerifiablePresentation" ], + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "verifiableCredential" : { + "items" : { + "properties" : { }, + "type" : "object" + }, + "type" : "array" + } + }, + "required" : [ "@context", "type" ], + "type" : "object" + }, "PresentationDefinition" : { "properties" : { "format" : { @@ -10293,6 +10542,30 @@ "required" : [ "request_presentations~attach" ], "type" : "object" }, + "PresentationVerificationResult" : { + "properties" : { + "credential_results" : { + "items" : { + "$ref" : "#/components/schemas/DocumentVerificationResult" + }, + "type" : "array" + }, + "errors" : { + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "presentation_result" : { + "$ref" : "#/components/schemas/DocumentVerificationResult" + }, + "verified" : { + "type" : "boolean" + } + }, + "required" : [ "verified" ], + "type" : "object" + }, "ProfileSettings" : { "properties" : { "settings" : { @@ -10345,6 +10618,25 @@ "required" : [ "pid" ], "type" : "object" }, + "ProvePresentationRequest" : { + "properties" : { + "options" : { + "$ref" : "#/components/schemas/IssuanceOptions" + }, + "presentation" : { + "$ref" : "#/components/schemas/Presentation" + } + }, + "type" : "object" + }, + "ProvePresentationResponse" : { + "properties" : { + "verifiablePresentation" : { + "$ref" : "#/components/schemas/VerifiablePresentation" + } + }, + "type" : "object" + }, "PublishRevocations" : { "properties" : { "rrid2crid" : { @@ -11044,117 +11336,6 @@ "required" : [ "proof" ], "type" : "object" }, - "StoredConnRecord" : 
{ - "properties" : { - "accept" : { - "description" : "Connection acceptance: manual or auto", - "enum" : [ "manual", "auto" ], - "example" : "auto", - "type" : "string" - }, - "alias" : { - "description" : "Optional alias to apply to connection for later use", - "example" : "Bob, providing quotes", - "type" : "string" - }, - "connection_id" : { - "description" : "Connection identifier", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "type" : "string" - }, - "connection_protocol" : { - "description" : "Connection protocol used", - "enum" : [ "connections/1.0", "didexchange/1.0" ], - "example" : "connections/1.0", - "type" : "string" - }, - "created_at" : { - "description" : "Time of record creation", - "example" : "2021-12-31T23:59:59Z", - "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$", - "type" : "string" - }, - "error_msg" : { - "description" : "Error message", - "example" : "No DIDDoc provided; cannot connect to public DID", - "type" : "string" - }, - "inbound_connection_id" : { - "description" : "Inbound routing connection id to use", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "type" : "string" - }, - "invitation_key" : { - "description" : "Public key for connection", - "example" : "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV", - "pattern" : "^[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}$", - "type" : "string" - }, - "invitation_mode" : { - "description" : "Invitation mode", - "enum" : [ "once", "multi", "static" ], - "example" : "once", - "type" : "string" - }, - "invitation_msg_id" : { - "description" : "ID of out-of-band invitation message", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "type" : "string" - }, - "my_did" : { - "description" : "Our DID for connection", - "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", - "pattern" : 
"^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$", - "type" : "string" - }, - "request_id" : { - "description" : "Connection request identifier", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "type" : "string" - }, - "rfc23_state" : { - "description" : "State per RFC 23", - "example" : "invitation-sent", - "readOnly" : true, - "type" : "string" - }, - "state" : { - "description" : "Current record state", - "example" : "active", - "type" : "string" - }, - "their_did" : { - "description" : "Their DID for connection", - "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", - "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$", - "type" : "string" - }, - "their_label" : { - "description" : "Their label for connection", - "example" : "Bob", - "type" : "string" - }, - "their_public_did" : { - "description" : "Other agent's public DID for connection", - "example" : "2cpBmR3FqGKWi5EyUbpRY8", - "type" : "string" - }, - "their_role" : { - "description" : "Their role in the connection protocol", - "enum" : [ "invitee", "requester", "inviter", "responder" ], - "example" : "requester", - "type" : "string" - }, - "updated_at" : { - "description" : "Time of last record update", - "example" : "2021-12-31T23:59:59Z", - "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$", - "type" : "string" - } - }, - "required" : [ "connection_id" ], - "type" : "object" - }, "SubmissionRequirements" : { "properties" : { "count" : { @@ -11323,8 +11504,8 @@ "type" : "string" }, "endorser_write_txn" : { - "description" : "If True, Endorser will write the transaction after endorsing it", - "example" : 
true, + "description" : "Request Endorser to write the ledger transaction, this parameter is deprecated and no longer supported.", + "example" : false, "type" : "boolean" }, "formats" : { @@ -11374,7 +11555,7 @@ "author_goal_code" : "aries.transaction.ledger.write", "context" : "did:sov", "method" : "add-signature", - "signature_type" : "", + "signature_type" : "default", "signer_goal_code" : "aries.transaction.endorse" }, "properties" : { }, @@ -13615,6 +13796,10 @@ }, "type" : "array" }, + "credentialStatus" : { + "example" : "", + "type" : "object" + }, "credentialSubject" : { "example" : "", "type" : "object" @@ -13656,6 +13841,86 @@ "required" : [ "@context", "credentialSubject", "issuanceDate", "issuer", "proof", "type" ], "type" : "object" }, + "VerifiablePresentation" : { + "properties" : { + "@context" : { + "description" : "The JSON-LD context of the presentation", + "example" : [ "https://www.w3.org/2018/credentials/v1" ], + "items" : { + "type" : "object" + }, + "type" : "array" + }, + "holder" : { + "description" : "The JSON-LD Verifiable Credential Holder. 
Either string of object with id field.", + "example" : "did:key:z6MkpTHR8VNsBxYAAWHut2Geadd9jSwuBV8xRoAnwWsdvktH", + "type" : "object" + }, + "id" : { + "example" : "http://example.edu/presentations/1872", + "pattern" : "\\w+:(\\/?\\/?)[^\\s]+", + "type" : "string" + }, + "proof" : { + "$ref" : "#/components/schemas/Presentation_proof" + }, + "type" : { + "description" : "The JSON-LD type of the presentation", + "example" : [ "VerifiablePresentation" ], + "items" : { + "type" : "string" + }, + "type" : "array" + }, + "verifiableCredential" : { + "items" : { + "properties" : { }, + "type" : "object" + }, + "type" : "array" + } + }, + "required" : [ "@context", "proof", "type" ], + "type" : "object" + }, + "VerifyCredentialRequest" : { + "properties" : { + "options" : { + "$ref" : "#/components/schemas/LDProofVCOptions" + }, + "verifiableCredential" : { + "$ref" : "#/components/schemas/VerifiableCredential" + } + }, + "type" : "object" + }, + "VerifyCredentialResponse" : { + "properties" : { + "results" : { + "$ref" : "#/components/schemas/PresentationVerificationResult" + } + }, + "type" : "object" + }, + "VerifyPresentationRequest" : { + "properties" : { + "options" : { + "$ref" : "#/components/schemas/LDProofVCOptions" + }, + "verifiablePresentation" : { + "$ref" : "#/components/schemas/VerifiablePresentation" + } + }, + "type" : "object" + }, + "VerifyPresentationResponse" : { + "properties" : { + "results" : { + "$ref" : "#/components/schemas/PresentationVerificationResult" + } + }, + "type" : "object" + }, "VerifyRequest" : { "properties" : { "doc" : { @@ -14077,6 +14342,20 @@ "description" : "The credential status mechanism to use for the credential. 
Omitting the property indicates the issued credential will not include a credential status", "type" : "object" }, + "Presentation_proof" : { + "allOf" : [ { + "$ref" : "#/components/schemas/LinkedDataProof" + } ], + "description" : "The proof of the presentation", + "example" : { + "created" : "2019-12-11T03:50:55", + "jws" : "eyJhbGciOiAiRWREU0EiLCAiYjY0IjogZmFsc2UsICJjcml0JiNjQiXX0..lKJU0Df_keblRKhZAS9Qq6zybm-HqUXNVZ8vgEPNTAjQKBhQDxvXNo7nvtUBb_Eq1Ch6YBKY5qBQ", + "proofPurpose" : "assertionMethod", + "type" : "Ed25519Signature2018", + "verificationMethod" : "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" + }, + "type" : "object" + }, "SchemaSendResult_schema" : { "allOf" : [ { "$ref" : "#/components/schemas/Schema" diff --git a/open-api/swagger.json b/open-api/swagger.json index 06fe49b5ec..a1982cf310 100644 --- a/open-api/swagger.json +++ b/open-api/swagger.json @@ -1,7 +1,7 @@ { "swagger" : "2.0", "info" : { - "version" : "v0.12.0rc0", + "version" : "v0.12.0rc1", "title" : "Aries Cloud Agent" }, "tags" : [ { @@ -83,20 +83,6 @@ "description" : "Specification", "url" : "https://tools.ietf.org/html/rfc7515" } - }, { - "name" : "ldp-vc", - "description" : "Issue and verify LDP VCs and VPs", - "externalDocs" : { - "description" : "Specification", - "url" : "https://www.w3.org/TR/vc-data-model/" - } - }, { - "description" : "Manage credentials and presentations", - "externalDocs" : { - "description" : "Specification", - "url" : "https://w3c-ccg.github.io/vc-api" - }, - "name" : "vc-api" }, { "name" : "ledger", "description" : "Interaction with ledger", @@ -166,6 +152,13 @@ "description" : "Specification", "url" : "https://github.com/hyperledger/aries-rfcs/tree/527849ec3aa2a8fd47a7bb6c57f918ff8bcb5e8c/features/0048-trust-ping" } + }, { + "name" : "vc-api", + "description" : "Endpoints for managing w3c credentials and presentations", + "externalDocs" : { + "description" : "Specification", + "url" : 
"https://w3c-ccg.github.io/vc-api/" + } }, { "name" : "wallet", "description" : "DID and tag policy management", @@ -346,7 +339,7 @@ "description" : "Connection state", "required" : false, "type" : "string", - "enum" : [ "request", "abandoned", "active", "init", "error", "invitation", "start", "response", "completed" ] + "enum" : [ "abandoned", "invitation", "active", "response", "request", "start", "error", "completed", "init" ] }, { "name" : "their_did", "in" : "query", @@ -485,7 +478,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -507,7 +500,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -569,7 +562,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -598,7 +591,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -1201,7 +1194,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -1250,7 +1243,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -1285,7 +1278,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -1327,7 +1320,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -1356,7 +1349,7 @@ "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -4642,12 +4635,6 @@ "description" : "Transaction identifier", "required" : true, "type" : "string" - }, { - "name" : "endorser_write_txn", - "in" : 
"query", - "description" : "Endorser will write the transaction after endorsing it", - "required" : false, - "type" : "boolean" } ], "responses" : { "200" : { @@ -4838,47 +4825,139 @@ } } }, - "/vc/ldp/issue" : { + "/vc/credentials" : { + "get" : { + "tags" : [ "vc-api" ], + "summary" : "List credentials", + "produces" : [ "application/json" ], + "parameters" : [ ], + "responses" : { + "200" : { + "description" : "", + "schema" : { + "$ref" : "#/definitions/ListCredentialsResponse" + } + } + } + } + }, + "/vc/credentials/issue" : { + "post" : { + "tags" : [ "vc-api" ], + "summary" : "Issue a credential", + "produces" : [ "application/json" ], + "parameters" : [ { + "in" : "body", + "name" : "body", + "required" : false, + "schema" : { + "$ref" : "#/definitions/IssueCredentialRequest" + } + } ], + "responses" : { + "200" : { + "description" : "", + "schema" : { + "$ref" : "#/definitions/IssueCredentialResponse" + } + } + } + } + }, + "/vc/credentials/store" : { + "post" : { + "tags" : [ "vc-api" ], + "summary" : "Store a credential", + "produces" : [ "application/json" ], + "parameters" : [ ], + "responses" : { } + } + }, + "/vc/credentials/verify" : { + "post" : { + "tags" : [ "vc-api" ], + "summary" : "Verify a credential", + "produces" : [ "application/json" ], + "parameters" : [ { + "in" : "body", + "name" : "body", + "required" : false, + "schema" : { + "$ref" : "#/definitions/VerifyCredentialRequest" + } + } ], + "responses" : { + "200" : { + "description" : "", + "schema" : { + "$ref" : "#/definitions/VerifyCredentialResponse" + } + } + } + } + }, + "/vc/credentials/{credential_id}" : { + "get" : { + "tags" : [ "vc-api" ], + "summary" : "Fetch credential by ID", + "produces" : [ "application/json" ], + "parameters" : [ { + "name" : "credential_id", + "in" : "path", + "required" : true, + "type" : "string" + } ], + "responses" : { + "200" : { + "description" : "", + "schema" : { + "$ref" : "#/definitions/FetchCredentialResponse" + } + } + } + } + }, + 
"/vc/presentations/prove" : { "post" : { - "tags" : [ "ldp_vc" ], - "summary" : "Sign an LDP VC.", + "tags" : [ "vc-api" ], + "summary" : "Prove a presentation", "produces" : [ "application/json" ], "parameters" : [ { "in" : "body", "name" : "body", "required" : false, "schema" : { - "$ref" : "#/definitions/LdpIssueRequest" + "$ref" : "#/definitions/ProvePresentationRequest" } } ], "responses" : { "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/LdpIssueResponse" + "$ref" : "#/definitions/ProvePresentationResponse" } } } } }, - "/vc/ldp/verify" : { + "/vc/presentations/verify" : { "post" : { - "tags" : [ "ldp_vc" ], - "summary" : "Verify an LDP VC or VP.", + "tags" : [ "vc-api" ], + "summary" : "Verify a Presentation", "produces" : [ "application/json" ], "parameters" : [ { "in" : "body", "name" : "body", "required" : false, "schema" : { - "$ref" : "#/definitions/LdpVerifyRequest" + "$ref" : "#/definitions/VerifyPresentationRequest" } } ], "responses" : { "200" : { "description" : "", "schema" : { - "$ref" : "#/definitions/LdpVerifyResponse" + "$ref" : "#/definitions/VerifyPresentationResponse" } } } @@ -5526,6 +5605,117 @@ } } }, + "ConnRecord" : { + "type" : "object", + "required" : [ "connection_id" ], + "properties" : { + "accept" : { + "type" : "string", + "example" : "auto", + "description" : "Connection acceptance: manual or auto", + "enum" : [ "manual", "auto" ] + }, + "alias" : { + "type" : "string", + "example" : "Bob, providing quotes", + "description" : "Optional alias to apply to connection for later use" + }, + "connection_id" : { + "type" : "string", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "description" : "Connection identifier" + }, + "connection_protocol" : { + "type" : "string", + "example" : "connections/1.0", + "description" : "Connection protocol used", + "enum" : [ "connections/1.0", "didexchange/1.0" ] + }, + "created_at" : { + "type" : "string", + "example" : "2021-12-31T23:59:59Z", + "description" : "Time 
of record creation", + "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$" + }, + "error_msg" : { + "type" : "string", + "example" : "No DIDDoc provided; cannot connect to public DID", + "description" : "Error message" + }, + "inbound_connection_id" : { + "type" : "string", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "description" : "Inbound routing connection id to use" + }, + "invitation_key" : { + "type" : "string", + "example" : "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV", + "description" : "Public key for connection", + "pattern" : "^[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}$" + }, + "invitation_mode" : { + "type" : "string", + "example" : "once", + "description" : "Invitation mode", + "enum" : [ "once", "multi", "static" ] + }, + "invitation_msg_id" : { + "type" : "string", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "description" : "ID of out-of-band invitation message" + }, + "my_did" : { + "type" : "string", + "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", + "description" : "Our DID for connection", + "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$" + }, + "request_id" : { + "type" : "string", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "description" : "Connection request identifier" + }, + "rfc23_state" : { + "type" : "string", + "example" : "invitation-sent", + "description" : "State per RFC 23", + "readOnly" : true + }, + "state" : { + "type" : "string", + "example" : "active", + "description" : "Current record state" + }, + "their_did" : { + "type" : "string", + "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", + "description" : "Their DID for connection", + "pattern" : 
"^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$" + }, + "their_label" : { + "type" : "string", + "example" : "Bob", + "description" : "Their label for connection" + }, + "their_public_did" : { + "type" : "string", + "example" : "2cpBmR3FqGKWi5EyUbpRY8", + "description" : "Other agent's public DID for connection" + }, + "their_role" : { + "type" : "string", + "example" : "requester", + "description" : "Their role in the connection protocol", + "enum" : [ "invitee", "requester", "inviter", "responder" ] + }, + "updated_at" : { + "type" : "string", + "example" : "2021-12-31T23:59:59Z", + "description" : "Time of last record update", + "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$" + } + } + }, "ConnectionInvitation" : { "type" : "object", "properties" : { @@ -5593,7 +5783,7 @@ "type" : "array", "description" : "List of connection records", "items" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" } } } @@ -5688,7 +5878,7 @@ "pattern" : "^[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}$" }, "record" : { - "$ref" : "#/definitions/StoredConnRecord" + "$ref" : "#/definitions/ConnRecord" }, "their_did" : { "type" : "string", @@ -6083,6 +6273,9 @@ "description" : "The JSON-LD context of the credential", "items" : { } }, + "credentialStatus" : { + "example" : "" + }, "credentialSubject" : { "example" : "" }, @@ -6771,6 +6964,14 @@ } } }, + "FetchCredentialResponse" : { + "type" : "object", + "properties" : { + "results" : { + "$ref" : "#/definitions/VerifiableCredential" + } + } + }, "Filter" : { "type" : "object", "properties" : { @@ -7003,9 +7204,8 @@ }, "prover_did" : { "type" : "string", - "example" : "WgWxqztrNooG92RXvxSTWv", - "description" : "Prover DID", - "pattern" : 
"^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$" + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", + "description" : "Prover DID/Random String/UUID" } } }, @@ -8123,9 +8323,49 @@ } } }, + "IssuanceOptions" : { + "type" : "object", + "properties" : { + "challenge" : { + "type" : "string", + "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6" + }, + "created" : { + "type" : "string", + "example" : "2010-01-01T19:23:24Z" + }, + "domain" : { + "type" : "string", + "example" : "website.example" + }, + "type" : { + "type" : "string", + "example" : "Ed25519Signature2020" + } + } + }, "IssueCredentialModuleResponse" : { "type" : "object" }, + "IssueCredentialRequest" : { + "type" : "object", + "properties" : { + "credential" : { + "$ref" : "#/definitions/Credential" + }, + "options" : { + "$ref" : "#/definitions/IssuanceOptions" + } + } + }, + "IssueCredentialResponse" : { + "type" : "object", + "properties" : { + "verifiableCredential" : { + "$ref" : "#/definitions/VerifiableCredential" + } + } + }, "IssuerCredRevRecord" : { "type" : "object", "properties" : { @@ -8506,69 +8746,12 @@ } } }, - "LdpIssueRequest" : { + "LedgerConfigInstance" : { "type" : "object", "properties" : { - "credential" : { - "$ref" : "#/definitions/Credential" - }, - "options" : { - "$ref" : "#/definitions/LDProofVCOptions" - } - } - }, - "LdpIssueResponse" : { - "type" : "object", - "properties" : { - "vc" : { - "$ref" : "#/definitions/VerifiableCredential" - } - } - }, - "LdpVerifyRequest" : { - "type" : "object", - "properties" : { - "options" : { - "$ref" : "#/definitions/LDProofVCOptions" - }, - "vc" : { - "$ref" : "#/definitions/VerifiableCredential" - }, - "vp" : { - "$ref" : "#/definitions/VerifiableCredential" - } - } - }, - "LdpVerifyResponse" : { - "type" : "object", - "required" : [ "verified" ], - "properties" : { - "credential_results" : { - "type" : "array", - "items" : { - "$ref" : "#/definitions/DocumentVerificationResult" - } - }, - "errors" : 
{ - "type" : "array", - "items" : { - "type" : "string" - } - }, - "presentation_result" : { - "$ref" : "#/definitions/DocumentVerificationResult" - }, - "verified" : { - "type" : "boolean" - } - } - }, - "LedgerConfigInstance" : { - "type" : "object", - "properties" : { - "genesis_file" : { - "type" : "string", - "description" : "genesis_file" + "genesis_file" : { + "type" : "string", + "description" : "genesis_file" }, "genesis_transactions" : { "type" : "string", @@ -8620,7 +8803,7 @@ }, "domain" : { "type" : "string", - "example" : "example.com", + "example" : "https://example.com", "description" : "A string value specifying the restricted domain of the signature.", "pattern" : "\\w+:(\\/?\\/?)[^\\s]+" }, @@ -8657,6 +8840,9 @@ } } }, + "ListCredentialsResponse" : { + "type" : "object" + }, "MediationCreateRequest" : { "type" : "object" }, @@ -8973,6 +9159,11 @@ "invitation" : { "$ref" : "#/definitions/InvitationRecord_invitation" }, + "multi_use" : { + "type" : "boolean", + "example" : true, + "description" : "Allow for multiple uses of the oobinvitation" + }, "oob_id" : { "type" : "string", "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", @@ -9047,6 +9238,45 @@ } } }, + "Presentation" : { + "type" : "object", + "required" : [ "@context", "type" ], + "properties" : { + "@context" : { + "type" : "array", + "example" : [ "https://www.w3.org/2018/credentials/v1" ], + "description" : "The JSON-LD context of the presentation", + "items" : { } + }, + "holder" : { + "example" : "did:key:z6MkpTHR8VNsBxYAAWHut2Geadd9jSwuBV8xRoAnwWsdvktH", + "description" : "The JSON-LD Verifiable Credential Holder. Either string of object with id field." 
+ }, + "id" : { + "type" : "string", + "example" : "http://example.edu/presentations/1872", + "pattern" : "\\w+:(\\/?\\/?)[^\\s]+" + }, + "proof" : { + "$ref" : "#/definitions/Presentation_proof" + }, + "type" : { + "type" : "array", + "example" : [ "VerifiablePresentation" ], + "description" : "The JSON-LD type of the presentation", + "items" : { + "type" : "string" + } + }, + "verifiableCredential" : { + "type" : "array", + "items" : { + "type" : "object", + "properties" : { } + } + } + } + }, "PresentationDefinition" : { "type" : "object", "properties" : { @@ -9134,6 +9364,30 @@ } } }, + "PresentationVerificationResult" : { + "type" : "object", + "required" : [ "verified" ], + "properties" : { + "credential_results" : { + "type" : "array", + "items" : { + "$ref" : "#/definitions/DocumentVerificationResult" + } + }, + "errors" : { + "type" : "array", + "items" : { + "type" : "string" + } + }, + "presentation_result" : { + "$ref" : "#/definitions/DocumentVerificationResult" + }, + "verified" : { + "type" : "boolean" + } + } + }, "ProfileSettings" : { "type" : "object", "properties" : { @@ -9186,6 +9440,25 @@ } } }, + "ProvePresentationRequest" : { + "type" : "object", + "properties" : { + "options" : { + "$ref" : "#/definitions/IssuanceOptions" + }, + "presentation" : { + "$ref" : "#/definitions/Presentation" + } + } + }, + "ProvePresentationResponse" : { + "type" : "object", + "properties" : { + "verifiablePresentation" : { + "$ref" : "#/definitions/VerifiablePresentation" + } + } + }, "PublishRevocations" : { "type" : "object", "properties" : { @@ -9883,117 +10156,6 @@ } } }, - "StoredConnRecord" : { - "type" : "object", - "required" : [ "connection_id" ], - "properties" : { - "accept" : { - "type" : "string", - "example" : "auto", - "description" : "Connection acceptance: manual or auto", - "enum" : [ "manual", "auto" ] - }, - "alias" : { - "type" : "string", - "example" : "Bob, providing quotes", - "description" : "Optional alias to apply to connection for 
later use" - }, - "connection_id" : { - "type" : "string", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "description" : "Connection identifier" - }, - "connection_protocol" : { - "type" : "string", - "example" : "connections/1.0", - "description" : "Connection protocol used", - "enum" : [ "connections/1.0", "didexchange/1.0" ] - }, - "created_at" : { - "type" : "string", - "example" : "2021-12-31T23:59:59Z", - "description" : "Time of record creation", - "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$" - }, - "error_msg" : { - "type" : "string", - "example" : "No DIDDoc provided; cannot connect to public DID", - "description" : "Error message" - }, - "inbound_connection_id" : { - "type" : "string", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "description" : "Inbound routing connection id to use" - }, - "invitation_key" : { - "type" : "string", - "example" : "H3C2AVvLMv6gmMNam3uVAjZpfkcJCwDwnZn6z3wXmqPV", - "description" : "Public key for connection", - "pattern" : "^[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}$" - }, - "invitation_mode" : { - "type" : "string", - "example" : "once", - "description" : "Invitation mode", - "enum" : [ "once", "multi", "static" ] - }, - "invitation_msg_id" : { - "type" : "string", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "description" : "ID of out-of-band invitation message" - }, - "my_did" : { - "type" : "string", - "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", - "description" : "Our DID for connection", - "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$" - }, - "request_id" : { - "type" : "string", - "example" : "3fa85f64-5717-4562-b3fc-2c963f66afa6", - "description" : "Connection request identifier" - }, - "rfc23_state" : { - "type" 
: "string", - "example" : "invitation-sent", - "description" : "State per RFC 23", - "readOnly" : true - }, - "state" : { - "type" : "string", - "example" : "active", - "description" : "Current record state" - }, - "their_did" : { - "type" : "string", - "example" : "did:peer:WgWxqztrNooG92RXvxSTWv", - "description" : "Their DID for connection", - "pattern" : "^(did:sov:)?[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{21,22}$|^did:([a-zA-Z0-9_]+):([a-zA-Z0-9_.%-]+(:[a-zA-Z0-9_.%-]+)*)((;[a-zA-Z0-9_.:%-]+=[a-zA-Z0-9_.:%-]*)*)(\\/[^#?]*)?([?][^#]*)?(\\#.*)?$$" - }, - "their_label" : { - "type" : "string", - "example" : "Bob", - "description" : "Their label for connection" - }, - "their_public_did" : { - "type" : "string", - "example" : "2cpBmR3FqGKWi5EyUbpRY8", - "description" : "Other agent's public DID for connection" - }, - "their_role" : { - "type" : "string", - "example" : "requester", - "description" : "Their role in the connection protocol", - "enum" : [ "invitee", "requester", "inviter", "responder" ] - }, - "updated_at" : { - "type" : "string", - "example" : "2021-12-31T23:59:59Z", - "description" : "Time of last record update", - "pattern" : "^\\d{4}-\\d\\d-\\d\\d[T ]\\d\\d:\\d\\d(?:\\:(?:\\d\\d(?:\\.\\d{1,6})?))?(?:[+-]\\d\\d:?\\d\\d|Z|)$" - } - } - }, "SubmissionRequirements" : { "type" : "object", "properties" : { @@ -10164,8 +10326,8 @@ }, "endorser_write_txn" : { "type" : "boolean", - "example" : true, - "description" : "If True, Endorser will write the transaction after endorsing it" + "example" : false, + "description" : "Request Endorser to write the ledger transaction, this parameter is deprecated and no longer supported." 
}, "formats" : { "type" : "array", @@ -10216,7 +10378,7 @@ "author_goal_code" : "aries.transaction.ledger.write", "context" : "did:sov", "method" : "add-signature", - "signature_type" : "", + "signature_type" : "default", "signer_goal_code" : "aries.transaction.endorse" }, "properties" : { } @@ -12462,6 +12624,9 @@ "description" : "The JSON-LD context of the credential", "items" : { } }, + "credentialStatus" : { + "example" : "" + }, "credentialSubject" : { "example" : "" }, @@ -12499,6 +12664,83 @@ } } }, + "VerifiablePresentation" : { + "type" : "object", + "required" : [ "@context", "proof", "type" ], + "properties" : { + "@context" : { + "type" : "array", + "example" : [ "https://www.w3.org/2018/credentials/v1" ], + "description" : "The JSON-LD context of the presentation", + "items" : { } + }, + "holder" : { + "example" : "did:key:z6MkpTHR8VNsBxYAAWHut2Geadd9jSwuBV8xRoAnwWsdvktH", + "description" : "The JSON-LD Verifiable Credential Holder. Either string of object with id field." + }, + "id" : { + "type" : "string", + "example" : "http://example.edu/presentations/1872", + "pattern" : "\\w+:(\\/?\\/?)[^\\s]+" + }, + "proof" : { + "$ref" : "#/definitions/Presentation_proof" + }, + "type" : { + "type" : "array", + "example" : [ "VerifiablePresentation" ], + "description" : "The JSON-LD type of the presentation", + "items" : { + "type" : "string" + } + }, + "verifiableCredential" : { + "type" : "array", + "items" : { + "type" : "object", + "properties" : { } + } + } + } + }, + "VerifyCredentialRequest" : { + "type" : "object", + "properties" : { + "options" : { + "$ref" : "#/definitions/LDProofVCOptions" + }, + "verifiableCredential" : { + "$ref" : "#/definitions/VerifiableCredential" + } + } + }, + "VerifyCredentialResponse" : { + "type" : "object", + "properties" : { + "results" : { + "$ref" : "#/definitions/PresentationVerificationResult" + } + } + }, + "VerifyPresentationRequest" : { + "type" : "object", + "properties" : { + "options" : { + "$ref" : 
"#/definitions/LDProofVCOptions" + }, + "verifiablePresentation" : { + "$ref" : "#/definitions/VerifiablePresentation" + } + } + }, + "VerifyPresentationResponse" : { + "type" : "object", + "properties" : { + "results" : { + "$ref" : "#/definitions/PresentationVerificationResult" + } + } + }, "VerifyRequest" : { "type" : "object", "required" : [ "doc" ], @@ -12782,6 +13024,11 @@ "type" : "object", "description" : "The credential status mechanism to use for the credential. Omitting the property indicates the issued credential will not include a credential status" }, + "Presentation_proof" : { + "type" : "object", + "description" : "The proof of the presentation", + "example" : "{\"created\":\"2019-12-11T03:50:55\",\"jws\":\"eyJhbGciOiAiRWREU0EiLCAiYjY0IjogZmFsc2UsICJjcml0JiNjQiXX0..lKJU0Df_keblRKhZAS9Qq6zybm-HqUXNVZ8vgEPNTAjQKBhQDxvXNo7nvtUBb_Eq1Ch6YBKY5qBQ\",\"proofPurpose\":\"assertionMethod\",\"type\":\"Ed25519Signature2018\",\"verificationMethod\":\"did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL\"}" + }, "SchemaSendResult_schema" : { "type" : "object", "description" : "Schema definition" diff --git a/overrides/README.md b/overrides/README.md new file mode 100644 index 0000000000..568e43f838 --- /dev/null +++ b/overrides/README.md @@ -0,0 +1,6 @@ +# Mkdocs Overrides + +This folder contains any overrides for the mkdocs docs publishing. Most notably, +the `base.html` file that puts a banner on the screen for all versions of the +docs other than the main branch. The `base.html` file is generated on publishing +the docs (in the publishing GitHub Action) -- and does not exist in the main branch. diff --git a/poetry.lock b/poetry.lock index 8081a9129a..e40bdffbfb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "aiohttp" @@ -619,43 +619,43 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "42.0.2" +version = "42.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be"}, - {file = "cryptography-42.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529"}, - {file = "cryptography-42.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9"}, - {file = "cryptography-42.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2"}, - {file = "cryptography-42.0.2-cp37-abi3-win32.whl", hash = 
"sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee"}, - {file = "cryptography-42.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee"}, - {file = "cryptography-42.0.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90"}, - {file = "cryptography-42.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea"}, - {file = "cryptography-42.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33"}, - {file = "cryptography-42.0.2-cp39-abi3-win32.whl", hash = "sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635"}, - {file = "cryptography-42.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2"}, - {file = "cryptography-42.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a"}, - {file = "cryptography-42.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65"}, - {file = "cryptography-42.0.2.tar.gz", hash = "sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888"}, + {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:de5086cd475d67113ccb6f9fae6d8fe3ac54a4f9238fd08bfdb07b03d791ff0a"}, + {file = "cryptography-42.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:935cca25d35dda9e7bd46a24831dfd255307c55a07ff38fd1a92119cffc34857"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20100c22b298c9eaebe4f0b9032ea97186ac2555f426c3e70670f2517989543b"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eb6368d5327d6455f20327fb6159b97538820355ec00f8cc9464d617caecead"}, + {file = 
"cryptography-42.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:39d5c93e95bcbc4c06313fc6a500cee414ee39b616b55320c1904760ad686938"}, + {file = "cryptography-42.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d96ea47ce6d0055d5b97e761d37b4e84195485cb5a38401be341fabf23bc32a"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d1998e545081da0ab276bcb4b33cce85f775adb86a516e8f55b3dac87f469548"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93fbee08c48e63d5d1b39ab56fd3fdd02e6c2431c3da0f4edaf54954744c718f"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90147dad8c22d64b2ff7331f8d4cddfdc3ee93e4879796f837bdbb2a0b141e0c"}, + {file = "cryptography-42.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:4dcab7c25e48fc09a73c3e463d09ac902a932a0f8d0c568238b3696d06bf377b"}, + {file = "cryptography-42.0.3-cp37-abi3-win32.whl", hash = "sha256:1e935c2900fb53d31f491c0de04f41110351377be19d83d908c1fd502ae8daa5"}, + {file = "cryptography-42.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:762f3771ae40e111d78d77cbe9c1035e886ac04a234d3ee0856bf4ecb3749d54"}, + {file = "cryptography-42.0.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3ec384058b642f7fb7e7bff9664030011ed1af8f852540c76a1317a9dd0d20"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35772a6cffd1f59b85cb670f12faba05513446f80352fe811689b4e439b5d89e"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04859aa7f12c2b5f7e22d25198ddd537391f1695df7057c8700f71f26f47a129"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c3d1f5a1d403a8e640fa0887e9f7087331abb3f33b0f2207d2cc7f213e4a864c"}, + {file = "cryptography-42.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df34312149b495d9d03492ce97471234fd9037aa5ba217c2a6ea890e9166f151"}, + {file = 
"cryptography-42.0.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:de4ae486041878dc46e571a4c70ba337ed5233a1344c14a0790c4c4be4bbb8b4"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0fab2a5c479b360e5e0ea9f654bcebb535e3aa1e493a715b13244f4e07ea8eec"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25b09b73db78facdfd7dd0fa77a3f19e94896197c86e9f6dc16bce7b37a96504"}, + {file = "cryptography-42.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d5cf11bc7f0b71fb71af26af396c83dfd3f6eed56d4b6ef95d57867bf1e4ba65"}, + {file = "cryptography-42.0.3-cp39-abi3-win32.whl", hash = "sha256:0fea01527d4fb22ffe38cd98951c9044400f6eff4788cf52ae116e27d30a1ba3"}, + {file = "cryptography-42.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:2619487f37da18d6826e27854a7f9d4d013c51eafb066c80d09c63cf24505306"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ead69ba488f806fe1b1b4050febafdbf206b81fa476126f3e16110c818bac396"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:20180da1b508f4aefc101cebc14c57043a02b355d1a652b6e8e537967f1e1b46"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fbf0f3f0fac7c089308bd771d2c6c7b7d53ae909dce1db52d8e921f6c19bb3a"}, + {file = "cryptography-42.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c23f03cfd7d9826cdcbad7850de67e18b4654179e01fe9bc623d37c2638eb4ef"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db0480ffbfb1193ac4e1e88239f31314fe4c6cdcf9c0b8712b55414afbf80db4"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:6c25e1e9c2ce682d01fc5e2dde6598f7313027343bd14f4049b82ad0402e52cd"}, + {file = "cryptography-42.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9541c69c62d7446539f2c1c06d7046aef822940d248fa4b8962ff0302862cc1f"}, + {file = 
"cryptography-42.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b797099d221df7cce5ff2a1d272761d1554ddf9a987d3e11f6459b38cd300fd"}, + {file = "cryptography-42.0.3.tar.gz", hash = "sha256:069d2ce9be5526a44093a0991c450fe9906cdf069e0e7cd67d9dee49a62b9ebe"}, ] [package.dependencies] @@ -790,17 +790,6 @@ toolz = ">=0.8.0" [package.extras] cython = ["cython"] -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - [[package]] name = "deepmerge" version = "0.3.0" @@ -882,62 +871,59 @@ gmpy2 = ["gmpy2"] [[package]] name = "eth-hash" -version = "0.3.3" +version = "0.6.0" description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (erroneously) called sha3" optional = false -python-versions = ">=3.5, <4" +python-versions = ">=3.8, <4" files = [ - {file = "eth-hash-0.3.3.tar.gz", hash = "sha256:8cde211519ff1a98b46e9057cb909f12ab62e263eb30a0a94e2f7e1f46ac67a0"}, - {file = "eth_hash-0.3.3-py3-none-any.whl", hash = "sha256:3c884e4f788b38cc92cff05c4e43bc6b82686066f04ecfae0e11cdcbe5a283bd"}, + {file = "eth-hash-0.6.0.tar.gz", hash = "sha256:ae72889e60db6acbb3872c288cfa02ed157f4c27630fcd7f9c8442302c31e478"}, + {file = "eth_hash-0.6.0-py3-none-any.whl", hash = "sha256:9f8daaa345764f8871dc461855049ac54ae4291d780279bce6fce7f24e3f17d3"}, ] [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.7.9)", "ipython", "isort (>=4.2.15,<5)", "mypy (==0.770)", "pydocstyle (>=5.0.0,<6)", "pytest (==5.4.1)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "sphinx-rtd-theme (>=0.1.9,<1)", "towncrier (>=19.2.0,<20)", "tox (==3.14.6)", "twine", "wheel"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme 
(>=0.1.9,<1)", "towncrier (>=19.2.0,<20)"] -lint = ["flake8 (==3.7.9)", "isort (>=4.2.15,<5)", "mypy (==0.770)", "pydocstyle (>=5.0.0,<6)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] pycryptodome = ["pycryptodome (>=3.6.6,<4)"] -pysha3 = ["pysha3 (>=1.0.0,<2.0.0)"] -test = ["pytest (==5.4.1)", "pytest-xdist", "tox (==3.14.6)"] +pysha3 = ["pysha3 (>=1.0.0,<2.0.0)", "safe-pysha3 (>=1.0.0)"] +test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-typing" -version = "2.3.0" +version = "4.0.0" description = "eth-typing: Common type annotations for ethereum python packages" optional = false -python-versions = ">=3.5, <4" +python-versions = ">=3.8, <4" files = [ - {file = "eth-typing-2.3.0.tar.gz", hash = "sha256:39cce97f401f082739b19258dfa3355101c64390914c73fe2b90012f443e0dc7"}, - {file = "eth_typing-2.3.0-py3-none-any.whl", hash = "sha256:b7fa58635c1cb0cbf538b2f5f1e66139575ea4853eac1d6000f0961a4b277422"}, + {file = "eth-typing-4.0.0.tar.gz", hash = "sha256:9af0b6beafbc5c2e18daf19da5f5a68315023172c4e79d149e12ad10a3d3f731"}, + {file = "eth_typing-4.0.0-py3-none-any.whl", hash = "sha256:7e556bea322b6e8c0a231547b736c258e10ce9eed5ddc254f51031b12af66a16"}, ] [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.8.3)", "ipython", "isort (>=4.2.15,<5)", "mypy (==0.782)", "pydocstyle (>=3.0.0,<4)", "pytest (>=4.4,<4.5)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "sphinx-rtd-theme (>=0.1.9)", "tox (>=2.9.1,<3)", "twine", "wheel"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme (>=0.1.9)"] -lint = ["flake8 (==3.8.3)", "isort (>=4.2.15,<5)", "mypy (==0.782)", "pydocstyle (>=3.0.0,<4)"] -test = ["pytest (>=4.4,<4.5)", "pytest-xdist", "tox 
(>=2.9.1,<3)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "eth-utils" -version = "1.10.0" +version = "3.0.0" description = "eth-utils: Common utility functions for python code that interacts with Ethereum" optional = false -python-versions = ">=3.5,!=3.5.2,<4" +python-versions = ">=3.8, <4" files = [ - {file = "eth-utils-1.10.0.tar.gz", hash = "sha256:bf82762a46978714190b0370265a7148c954d3f0adaa31c6f085ea375e4c61af"}, - {file = "eth_utils-1.10.0-py3-none-any.whl", hash = "sha256:74240a8c6f652d085ed3c85f5f1654203d2f10ff9062f83b3bad0a12ff321c7a"}, + {file = "eth-utils-3.0.0.tar.gz", hash = "sha256:8721869568448349bceae63c277b75758d11e0dc190e7ef31e161b89619458f1"}, + {file = "eth_utils-3.0.0-py3-none-any.whl", hash = "sha256:9a284106acf6f6ce91ddf792489cf8bd4c681fd5ae7653d2f3d5d100be5c3905"}, ] [package.dependencies] -cytoolz = {version = ">=0.10.1,<1.0.0", markers = "implementation_name == \"cpython\""} -eth-hash = ">=0.3.1,<0.4.0" -eth-typing = ">=2.2.1,<3.0.0" -toolz = {version = ">0.8.2,<1", markers = "implementation_name == \"pypy\""} +cytoolz = {version = ">=0.10.1", markers = "implementation_name == \"cpython\""} +eth-hash = ">=0.3.1" +eth-typing = ">=3.0.0" +toolz = {version = ">0.8.2", markers = "implementation_name == \"pypy\""} [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "black (>=18.6b4,<19)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.7.9)", "hypothesis (>=4.43.0,<5.0.0)", "ipython", "isort (>=4.2.15,<5)", "mypy (==0.720)", "pydocstyle (>=5.0.0,<6)", "pytest (==5.4.1)", "pytest (>=3.4.1,<4.0.0)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "sphinx-rtd-theme (>=0.1.9,<2)", "towncrier (>=19.2.0,<20)", "tox 
(==3.14.6)", "twine (>=1.13,<2)", "wheel (>=0.30.0,<1.0.0)"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme (>=0.1.9,<2)", "towncrier (>=19.2.0,<20)"] -lint = ["black (>=18.6b4,<19)", "flake8 (==3.7.9)", "isort (>=4.2.15,<5)", "mypy (==0.720)", "pydocstyle (>=5.0.0,<6)", "pytest (>=3.4.1,<4.0.0)"] -test = ["hypothesis (>=4.43.0,<5.0.0)", "pytest (==5.4.1)", "pytest-xdist", "tox (==3.14.6)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "eth-hash[pycryptodome]", "hypothesis (>=4.43.0)", "ipython", "mypy (==1.5.1)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +test = ["hypothesis (>=4.43.0)", "mypy (==1.5.1)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "exceptiongroup" @@ -1136,6 +1122,25 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", 
"pytest-ruff"] + [[package]] name = "indy-credx" version = "1.1.1" @@ -1203,33 +1208,31 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonpath-ng" -version = "1.5.2" +version = "1.6.1" description = "A final implementation of JSONPath for Python that aims to be standard compliant, including arithmetic and binary comparison operators and providing clear AST for metaprogramming." optional = false python-versions = "*" files = [ - {file = "jsonpath-ng-1.5.2.tar.gz", hash = "sha256:144d91379be14d9019f51973bd647719c877bfc07dc6f3f5068895765950c69d"}, - {file = "jsonpath_ng-1.5.2-py3-none-any.whl", hash = "sha256:93d1f248be68e485eb6635c3a01b2d681f296dc349d71e37c8755837b8944d36"}, + {file = "jsonpath-ng-1.6.1.tar.gz", hash = "sha256:086c37ba4917304850bd837aeab806670224d3f038fe2833ff593a672ef0a5fa"}, + {file = "jsonpath_ng-1.6.1-py3-none-any.whl", hash = "sha256:8f22cd8273d7772eea9aaa84d922e0841aa36fdb8a2c6b7f6c3791a16a9bc0be"}, ] [package.dependencies] -decorator = "*" ply = "*" -six = "*" [[package]] name = "jwcrypto" -version = "1.5.3" +version = "1.5.4" description = "Implementation of JOSE Web standards" optional = false python-versions = ">= 3.8" files = [ - {file = "jwcrypto-1.5.3.tar.gz", hash = "sha256:3af84bb6ed78fb29325308d4eca55e2842f1583010cb6c09207375a4ecea151f"}, + {file = "jwcrypto-1.5.4.tar.gz", hash = "sha256:0815fbab613db99bad85691da5f136f8860423396667728a264bcfa6e1db36b0"}, ] [package.dependencies] cryptography = ">=3.4" -typing_extensions = "*" +typing_extensions = ">=4.5.0" [[package]] name = "lxml" @@ -1326,19 +1329,20 @@ source = ["Cython (>=3.0.7)"] [[package]] name = "markdown" -version = "3.1.1" -description = "Python implementation of Markdown." +version = "3.5.2" +description = "Python implementation of John Gruber's Markdown." 
optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +python-versions = ">=3.8" files = [ - {file = "Markdown-3.1.1-py2.py3-none-any.whl", hash = "sha256:56a46ac655704b91e5b7e6326ce43d5ef72411376588afa1dd90e881b83c7e8c"}, - {file = "Markdown-3.1.1.tar.gz", hash = "sha256:2e50876bcdd74517e7b71f3e7a76102050edec255b3983403f1a63e7c8a41e7a"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [package.dependencies] -setuptools = ">=36" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] @@ -1968,12 +1972,13 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pyld" -version = "2.0.3" +version = "2.0.4" description = "Python implementation of the JSON-LD API" optional = false python-versions = "*" files = [ - {file = "PyLD-2.0.3.tar.gz", hash = "sha256:287445f888c3a332ccbd20a14844c66c2fcbaeab3c99acd506a0788e2ebb2f82"}, + {file = "PyLD-2.0.4-py3-none-any.whl", hash = "sha256:6dab9905644616df33f8755489fc9b354ed7d832d387b7d1974b4fbd3b8d2a89"}, + {file = "PyLD-2.0.4.tar.gz", hash = "sha256:311e350f0dbc964311c79c28e86f84e195a81d06fef5a6f6ac2a4f6391ceeacc"}, ] [package.dependencies] @@ -2015,13 +2020,13 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pytest" -version = "8.0.0" +version = "8.0.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, 
- {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, + {file = "pytest-8.0.1-py3-none-any.whl", hash = "sha256:3e4f16fe1c0a9dc9d9389161c127c3edc5d810c38d6793042fb81d9f48a59fca"}, + {file = "pytest-8.0.1.tar.gz", hash = "sha256:267f6563751877d772019b13aacbe4e860d73fe8f651f28112e9ac37de7513ae"}, ] [package.dependencies] @@ -2254,23 +2259,23 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rlp" -version = "1.2.0" -description = "A package for Recursive Length Prefix encoding and decoding" +version = "4.0.0" +description = "rlp: A package for Recursive Length Prefix encoding and decoding" optional = false -python-versions = "*" +python-versions = ">=3.8, <4" files = [ - {file = "rlp-1.2.0-py2.py3-none-any.whl", hash = "sha256:97b7e770f16442772311b33e6bc28b45318e7c8def69b9df16452304e224e9df"}, - {file = "rlp-1.2.0.tar.gz", hash = "sha256:27273fc2dbc3513c1e05ea6b8af28aac8745fb09c164e39e2ed2807bf7e1b342"}, + {file = "rlp-4.0.0-py3-none-any.whl", hash = "sha256:1747fd933e054e6d25abfe591be92e19a4193a56c93981c05bd0f84dfe279f14"}, + {file = "rlp-4.0.0.tar.gz", hash = "sha256:61a5541f86e4684ab145cb849a5929d2ced8222930a570b3941cf4af16b72a78"}, ] [package.dependencies] -eth-utils = ">=1.0.2,<2" +eth-utils = ">=2" [package.extras] -dev = ["Sphinx (>=1.6.5,<2)", "bumpversion (>=0.5.3,<1)", "flake8 (==3.4.1)", "hypothesis (==3.56.5)", "ipython", "pytest (==3.3.2)", "pytest-watch (>=4.1.0,<5)", "pytest-xdist", "setuptools (>=36.2.0)", "sphinx-rtd-theme (>=0.1.9)", "tox (>=2.9.1,<3)", "twine", "wheel"] -doc = ["Sphinx (>=1.6.5,<2)", "sphinx-rtd-theme (>=0.1.9)"] -lint = ["flake8 (==3.4.1)"] -test = ["hypothesis (==3.56.5)", "pytest (==3.3.2)", "tox (>=2.9.1,<3)"] +dev = ["build (>=0.9.0)", "bumpversion (>=0.5.3)", "hypothesis (==5.19.0)", "ipython", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox 
(>=4.0.0)", "twine", "wheel"] +docs = ["sphinx (>=6.0.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] +rust-backend = ["rusty-rlp (>=0.2.1,<0.3)"] +test = ["hypothesis (==5.19.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] name = "ruff" @@ -2300,13 +2305,13 @@ files = [ [[package]] name = "sd-jwt" -version = "0.10.3" +version = "0.10.4" description = "The reference implementation of the IETF SD-JWT specification." optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "sd_jwt-0.10.3-py3-none-any.whl", hash = "sha256:de5d8296a977c758cefcc153a1bfab12de3087fbf4a38bf589165ab31c7a41f7"}, - {file = "sd_jwt-0.10.3.tar.gz", hash = "sha256:c9307ed1cb9597c532f19d5eb3e040a77ff264214354b2ed0533db83c05a3a8e"}, + {file = "sd_jwt-0.10.4-py3-none-any.whl", hash = "sha256:d7ae669eb5d51bceeb38e0df8ab2faddd12e3b21ab64d831b6d048fc1e00ce75"}, + {file = "sd_jwt-0.10.4.tar.gz", hash = "sha256:82f93e2f570cfd31fab124e301febb81f3bcad70b10e38f5f9cff70ad659c2ce"}, ] [package.dependencies] @@ -2769,6 +2774,21 @@ files = [ idna = ">=2.0" multidict = ">=4.0" +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + [extras] askar = ["anoncreds", "aries-askar", "indy-credx", "indy-vdr"] 
bbs = ["ursa-bbs-signatures"] @@ -2777,4 +2797,4 @@ indy = ["python3-indy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "7170ac3cc281b82cf814d0757f2b7d0b94bf7009a58921ee9b6566dc51c957bd" +content-hash = "0edb68b8723afe8a2feba6e0a855c1947826152c1b472d31906e630a16d0654f" diff --git a/pyproject.toml b/pyproject.toml index d206d36eb1..0f7e9242ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aries_cloudagent" -version = "0.12.0rc0" +version = "0.12.0rc1" description = "Hyperledger Aries Cloud Agent Python (ACA-Py) is a foundation for building decentralized identity applications and services running in non-mobile environments. " authors = ["Hyperledger Aries "] license = "Apache-2.0" @@ -25,8 +25,8 @@ base58="~2.1.0" ConfigArgParse="~1.5.3" deepmerge="~0.3.0" ecdsa="~0.16.1" -jsonpath_ng="1.5.2" -Markdown="~3.1.1" +jsonpath-ng="1.6.1" +Markdown="~3.5.2" markupsafe="2.0.1" marshmallow="~3.20.1" nest_asyncio="~1.5.5" @@ -35,7 +35,7 @@ portalocker="~2.7.0" prompt_toolkit=">=2.0.9,<2.1.0" pydid="^0.4.3" pyjwt="~2.8.0" -pyld="~2.0.3" +pyld="^2.0.4" pynacl="~1.5.0" python-dateutil="~2.8.1" python-json-logger="~2.0.7" @@ -43,7 +43,7 @@ pytz="~2021.1" pyyaml="~6.0.1" qrcode = {version = ">=6.1,<7.0", extras = ["pil"]} requests="~2.31.0" -rlp="1.2.0" +rlp="4.0.0" unflatten="~0.1" sd-jwt = "^0.10.3" did-peer-2 = "^0.1.2" @@ -139,6 +139,9 @@ markers = [ ] junit_family = "xunit1" asyncio_mode = "auto" +filterwarnings = [ + 'ignore:distutils Version classes are deprecated. 
Use packaging.version instead.:DeprecationWarning', # Ignore specific DeprecationWarning for old packages using distutils version class +] [tool.coverage.run] diff --git a/scripts/generate-open-api-spec b/scripts/generate-open-api-spec index 496fa80098..64399f3f3e 100755 --- a/scripts/generate-open-api-spec +++ b/scripts/generate-open-api-spec @@ -38,8 +38,7 @@ ACA_PY_CMD_OPTIONS=" \ --multitenant \ --multitenant-admin \ --jwt-secret test \ - --no-ledger \ - --log-file /usr/src/app/logs/agent.log" + --no-ledger" # Specify openAPI JSON config file and shared directory OPEN_API_JSON_CONFIG="openAPIJSON.config"