diff --git a/.github/workflows/oid4vc-conformance-tests.yaml b/.github/workflows/oid4vc-conformance-tests.yaml new file mode 100644 index 000000000..5fbfdb0b3 --- /dev/null +++ b/.github/workflows/oid4vc-conformance-tests.yaml @@ -0,0 +1,149 @@ +name: OID4VC Conformance Tests +# Runs the OIDF HAIP conformance suite against ACA-Py OID4VCI issuer and +# OID4VP verifier. The suite is started from source inside Docker Compose and +# all test results are written to a JUnit XML artifact. +# +# Trigger conditions: +# - PR or push that touches oid4vc/** source files +# - Manual run via workflow_dispatch (always runs regardless of changed files) +on: + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + branches: + - "**" + paths: + - "oid4vc/**" + push: + branches: + - main + paths: + - "oid4vc/**" + workflow_dispatch: + +jobs: + conformance-tests: + name: "OID4VC Conformance Tests" + runs-on: ubuntu-latest + # Skip draft PRs (same policy as integration-tests) + if: | + github.event_name == 'workflow_dispatch' || + (github.event_name == 'push') || + (github.event_name == 'pull_request' && github.event.pull_request.draft == false) + timeout-minutes: 90 + + steps: + # ── Checkout ──────────────────────────────────────────────────────────── + - name: Check out repository + uses: actions/checkout@v4 + + # ── Docker Buildx (enables layer cache via GitHub Actions cache) ──────── + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + # ── Pre-build ACA-Py issuer image (Rust/isomdl, ~10 min cold) ────────── + # Both issuer and verifier share the same Dockerfile; the verifier build + # hits cache after the issuer build completes. + - name: Build acapy-issuer image + uses: docker/build-push-action@v6 + with: + context: . 
+ file: oid4vc/docker/Dockerfile + push: false + load: true + tags: oid4vc-integration-acapy-issuer:latest + build-args: | + ACAPY_VERSION=1.4.0 + ISOMDL_BRANCH=fix/python-build-system + cache-from: type=gha,scope=acapy-oid4vc + cache-to: type=gha,mode=max,scope=acapy-oid4vc + + - name: Build acapy-verifier image + uses: docker/build-push-action@v6 + with: + context: . + file: oid4vc/docker/Dockerfile + push: false + load: true + tags: oid4vc-integration-acapy-verifier:latest + build-args: | + ACAPY_VERSION=1.4.0 + ISOMDL_BRANCH=fix/python-build-system + # Issuer + verifier share all layers; use same cache scope. + cache-from: type=gha,scope=acapy-oid4vc + + # ── Pre-build OIDF conformance server (Maven build, ~15 min cold) ─────── + - name: Build conformance-server image + uses: docker/build-push-action@v6 + with: + context: oid4vc/integration/conformance + file: oid4vc/integration/conformance/Dockerfile.server + push: false + load: true + tags: oid4vc-integration-conformance-server:latest + build-args: | + CONFORMANCE_SUITE_BRANCH=master + cache-from: type=gha,scope=conformance-server + cache-to: type=gha,mode=max,scope=conformance-server + + # ── Pre-build conformance runner (lightweight Python image) ───────────── + - name: Build conformance-runner image + uses: docker/build-push-action@v6 + with: + context: oid4vc/integration + file: oid4vc/integration/conformance/Dockerfile.runner + push: false + load: true + tags: oid4vc-integration-conformance-runner:latest + cache-from: type=gha,scope=conformance-runner + cache-to: type=gha,mode=max,scope=conformance-runner + + # ── Run conformance suite ──────────────────────────────────────────────── + # DOCKER_PLATFORM is detected automatically by the shell script based on + # `uname -m`; set explicitly here to avoid any ambiguity on CI runners. 
+ - name: Run conformance tests + env: + DOCKER_PLATFORM: linux/amd64 + run: | + bash oid4vc/integration/run-conformance-tests.sh run all + + # ── Collect results ────────────────────────────────────────────────────── + - name: Upload JUnit test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: conformance-junit-results + path: oid4vc/integration/test-results/conformance-junit.xml + if-no-files-found: warn + + - name: Publish JUnit test summary + if: always() + uses: mikepenz/action-junit-report@v4 + with: + report_paths: "oid4vc/integration/test-results/conformance-junit.xml" + check_name: "OIDF Conformance Results" + fail_on_failure: false + require_tests: false + + # ── Collect Docker logs on failure ─────────────────────────────────────── + - name: Dump Docker Compose logs + if: failure() + run: | + mkdir -p /tmp/conformance-logs + cd oid4vc/integration + # Capture all service logs for post-mortem analysis + docker compose --profile conformance logs --no-color \ + > /tmp/conformance-logs/docker-compose.log 2>&1 || true + docker compose --profile conformance logs --no-color acapy-issuer \ + > /tmp/conformance-logs/acapy-issuer.log 2>&1 || true + docker compose --profile conformance logs --no-color acapy-verifier \ + > /tmp/conformance-logs/acapy-verifier.log 2>&1 || true + docker compose --profile conformance logs --no-color conformance-server \ + > /tmp/conformance-logs/conformance-server.log 2>&1 || true + + - name: Upload Docker logs artifact + if: failure() + uses: actions/upload-artifact@v4 + with: + name: conformance-docker-logs + path: /tmp/conformance-logs/ + retention-days: 7 diff --git a/.github/workflows/pr-linting-and-unit-tests.yaml b/.github/workflows/pr-linting-and-unit-tests.yaml index c9d871b4a..e686679f5 100644 --- a/.github/workflows/pr-linting-and-unit-tests.yaml +++ b/.github/workflows/pr-linting-and-unit-tests.yaml @@ -100,7 +100,6 @@ jobs: #---------------------------------------------- - name: Unit test plugins id: 
unit-tests - continue-on-error: true run: | for dir in ${{ steps.changed-plugins.outputs.changed-plugins }}; do cd $dir @@ -110,7 +109,6 @@ jobs: integration-tests: name: "Integration tests" runs-on: ubuntu-latest - continue-on-error: true needs: linting-and-unit-tests if: needs.linting-and-unit-tests.result == 'success' steps: diff --git a/oid4vc/README.md b/oid4vc/README.md index 04ed06246..888be4f35 100644 --- a/oid4vc/README.md +++ b/oid4vc/README.md @@ -1,6 +1,6 @@ # OpenID4VCI Plugin for ACA-Py -This plugin implements [OpenID4VCI (Draft 11)][oid4vci]. The OpenID4VCI specification is in active development, as is this plugin. Consider this plugin experimental; endpoints and records may change to reflect upstream changes in the specification. +This plugin implements [OpenID4VCI 1.0][oid4vci]. This implementation follows the OpenID4VCI 1.0 final specification and is not backwards compatible with earlier drafts. ## Developer Documentation @@ -435,4 +435,4 @@ For Apple Silicon, the `DOCKER_DEFAULT_PLATFORM=linux/amd64` environment variabl - Batch Credential Issuance - We're limited to DID Methods that ACA-Py supports for issuance (more can be added by Plugin, e.g. DID Web); `did:sov`, `did:key` -[oid4vci]: https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0-11.html +[oid4vci]: https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0.html diff --git a/oid4vc/demo/.env.example b/oid4vc/demo/.env.example index 3154b4d25..022386386 100644 --- a/oid4vc/demo/.env.example +++ b/oid4vc/demo/.env.example @@ -8,11 +8,11 @@ # ── ACA-Py host port bindings ─────────────────────────────────────────────── # The admin APIs and OID4VC endpoints are exposed on the host for easy curl # access and for the local Playwright demo script. -# If the default ports (8021/8022) are already occupied on your machine, +# If the default ports (8121/8122) are already occupied on your machine, # override them here and also set the explicit URL vars below. 
-ACAPY_ISSUER_ADMIN_PORT=8021 -ACAPY_ISSUER_OID4VCI_PORT=8022 +ACAPY_ISSUER_ADMIN_PORT=8121 +ACAPY_ISSUER_OID4VCI_PORT=8122 ACAPY_VERIFIER_ADMIN_PORT=8031 ACAPY_VERIFIER_OID4VP_PORT=8032 @@ -23,17 +23,17 @@ ACAPY_VERIFIER_OID4VP_PORT=8032 # Set these to match ACAPY_ISSUER_ADMIN_PORT / ACAPY_ISSUER_OID4VCI_PORT # whenever you override the default ports above. -# ACAPY_ISSUER_ADMIN_URL=http://localhost:8021 -# ACAPY_ISSUER_OID4VCI_URL=http://localhost:8022 +# ACAPY_ISSUER_ADMIN_URL=http://localhost:8121 +# ACAPY_ISSUER_OID4VCI_URL=http://localhost:8122 # ── Walt.id wallet port ───────────────────────────────────────────────────── # The nginx proxy combines the wallet frontend and API on this port. -# Change if port 7101 is already in use on your machine. -WALLET_PORT=7101 +# Change if port 7201 is already in use on your machine. +WALLET_PORT=7201 # Explicit wallet URLs for Playwright — must match WALLET_PORT above. -# WALTID_WALLET_URL=http://localhost:7101 -# WALTID_WALLET_API_URL=http://localhost:7101 +# WALTID_WALLET_URL=http://localhost:7201 +# WALTID_WALLET_API_URL=http://localhost:7201 # ── Docker platform ───────────────────────────────────────────────────────── # Default linux/arm64 (Apple Silicon native for ACA-Py). @@ -63,9 +63,9 @@ WALLET_PORT=7101 # 1. Install zrok: https://docs.zrok.io/docs/getting-started # 2. Reserve permanent tunnel names once (lowercase alphanumeric, 4-32 chars): # -# zrok reserve public --unique-name "myissuerapi" http://localhost:8022 +# zrok reserve public --unique-name "myissuerapi" http://localhost:8122 # zrok reserve public --unique-name "myverifierapi" http://localhost:8032 -# zrok reserve public --unique-name "mydemowallet" http://localhost:7101 +# zrok reserve public --unique-name "mydemowallet" http://localhost:7201 # # 3. 
Activate all tunnels each session (in separate terminals): # diff --git a/oid4vc/demo/README.md b/oid4vc/demo/README.md index 4bf81ed51..12a291731 100644 --- a/oid4vc/demo/README.md +++ b/oid4vc/demo/README.md @@ -41,7 +41,7 @@ docker compose up -d ./setup.sh # 5. Open the wallet in your browser -open http://localhost:7101 +open http://localhost:7201 ``` Register a new account in the wallet and you're ready to go. @@ -52,9 +52,9 @@ Register a new account in the wallet and you're ready to go. | Service | URL | Purpose | |---|---|---| -| walt.id Web Wallet | | Holder wallet (browser) | -| ACA-Py Issuer admin | | Issue credentials | -| ACA-Py Issuer OID4VCI | | OID4VCI v1 endpoint | +| walt.id Web Wallet | | Holder wallet (browser) | +| ACA-Py Issuer admin | | Issue credentials | +| ACA-Py Issuer OID4VCI | | OID4VCI v1 endpoint | | ACA-Py Verifier admin | | Verify presentations | | ACA-Py Verifier OID4VP | | OID4VP v1 endpoint | @@ -106,9 +106,9 @@ an HTTPS endpoint — useful for testing with real mobile wallets. # https://docs.zrok.io/docs/getting-started # Reserve permanent tunnel names (one time) -zrok reserve public --unique-name "myissuerapi" http://localhost:8022 +zrok reserve public --unique-name "myissuerapi" http://localhost:8122 zrok reserve public --unique-name "myverifierapi" http://localhost:8032 -zrok reserve public --unique-name "mydemowallet" http://localhost:7101 +zrok reserve public --unique-name "mydemowallet" http://localhost:7201 # Activate tunnels (each session, in separate terminals) zrok share reserved myissuerapi @@ -140,8 +140,8 @@ Restart the stack: `docker compose up -d` and re-run `./setup.sh`. 
│ │ │ ┌─────────────────┐ OID4VCI v1 ┌─────────────┐ │ │ │ ACA-Py Issuer │ ◄──────────────── │ walt.id │ │ -│ │ :8021 admin │ │ wallet-api │ │ -│ │ :8022 OID4VCI │ │ :7001 │ │ +│ │ :8121 admin │ │ wallet-api │ │ +│ │ :8122 OID4VCI │ │ :7001 │ │ │ └─────────────────┘ └─────────────┘ │ │ │ │ │ ┌─────────────────┐ OID4VP v1 │ │ @@ -236,10 +236,10 @@ To issue a credential manually: ```bash # Get the credential config IDs -curl -s http://localhost:8021/oid4vci/credential-supported/list | python3 -m json.tool +curl -s http://localhost:8121/oid4vci/credential-supported/list | python3 -m json.tool # Create an offer (replace and ) -curl -s -X POST http://localhost:8021/oid4vci/exchange/create \ +curl -s -X POST http://localhost:8121/oid4vci/exchange/create \ -H "Content-Type: application/json" \ -d '{ "supported_cred_id": "", @@ -258,7 +258,7 @@ curl -s -X POST http://localhost:8021/oid4vci/exchange/create \ ``` Then paste the `credential_offer` URL into the wallet at -`http://localhost:7101`. +`http://localhost:7201`. --- diff --git a/oid4vc/demo/docker-compose.yml b/oid4vc/demo/docker-compose.yml index 949ea7c90..b30fbfd55 100644 --- a/oid4vc/demo/docker-compose.yml +++ b/oid4vc/demo/docker-compose.yml @@ -33,8 +33,8 @@ services: ACAPY_VERSION: 1.4.0 ISOMDL_BRANCH: fix/python-build-system ports: - - "${ACAPY_ISSUER_ADMIN_PORT:-8021}:8021" - - "${ACAPY_ISSUER_OID4VCI_PORT:-8022}:8022" + - "${ACAPY_ISSUER_ADMIN_PORT:-8121}:8021" + - "${ACAPY_ISSUER_OID4VCI_PORT:-8122}:8022" environment: - AGENT_ENDPOINT=http://acapy-issuer:8020 # OID4VCI_ENDPOINT is the URL embedded in credential offers. @@ -203,7 +203,7 @@ services: environment: - PORT=7101 # Must match the public-facing wallet URL so deep-links resolve correctly. 
- - NUXT_PUBLIC_ISSUER_CALLBACK_URL=${WALLET_PUBLIC_URL:-http://localhost:7101} + - NUXT_PUBLIC_ISSUER_CALLBACK_URL=${WALLET_PUBLIC_URL:-http://localhost:7201} healthcheck: test: ["CMD", "wget", "-q", "--spider", "http://localhost:7101"] interval: 10s @@ -230,7 +230,7 @@ services: waltid-proxy: image: nginx:alpine ports: - - "${WALLET_PORT:-7101}:80" + - "${WALLET_PORT:-7201}:80" volumes: - ./nginx.conf:/etc/nginx/nginx.conf:ro depends_on: diff --git a/oid4vc/demo/playwright/demo.spec.ts b/oid4vc/demo/playwright/demo.spec.ts index ebe6b9db0..e7dd85bff 100644 --- a/oid4vc/demo/playwright/demo.spec.ts +++ b/oid4vc/demo/playwright/demo.spec.ts @@ -200,15 +200,18 @@ test.describe('OID4VC mDOC Demo', () => { // ── Create credential offer ── const credentialSubject = { 'org.iso.18013.5.1': { - given_name: 'Alice', - family_name: 'Holder', - birth_date: '1990-06-15', - issuing_country: 'US', - issuing_authority: 'Demo DMV', - document_number: 'DL-DEMO-001', - issue_date: new Date().toISOString().split('T')[0], - expiry_date: new Date(Date.now() + 365 * 24 * 60 * 60 * 1000) - .toISOString().split('T')[0], + given_name: 'Alice', + family_name: 'Holder', + birth_date: '1990-06-15', + issuing_country: 'US', + issuing_authority: 'Demo DMV', + document_number: 'DL-DEMO-001', + issue_date: new Date().toISOString().split('T')[0], + expiry_date: new Date(Date.now() + 365 * 24 * 60 * 60 * 1000) + .toISOString().split('T')[0], + // portrait and un_distinguishing_sign are required by ISO 18013-5.1 + portrait: 'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==', + un_distinguishing_sign: 'USA', driving_privileges: [ { vehicle_category_code: 'C', issue_date: '2020-01-01', expiry_date: '2030-01-01' }, ], diff --git a/oid4vc/demo/setup.sh b/oid4vc/demo/setup.sh index 116017232..c63379a29 100755 --- a/oid4vc/demo/setup.sh +++ b/oid4vc/demo/setup.sh @@ -18,9 +18,18 @@ # WALLET_URL default http://localhost:7101 set -euo pipefail +# Load .env 
from the same directory as this script so port overrides are honoured. +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +if [[ -f "$SCRIPT_DIR/.env" ]]; then + set -a + # shellcheck disable=SC1091 + . "$SCRIPT_DIR/.env" + set +a +fi + ISSUER_ADMIN="${ACAPY_ISSUER_ADMIN_URL:-http://localhost:8021}" VERIFIER_ADMIN="${ACAPY_VERIFIER_ADMIN_URL:-http://localhost:8031}" -WALLET_URL="${WALLET_URL:-http://localhost:7101}" +WALLET_URL="${WALTID_WALLET_URL:-${WALLET_URL:-http://localhost:7101}}" GREEN='\033[0;32m' YELLOW='\033[1;33m' diff --git a/oid4vc/docker/Dockerfile b/oid4vc/docker/Dockerfile index 7d8b76d84..273e40da5 100644 --- a/oid4vc/docker/Dockerfile +++ b/oid4vc/docker/Dockerfile @@ -1,44 +1,116 @@ +# ============================================================================= +# Stage 1: Build isomdl-uniffi wheel (requires Rust) +# ============================================================================= +FROM python:3.12-slim-bookworm AS isomdl-build + +WORKDIR /build + +# Install build dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + git \ + build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Install Rust toolchain (minimal profile to save space) +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal +ENV PATH="/root/.cargo/bin:${PATH}" + +# Clone isomdl-uniffi with shallow clone +ARG ISOMDL_BRANCH=fix/python-build-system +RUN git clone --depth 1 --branch ${ISOMDL_BRANCH} \ + https://github.com/Indicio-tech/isomdl-uniffi.git /build/isomdl-uniffi + +WORKDIR /build/isomdl-uniffi/python + +# Build wheel — limit Cargo parallelism to avoid Docker VM OOM on resource-constrained hosts +# (CARGO_BUILD_JOBS=2 cuts peak memory roughly in half vs. 
the default all-cores build) +RUN pip install --no-cache-dir build wheel setuptools +ENV CARGO_BUILD_JOBS=2 +RUN python setup.py bdist_wheel + +# ============================================================================= +# Stage 2: Install ACA-Py and plugin dependencies +# ============================================================================= FROM python:3.12-slim-bookworm AS base + WORKDIR /usr/src/app -# Install and configure poetry -USER root +# Install only required build/runtime dependencies (no Rust needed here) +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + jq \ + git \ + && rm -rf /var/lib/apt/lists/* -# Install and configure poetry -WORKDIR /usr/src/app -ENV POETRY_VERSION=2.1.2 -ENV POETRY_HOME=/opt/poetry -RUN apt-get update && apt-get install -y curl jq && apt-get clean -RUN curl -sSL https://install.python-poetry.org | python - +# Accept build argument for ACA-Py version +ARG ACAPY_VERSION=1.4.0 -ENV PATH="/opt/poetry/bin:$PATH" -RUN poetry config virtualenvs.in-project true +# Clone ACA-Py source with shallow clone +RUN git clone --depth 1 --branch ${ACAPY_VERSION} \ + https://github.com/openwallet-foundation/acapy.git /usr/src/acapy -# Setup project -RUN mkdir oid4vc && touch oid4vc/__init__.py -RUN mkdir jwt_vc_json && touch jwt_vc_json/__init__.py -RUN mkdir sd_jwt_vc && touch sd_jwt_vc/__init__.py -RUN mkdir mso_mdoc && touch mso_mdoc/__init__.py -COPY oid4vc/pyproject.toml oid4vc/poetry.lock oid4vc/README.md ./ -RUN poetry install --without dev --all-extras -USER $user +WORKDIR /usr/src/acapy -FROM python:3.12-bookworm +# Install ACA-Py +RUN pip install --no-cache-dir -e . 
+RUN pip install --no-cache-dir configargparse +# Setup plugin project structure WORKDIR /usr/src/app -COPY --from=base /usr/src/app/.venv /usr/src/app/.venv -ENV PATH="/usr/src/app/.venv/bin:$PATH" -RUN apt-get update && apt-get install -y curl jq && apt-get clean + +# Copy the entire plugin source tree +COPY oid4vc/pyproject.toml ./ +COPY oid4vc/README.md ./ +COPY oid4vc/oid4vc/ oid4vc/ COPY oid4vc/jwt_vc_json/ jwt_vc_json/ COPY oid4vc/mso_mdoc/ mso_mdoc/ COPY oid4vc/sd_jwt_vc/ sd_jwt_vc/ -COPY oid4vc/oid4vc/ oid4vc/ COPY status_list/ status_list/ RUN pip install -e ./status_list + +# Install isomdl-uniffi from builder stage +COPY --from=isomdl-build /build/isomdl-uniffi/python/dist/*.whl /tmp/ +RUN pip install --no-cache-dir /tmp/*.whl && rm -rf /tmp/*.whl + +# Install the plugin with extras for mso_mdoc and sd_jwt_vc +RUN pip install --no-cache-dir -e ".[mso_mdoc,sd_jwt_vc]" + +# ============================================================================= +# Stage 3: Final slim runtime image +# ============================================================================= +FROM python:3.12-slim-bookworm + +WORKDIR /usr/src/app + +# Copy the complete environment from base stage +COPY --from=base /usr/src/acapy /usr/src/acapy +COPY --from=base /usr/src/app /usr/src/app + +# Install only runtime dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + jq \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +# Copy the entire Python environment from base stage, including site-packages +COPY --from=base /usr/local/lib/python3.12/site-packages /usr/local/lib/python3.12/site-packages +COPY --from=base /usr/local/bin /usr/local/bin + +# Copy dev config RUN mkdir -p /usr/src/app/docker COPY oid4vc/docker/dev.yml /usr/src/app/docker/dev.yml COPY oid4vc/docker/dev-verifier.yml /usr/src/app/docker/dev-verifier.yml -COPY oid4vc/docker/default.yml /usr/src/app/default.yml +COPY oid4vc/docker/default.yml /usr/src/app/docker/default.yml 
+ +# Expose ports +EXPOSE 8030 8031 8032 + +# Add health check +HEALTHCHECK --interval=10s --timeout=5s --retries=12 --start-period=60s \ + CMD curl -f http://localhost:${ACAPY_ADMIN_PORT:-8021}/status/ready || exit 1 -ENTRYPOINT ["/bin/bash", "-c", "aca-py \"$@\"", "--"] -CMD ["start", "--arg-file", "default.yml"] +# Set working directory and run ACA-Py +WORKDIR /usr/src/acapy +CMD ["python", "-m", "acapy_agent", "start", "--arg-file", "/usr/src/app/docker/dev.yml"] diff --git a/oid4vc/docker/dev-verifier.yml b/oid4vc/docker/dev-verifier.yml index ef8aeacd2..4678295ff 100644 --- a/oid4vc/docker/dev-verifier.yml +++ b/oid4vc/docker/dev-verifier.yml @@ -22,13 +22,11 @@ plugin: - sd_jwt_vc - mso_mdoc -# OID4VC plugin configuration - Use different ports for OID4VCI and OID4VP servers +# OID4VC plugin configuration - both OID4VCI and OID4VP routes served on the same port plugin-config-value: - oid4vci.host=0.0.0.0 - - oid4vci.port=8033 - - oid4vci.endpoint=${OID4VCI_ENDPOINT:-http://localhost:8033} - - oid4vp.host=0.0.0.0 - - oid4vp.port=8032 + - oid4vci.port=8032 + - oid4vci.endpoint=${OID4VCI_ENDPOINT:-http://localhost:8032} - oid4vp.endpoint=${OID4VP_ENDPOINT:-http://localhost:8032} # Ledger configuration - use no-ledger for simple development diff --git a/oid4vc/integration/tests/conftest.py b/oid4vc/integration/tests/conftest.py index d9440cbdb..8a7657e0d 100644 --- a/oid4vc/integration/tests/conftest.py +++ b/oid4vc/integration/tests/conftest.py @@ -47,11 +47,14 @@ async def credo_client(): """HTTP client for Credo agent service.""" async with httpx.AsyncClient(base_url=CREDO_AGENT_URL, timeout=30.0) as client: - # Wait for service to be ready - for _ in range(5): # Reduced since services should already be ready - response = await client.get("/health") - if response.status_code == 200: - break + # Wait for service to be ready (30 retries to handle brief unavailability) + for _ in range(30): + try: + response = await client.get("/health") + if response.status_code 
== 200: + break + except httpx.ConnectError: + pass await asyncio.sleep(1) else: raise RuntimeError("Credo agent service not available") diff --git a/oid4vc/integration/tests/mdoc/test_pki.py b/oid4vc/integration/tests/mdoc/test_pki.py index d739f9393..80401aedc 100644 --- a/oid4vc/integration/tests/mdoc/test_pki.py +++ b/oid4vc/integration/tests/mdoc/test_pki.py @@ -41,9 +41,9 @@ async def test_mdoc_pki_trust_chain( doctype = "org.iso.18013.5.1.mDL" namespaces = { "org.iso.18013.5.1": { - "given_name": cbor2.dumps("Alice"), - "family_name": cbor2.dumps("Smith"), - "birth_date": cbor2.dumps("1990-01-01"), + "given_name": json.dumps("Alice"), + "family_name": json.dumps("Smith"), + "birth_date": json.dumps("1990-01-01"), } } diff --git a/oid4vc/mso_mdoc/README.md b/oid4vc/mso_mdoc/README.md index 02d63e7d7..bd6a02136 100644 --- a/oid4vc/mso_mdoc/README.md +++ b/oid4vc/mso_mdoc/README.md @@ -1,9 +1,218 @@ -# MSO MDOC Credential Format Plugin +# MSO MDOC Credential Format -## Description +Implementation of ISO/IEC 18013-5:2021 compliant mobile document (mDoc) credential format for ACA-Py. -This plugin provides `mso_mdoc` credential support for the OID4VCI plugin. It acts as a module, dynamically loaded by the OID4VCI plugin, takes input parameters, and constructs and signs `mso_mdoc` credentials. +## Overview -## Configuration +This module provides support for issuing and verifying mobile documents (mDocs) as defined in ISO 18013-5, including mobile driver's licenses (mDL) and other identity credentials. The implementation uses the `isomdl-uniffi` library for core mDoc operations and integrates with ACA-Py's credential issuance framework. -No configuration is required for this plugin. 
+## Features + +- **ISO 18013-5 Compliance**: Full compliance with the international standard for mobile documents +- **CBOR Encoding**: Efficient binary encoding using CBOR (RFC 8949) +- **COSE Signing**: Cryptographic protection using COSE (RFC 8152/9052) +- **Selective Disclosure**: Privacy-preserving attribute disclosure +- **OpenID4VCI Integration**: Seamless integration with OpenID for Verifiable Credential Issuance + +## Protocol Support + +- ISO/IEC 18013-5:2021 - Mobile driving licence (mDL) application +- RFC 8152 - CBOR Object Signing and Encryption (COSE) +- RFC 9052 - CBOR Object Signing and Encryption (COSE): Structures and Process +- RFC 8949 - Concise Binary Object Representation (CBOR) +- OpenID4VCI 1.0 - Verifiable Credential Issuance Protocol + +## Installation + +The mso_mdoc module is included as part of the oid4vc plugin. Dependencies are managed through UV: + +```toml +dependencies = [ + "cbor2>=5.4.3", + "cwt>=1.6.0", + "pycose>=1.0.0", + "isomdl-uniffi @ git+https://github.com/Indicio-tech/isomdl-uniffi.git@feat/x509#subdirectory=python", +] +``` + +## Usage + +### Credential Issuance + +The module automatically registers the `MsoMdocCredProcessor` with the credential processor registry: + +```python +from mso_mdoc.cred_processor import MsoMdocCredProcessor + +# The processor handles mso_mdoc format credentials +processor = MsoMdocCredProcessor() +``` + +### Supported Document Types + +Common document type identifiers: +- `org.iso.18013.5.1.mDL` - Mobile driver's license +- Custom organizational document types following the reverse domain notation + +### Configuration + +Credentials are configured through the OpenID4VCI credential configuration: + +```json +{ + "format": "mso_mdoc", + "doctype": "org.iso.18013.5.1.mDL", + "cryptographic_binding_methods_supported": ["jwk"], + "credential_signing_alg_values_supported": ["ES256"] +} +``` + +## Architecture + +### Core Components + +- **`cred_processor.py`**: Main credential processor 
implementing the `Issuer` interface +- **`storage.py`**: Persistent storage for keys and certificates +- **`key_generation.py`**: Cryptographic key generation utilities +- **`mdoc/issuer.py`**: mDoc issuance operations +- **`mdoc/verifier.py`**: mDoc verification operations + +### Key Management + +The module supports: +- Automatic EC P-256 key generation +- Persistent key storage with metadata +- Certificate generation and management +- Verification method resolution + +## API Endpoints + +The module provides REST API endpoints for mDoc operations: + +### Sign mDoc +``` +POST /oid4vc/mdoc/sign +``` + +Request body: +```json +{ + "payload": { + "doctype": "org.iso.18013.5.1.mDL", + "claims": { + "org.iso.18013.5.1": { + "family_name": "Doe", + "given_name": "John", + "birth_date": "1990-01-01", + "age_over_18": true + } + } + }, + "headers": { + "alg": "ES256" + }, + "verificationMethod": "did:key:z6Mkn6z3Eg2mrgQmripNPGDybZYYojwZw1VPjRkCzbNV7JfN#0" +} +``` + +### Verify mDoc +``` +POST /oid4vc/mdoc/verify +``` + +Request body: +```json +{ + "mDoc": "", + "nonce": "optional-nonce" +} +``` + +## Testing + +Comprehensive test coverage including: +- Unit tests for all components +- Integration tests with real mDoc operations +- Real functional tests with actual cryptographic operations +- Compliance tests against ISO 18013-5 requirements + +Run tests: +```bash +cd oid4vc +uv run pytest mso_mdoc/tests/ -v +``` + +Test categories: +- **Unit Tests**: Individual component testing +- **Integration Tests**: Cross-component functionality +- **Real Tests**: Actual mDoc operations with isomdl-uniffi +- **Storage Tests**: Persistent storage operations +- **Security Tests**: Cryptographic validation + +## Security Considerations + +- All cryptographic operations use industry-standard libraries +- Keys are generated using secure random sources (P-256 ECDSA) +- Private keys are stored securely in ACA-Py's encrypted wallet +- No hardcoded credentials or keys +- Full compliance with 
ISO 18013-5 security requirements +- COSE signing for tamper detection + +## Troubleshooting + +### Common Issues + +1. **Import Errors**: Ensure `isomdl-uniffi` is properly installed +2. **Key Generation Failures**: Check that the wallet is properly initialized +3. **CBOR Encoding Errors**: Verify data types match ISO 18013-5 requirements +4. **Signature Verification Failures**: Ensure proper key material and algorithm support + +### Debug Mode + +Enable debug logging for detailed operation information: + +```python +import logging + +logging.getLogger("mso_mdoc").setLevel(logging.DEBUG) +``` + +## Contributing + +When contributing to this module: + +1. **Ensure ISO 18013-5 compliance** - All changes must maintain standard compliance +2. **Add comprehensive tests** - Both unit and integration tests for new features +3. **Update documentation** - Keep API documentation current +4. **Run security scans** - Use `bandit` to check for security issues +5. **Format code** - Use `black` and `isort` for consistent formatting +6. **Type hints** - Maintain complete type annotations + +### Development Setup + +```bash +# Install development dependencies +uv sync --dev + +# Run tests +cd oid4vc +uv run pytest mso_mdoc/tests/ + +# Run security scan +uv run bandit -r mso_mdoc/ -x "*/tests/*" + +# Format code +uv run black mso_mdoc/ +uv run isort mso_mdoc/ +``` + +## License + +This module is part of the Aries ACA-Py plugins project and follows the same licensing terms. 
+ +## References + +- [ISO/IEC 18013-5:2021](https://www.iso.org/standard/69084.html) - Mobile driving licence (mDL) application +- [OpenID for Verifiable Credential Issuance](https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0.html) +- [RFC 8152 - CBOR Object Signing and Encryption (COSE)](https://tools.ietf.org/html/rfc8152) +- [RFC 8949 - Concise Binary Object Representation (CBOR)](https://tools.ietf.org/html/rfc8949) diff --git a/oid4vc/mso_mdoc/__init__.py b/oid4vc/mso_mdoc/__init__.py index 554013a7e..a34ee895c 100644 --- a/oid4vc/mso_mdoc/__init__.py +++ b/oid4vc/mso_mdoc/__init__.py @@ -1,31 +1,64 @@ -"""MSO_MDOC Crendential Handler Plugin.""" +"""MSO_MDOC Credential Handler Plugin.""" -from importlib.util import find_spec +import logging -from acapy_agent.admin.base_server import BaseAdminServer from acapy_agent.config.injection_context import InjectionContext +from acapy_agent.core.event_bus import EventBus +from acapy_agent.core.profile import Profile +from acapy_agent.core.util import STARTUP_EVENT_PATTERN from mso_mdoc.cred_processor import MsoMdocCredProcessor +from mso_mdoc.storage import MdocStorageManager from oid4vc.cred_processor import CredProcessors -from . import routes +from . import routes as routes -cwt = find_spec("cwt") -pycose = find_spec("pycose") -cbor2 = find_spec("cbor2") -cbor_diag = find_spec("cbor_diag") -if not all((cwt, pycose, cbor2, cbor_diag)): - raise ImportError("`mso_mdoc` extra required") +LOGGER = logging.getLogger(__name__) + + +async def on_startup(profile: Profile, event: object): + """Handle startup event to initialize profile-dependent resources. + + Trust anchors are always wallet-scoped; a fresh WalletTrustStore is + constructed per-request in verify_credential / verify_presentation so + each tenant's Askar partition is queried automatically. 
+ """ + LOGGER.info("MSO_MDOC plugin startup - initializing profile-dependent resources") + + # Initialize storage and generate default keys/certs if needed + storage_manager = MdocStorageManager(profile) + + # Use a session for storage operations + async with profile.session() as session: + # Check if default keys exist + default_key = await storage_manager.get_default_signing_key(session) + if not default_key: + LOGGER.warning( + "WARNING: No mDoc signing key found. mDoc credential issuance " + "will fail until a key is provisioned. Use the admin API " + "POST /mso_mdoc/keys/generate to provision a signing key." + ) + else: + LOGGER.info( + "Using existing default mDoc key: %s", + default_key["key_id"], + ) async def setup(context: InjectionContext): """Setup the plugin.""" - processors = context.inject_or(CredProcessors) - if not processors: - processors = CredProcessors() - context.injector.bind_instance(CredProcessors, processors) - mso_mdoc = MsoMdocCredProcessor() - processors.register_issuer("mso_mdoc", mso_mdoc) - - admin_server = context.inject_or(BaseAdminServer) - if admin_server: - await routes.register(admin_server.app) + LOGGER.info("Setting up MSO_MDOC plugin") + + # Trust anchors are always wallet-scoped. A fresh WalletTrustStore is + # constructed per-request inside verify_credential / verify_presentation + # so each tenant's Askar partition is used automatically. 
+ # Register credential processor + processors = context.inject(CredProcessors) + _mso_mdoc_processor = MsoMdocCredProcessor() + processors.register_issuer("mso_mdoc", _mso_mdoc_processor) + processors.register_cred_verifier("mso_mdoc", _mso_mdoc_processor) + processors.register_pres_verifier("mso_mdoc", _mso_mdoc_processor) + + # Register startup event handler for storage initialization + event_bus = context.inject(EventBus) + event_bus.subscribe(STARTUP_EVENT_PATTERN, on_startup) + LOGGER.info("MSO_MDOC plugin registered startup handler") diff --git a/oid4vc/mso_mdoc/cred_processor.py b/oid4vc/mso_mdoc/cred_processor.py index a53eaece8..28b61158b 100644 --- a/oid4vc/mso_mdoc/cred_processor.py +++ b/oid4vc/mso_mdoc/cred_processor.py @@ -1,25 +1,359 @@ -"""Issue a mso_mdoc credential.""" +"""mso_mdoc credential processor. +Glues together the signing-key resolution, payload preparation, and isomdl +binding layers to implement ISO/IEC 18013-5:2021 compliant mDoc issuance and +verification inside the OID4VCI plugin framework. 
+ +Public API re-exported from sub-modules for backward compatibility: + +- ``check_certificate_not_expired`` — from :mod:`.signing_key` +- ``resolve_signing_key_for_credential`` — from :mod:`.signing_key` +""" + +import base64 import json import logging +import os import re -from typing import Any +from typing import Any, Dict, Optional from acapy_agent.admin.request_context import AdminRequestContext +from acapy_agent.core.profile import Profile, ProfileSession -from oid4vc.cred_processor import CredProcessorError, Issuer +from oid4vc.cred_processor import CredProcessorError, CredVerifier, Issuer, PresVerifier from oid4vc.models.exchange import OID4VCIExchangeRecord +from oid4vc.models.presentation import OID4VPPresentation from oid4vc.models.supported_cred import SupportedCredential from oid4vc.pop_result import PopResult -from .mdoc import mso_mdoc_sign +from .key_generation import pem_from_jwk, pem_to_jwk +from .mdoc.issuer import isomdl_mdoc_sign +from .mdoc.verifier import MsoMdocCredVerifier, MsoMdocPresVerifier, WalletTrustStore +from .payload import normalize_mdoc_result, prepare_mdoc_payload +from .signing_key import ( + check_certificate_not_expired, + resolve_signing_key_for_credential, +) +from .storage import MdocStorageManager + +# Re-export so existing ``from .cred_processor import X`` and +# ``patch("mso_mdoc.cred_processor.X")`` usages continue to work. +__all__ = [ + "MsoMdocCredProcessor", + "check_certificate_not_expired", + "resolve_signing_key_for_credential", +] LOGGER = logging.getLogger(__name__) -class MsoMdocCredProcessor(Issuer): +class MsoMdocCredProcessor(Issuer, CredVerifier, PresVerifier): """Credential processor class for mso_mdoc credential format.""" + def format_data_is_top_level(self) -> bool: + """mso_mdoc format_data (doctype, claims, etc.) belongs at top level. 
+
+        Per OID4VCI spec Appendix E, mso_mdoc credential configurations must
+        have ``doctype`` and other format fields at the top level of the
+        credential configuration object, NOT inside ``credential_definition``.
+        """
+        return True
+
+    # COSE algorithm name → integer identifier mapping (RFC 8152 / IANA COSE registry)
+    _COSE_ALG: dict = {"ES256": -7, "ES384": -35, "ES512": -36, "ES256K": -47}
+
+    def transform_issuer_metadata(self, metadata: dict) -> None:
+        """Convert mso_mdoc metadata to OID4VCI 1.0 spec-compliant form.
+
+        Performs three transformations required by OID4VCI 1.0:
+
+        1. ``credential_signing_alg_values_supported`` — converts string
+           algorithm names to COSE integer identifiers (e.g. "ES256" → -7)
+           per OID4VCI 1.0 Appendix A.2.2 and ISO 18013-5.
+
+        2. ``claims`` — converts the stored namespace-keyed dict
+           ``{namespace: {claim_name: descriptor}}`` to the spec-compliant
+           flat array ``[{path: [namespace, claim_name], ...}]`` and nests
+           it inside ``credential_metadata`` per OID4VCI 1.0 Appendix A.2.2,
+           Section 12.2.4, and Appendix B.2.
+
+        3. ``display`` — moves the credential display array into
+           ``credential_metadata`` per OID4VCI 1.0 Section 12.2.4.
+ """ + algs = metadata.get("credential_signing_alg_values_supported") + if algs: + metadata["credential_signing_alg_values_supported"] = [ + self._COSE_ALG.get(a, a) if isinstance(a, str) else a for a in algs + ] + + claims = metadata.pop("claims", None) + if isinstance(claims, dict): + claims_list = [] + for namespace, claim_map in claims.items(): + if isinstance(claim_map, dict): + for claim_name, descriptor in claim_map.items(): + entry: dict = {"path": [namespace, claim_name]} + if isinstance(descriptor, dict): + if "mandatory" in descriptor: + entry["mandatory"] = descriptor["mandatory"] + if "display" in descriptor: + entry["display"] = descriptor["display"] + claims_list.append(entry) + credential_metadata = metadata.setdefault("credential_metadata", {}) + credential_metadata["claims"] = claims_list + elif isinstance(claims, list): + # Already converted — just ensure it's nested in credential_metadata + credential_metadata = metadata.setdefault("credential_metadata", {}) + credential_metadata["claims"] = claims + + # Move display into credential_metadata per OID4VCI 1.0 Section 12.2.4 + display = metadata.pop("display", None) + if display is not None: + credential_metadata = metadata.setdefault("credential_metadata", {}) + credential_metadata["display"] = display + + def __init__(self, trust_store: Optional[Any] = None): + """Initialize the processor.""" + self.trust_store = trust_store + + def _validate_and_get_doctype( + self, body: Dict[str, Any], supported: SupportedCredential + ) -> str: + """Validate and extract doctype from request and configuration. + + Validates the document type identifier according to ISO 18013-5 § 8.3.2.1.2.1 + requirements and OpenID4VCI 1.0 § E.1.1 specification. 
+ + Args: + body: Request body containing credential issuance parameters + supported: Supported credential configuration with format data + + Returns: + Validated doctype string (e.g., "org.iso.18013.5.1.mDL") + + Raises: + CredProcessorError: If doctype validation fails with detailed context + """ + doctype_from_request = body.get("doctype") + doctype_from_config = ( + supported.format_data.get("doctype") if supported.format_data else None + ) + + if not doctype_from_request and not doctype_from_config: + raise CredProcessorError( + "Document type (doctype) is required for mso_mdoc format. " + "Provide doctype in request body or credential configuration. " + "See OpenID4VCI 1.0 § E.1.1 and ISO 18013-5 § 8.3.2.1.2.1" + ) + + # Use doctype from request if provided, otherwise from configuration + doctype = doctype_from_request or doctype_from_config + + if doctype_from_request and doctype_from_config: + if doctype_from_request != doctype_from_config: + raise CredProcessorError( + f"Document type mismatch: request contains '{doctype_from_request}' " + f"but credential configuration specifies '{doctype_from_config}'. " + "Ensure consistency between request and credential configuration." + ) + + # Validate doctype format (basic ISO format check) + if not doctype or not isinstance(doctype, str): + raise CredProcessorError( + "Invalid doctype format: expected non-empty string, " + f"got {type(doctype).__name__}" + ) + + if not doctype.startswith("org.iso."): + LOGGER.warning( + "Document type '%s' does not follow ISO format convention (org.iso.*)", + doctype, + ) + + return doctype + + def _extract_device_key( + self, pop: PopResult, ex_record: OID4VCIExchangeRecord + ) -> Optional[str]: + """Extract device authentication key from proof of possession or exchange record. + + Extracts and validates the device key for holder binding according to + ISO 18013-5 § 9.1.3.4 device authentication requirements and + OpenID4VCI proof of possession mechanisms. 
+ + Args: + pop: Proof of possession result containing holder key information + ex_record: Exchange record with credential issuance context + + Returns: + Serialized device key string (JWK JSON or key identifier), + or None if unavailable + + Raises: + CredProcessorError: If device key format is invalid or unsupported + """ + # Priority order: holder JWK > holder key ID > verification method from record + device_candidate = ( + pop.holder_jwk or pop.holder_kid or ex_record.verification_method + ) + + if isinstance(device_candidate, dict): + # M-4: strip private key material before serialising. + # The device key embedded in the mDoc MSO must contain ONLY public + # parameters; passing 'd' to the Rust isomdl library would leak + # the holder's private key into the issued credential. + _PUBLIC_JWK_FIELDS = frozenset(("kty", "crv", "x", "y", "n", "e")) + public_only = { + k: v for k, v in device_candidate.items() if k in _PUBLIC_JWK_FIELDS + } + return json.dumps(public_only) + elif isinstance(device_candidate, str): + # If a DID with fragment, prefer fragment (key id); otherwise raw string + m = re.match(r"did:(.+?):(.+?)(?:#(.*))?$", device_candidate) + if m: + method = m.group(1) + identifier = m.group(2) + fragment = m.group(3) + + if method == "jwk": + # did:jwk encodes the holder's public JWK as a base64url + # value in the DID identifier itself (i.e. between + # "did:jwk:" and "#0"). ACA-Py uses this method natively + # when a wallet generates ephemeral keys. + # + # Without special handling the generic DID regex returns + # only the fragment "0", and json.loads("0") silently + # produces the integer 0 — which the Rust isomdl library + # then receives as the holder key, causing an opaque + # failure with no hint that the root cause is a + # mis-parsed DID method. + try: + # Base64url may be missing padding — add it back. 
+ padding = "=" * (-len(identifier) % 4) + jwk_bytes = base64.urlsafe_b64decode(identifier + padding) + return jwk_bytes.decode("utf-8") + except Exception as exc: + raise CredProcessorError( + f"Invalid did:jwk identifier — could not decode " + f"embedded JWK from '{device_candidate}': {exc}" + ) from exc + + return fragment if fragment else device_candidate + else: + return device_candidate + + return None + + def _build_headers( + self, doctype: str, device_key_str: Optional[str] + ) -> Dict[str, Any]: + """Build mso_mdoc headers according to OID4VCI specification.""" + headers = {"doctype": doctype} + if device_key_str: + headers["deviceKey"] = device_key_str + return headers + + async def _resolve_signing_key( + self, + context: AdminRequestContext, + session: Any, + verification_method: Optional[str], + ) -> Dict[str, Any]: + """Resolve the signing key for credential issuance.""" + storage_manager = MdocStorageManager(context.profile) + + # Check for environment variables for static key + key_path = os.getenv("OID4VC_MDOC_SIGNING_KEY_PATH") + cert_path = os.getenv("OID4VC_MDOC_SIGNING_CERT_PATH") + + if ( + key_path + and cert_path + and os.path.exists(key_path) + and os.path.exists(cert_path) + ): + static_key_id = "static-signing-key" + # Use the same API as the rest of the signing-key path. 
+ existing_key = await storage_manager.get_signing_key( + session, identifier=static_key_id + ) + if not existing_key: + LOGGER.info("Loading static signing key from %s", key_path) + try: + with open(key_path, "r") as f: + private_key_pem = f.read() + with open(cert_path, "r") as f: + certificate_pem = f.read() + + # Derive JWK from PEM + jwk = pem_to_jwk(private_key_pem) + + await storage_manager.store_key( + session, + key_id=static_key_id, + jwk=jwk, + purpose="signing", + metadata={"static": True}, + ) + + cert_id = f"mdoc-cert-{static_key_id}" + await storage_manager.store_certificate( + session, + cert_id=cert_id, + certificate_pem=certificate_pem, + key_id=static_key_id, + metadata={"static": True, "purpose": "mdoc_issuing"}, + ) + + # Only set as default when no key has been configured yet. + # Without this guard the env-var key would silently overwrite + # whatever key the operator registered via the key management API. + existing_default = await storage_manager.get_config( + session, "default_signing_key" + ) + if not existing_default: + await storage_manager.store_config( + session, "default_signing_key", {"key_id": static_key_id} + ) + + except CredProcessorError: + raise + except Exception as e: + raise CredProcessorError( + f"Failed to load static signing key from {key_path!r}: {e}" + ) from e + + if verification_method: + # Use verification method to resolve signing key + if "#" in verification_method: + _, key_id = verification_method.split("#", 1) + else: + key_id = verification_method + + key_data = await storage_manager.get_signing_key( + session, + identifier=key_id, + verification_method=verification_method, + ) + + if key_data: + LOGGER.info( + "Using signing key from verification method: %s", + verification_method, + ) + return key_data + + # Fall back to default signing key from storage + key_data = await storage_manager.get_default_signing_key(session) + if key_data: + LOGGER.info("Using default signing key") + return key_data + + raise 
CredProcessorError( + "No default signing key is configured. " + "Register a signing key via the mso_mdoc key management API before issuing." + ) + async def issue( self, body: Any, @@ -28,57 +362,202 @@ async def issue( pop: PopResult, context: AdminRequestContext, ): - """Return signed credential in COBR format.""" - assert supported.format_data - if body.get("doctype") != supported.format_data.get("doctype"): - raise CredProcessorError("Requested doctype does not match offer.") + """Return signed credential in CBOR format. + + Issues an ISO 18013-5 compliant mDoc credential using the mobile + security object (MSO) format. The credential is CBOR-encoded and + follows the issuerSigned structure defined in ISO 18013-5. + + Protocol Compliance: + - OpenID4VCI 1.0 § 7.3.1: Credential Response for mso_mdoc format + - OpenID4VCI 1.0 Appendix E.1.1: mso_mdoc Credential format identifier + - ISO 18013-5 § 8.3: Mobile document structure + - ISO 18013-5 § 9.1.2: IssuerSigned data structure + - ISO 18013-5 § 9.1.3: Mobile security object (MSO) + - RFC 8949: CBOR encoding for binary efficiency + - RFC 8152: COSE signing for cryptographic protection + + OpenID4VCI 1.0 § E.1.1: mso_mdoc Format + https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0.html#appendix-E.1.1 + """ + if not supported.format_data: + raise CredProcessorError("Supported credential must have format_data") try: - headers = { - "doctype": supported.format_data.get("doctype"), - "deviceKey": re.sub( - "did:(.+?):(.+?)#(.*)", - "\\2", - json.dumps(pop.holder_jwk or pop.holder_kid), - ), - } - did = None + # Validate and extract doctype + doctype = self._validate_and_get_doctype(body, supported) + + # Extract device key for holder binding + device_key_str = self._extract_device_key(pop, ex_record) + + # Build mso_mdoc headers + headers = self._build_headers(doctype, device_key_str) + + # Get payload and verification method verification_method = ex_record.verification_method - payload = 
ex_record.credential_subject - mso_mdoc = await mso_mdoc_sign( - context.profile, headers, payload, did, verification_method + payload = prepare_mdoc_payload(ex_record.credential_subject, doctype) + + # Resolve signing key + async with context.profile.session() as session: + key_data = await self._resolve_signing_key( + context, session, verification_method + ) + key_id = key_data.get("key_id") + # C-1: private_key_pem is no longer persisted in metadata. + # Reconstruct it on-demand from the JWK 'd' parameter. + private_key_pem = key_data.get("metadata", {}).get("private_key_pem") + if not private_key_pem: + signing_jwk = key_data.get("jwk", {}) + if signing_jwk.get("d"): + private_key_pem = pem_from_jwk(signing_jwk) + + # Fetch certificate + storage_manager = MdocStorageManager(context.profile) + certificate_pem = await storage_manager.get_certificate_for_key( + session, key_id + ) + + if not certificate_pem: + raise CredProcessorError( + f"Certificate not found for key {key_id!r}. " + "Keys must be registered with a certificate before use." + ) + + if not private_key_pem: + raise CredProcessorError("Private key PEM not found for signing key") + + if not certificate_pem: + raise CredProcessorError("Certificate PEM not found for signing key") + + # Validity-period guard: reject expired or not-yet-valid certificates + # before passing them to the Rust signing library. 
+ check_certificate_not_expired(certificate_pem) + + if not device_key_str and not pop.holder_jwk: + raise CredProcessorError( + "No device key available: provide holder_jwk, " + "holder_kid, or verification_method" + ) + + # Clean up JWK for isomdl (remove extra fields like kid, alg, use) + # isomdl rejects alg and use fields in the holder JWK + if pop.holder_jwk and isinstance(pop.holder_jwk, dict): + if pop.holder_jwk.get("kty") != "EC": + raise CredProcessorError( + "mso_mdoc requires an EC holder key, " + f"got kty={pop.holder_jwk.get('kty')}" + ) + holder_jwk_clean = { + k: v + for k, v in pop.holder_jwk.items() + if k in ["kty", "crv", "x", "y"] + } + else: + # Fallback: build a minimal JWK placeholder from device_key_str + # The Rust library needs a JWK dict for the holder key binding + holder_jwk_clean = None + + # Issue mDoc using isomdl-uniffi library with ISO 18013-5 compliance + LOGGER.debug( + "Issuing mso_mdoc with holder_jwk=%s headers=%s payload_keys=%s", + holder_jwk_clean, + headers, + (list(payload.keys()) if isinstance(payload, dict) else type(payload)), + ) + # Use cleaned JWK if available, otherwise fall back to + # the device key extracted from holder_kid / verification_method. + # isomdl_mdoc_sign expects a dict-like JWK. + signing_holder_key = holder_jwk_clean + if signing_holder_key is None and device_key_str: + try: + signing_holder_key = json.loads(device_key_str) + except (json.JSONDecodeError, TypeError): + # device_key_str is a key-id, not a JWK — + # cannot bind holder key without a JWK. + raise CredProcessorError( + "Holder key identifier provided but a full " + "EC JWK is required for mso_mdoc device " + "key binding. Provide holder_jwk in the " + "proof of possession." + ) + + if signing_holder_key is None: + raise CredProcessorError( + "Unable to resolve a holder JWK for device key binding." 
+ ) + + mso_mdoc = isomdl_mdoc_sign( + signing_holder_key, headers, payload, certificate_pem, private_key_pem ) - mso_mdoc = mso_mdoc[2:-1] if mso_mdoc.startswith("b'") else None + + # Normalize mDoc result handling for robust string/bytes processing + mso_mdoc = normalize_mdoc_result(mso_mdoc) + + LOGGER.info( + "Issued mso_mdoc credential with doctype: %s, format: %s", + doctype, + supported.format, + ) + except Exception as ex: - raise CredProcessorError("Failed to issue credential") from ex + # Log full exception for debugging before raising a generic error + LOGGER.exception("mso_mdoc issuance error: %s", ex) + # Surface the underlying exception text in the CredProcessorError + raise CredProcessorError(f"Failed to issue mso_mdoc credential: {ex}") from ex return mso_mdoc + def _prepare_payload( + self, payload: Dict[str, Any], doctype: str = None + ) -> Dict[str, Any]: + return prepare_mdoc_payload(payload, doctype) + + def _normalize_mdoc_result(self, result: Any) -> str: + return normalize_mdoc_result(result) + def validate_credential_subject(self, supported: SupportedCredential, subject: dict): """Validate the credential subject.""" - pass + if not subject: + raise CredProcessorError("Credential subject cannot be empty") + + if not isinstance(subject, dict): + raise CredProcessorError("Credential subject must be a dictionary") + + return True def validate_supported_credential(self, supported: SupportedCredential): """Validate a supported MSO MDOC Credential.""" - pass + if not supported.format_data: + raise CredProcessorError("format_data is required for mso_mdoc format") - def format_data_is_top_level(self) -> bool: - """mso_mdoc format_data fields belong at the top level of credential config.""" - return True + # Validate doctype presence and format + self._validate_and_get_doctype({}, supported) - # COSE algorithm name → integer identifier mapping (RFC 8152 / IANA COSE registry) - _COSE_ALG: dict = {"ES256": -7, "ES384": -35, "ES512": -36, "ES256K": -47} 
+ return True - def transform_issuer_metadata(self, metadata: dict) -> None: - """Convert mso_mdoc algorithm names to COSE integer identifiers. + async def verify_credential( + self, + profile: Profile, + credential: Any, + ): + """Verify an mso_mdoc credential.""" + # Always build a per-request WalletTrustStore from the calling profile + # so each tenant's Askar partition is queried (wallet-scoped registry). + trust_store = WalletTrustStore(profile) + verifier = MsoMdocCredVerifier(trust_store=trust_store) + return await verifier.verify_credential(profile, credential) - Per OID4VCI spec Appendix E and ISO 18013-5, ``credential_signing_alg_ - values_supported`` for mso_mdoc must contain COSE algorithm integer - identifiers (e.g. -7 for ES256), NOT string names. This method converts - any string entries in-place. - """ - algs = metadata.get("credential_signing_alg_values_supported") - if algs: - metadata["credential_signing_alg_values_supported"] = [ - self._COSE_ALG.get(a, a) if isinstance(a, str) else a for a in algs - ] + async def verify_presentation( + self, + profile: Profile, + presentation: Any, + presentation_record: "OID4VPPresentation", + ): + """Verify an mso_mdoc presentation.""" + # Always build a per-request WalletTrustStore from the calling profile + # so each tenant's Askar partition is queried (wallet-scoped registry). + trust_store = WalletTrustStore(profile) + verifier = MsoMdocPresVerifier(trust_store=trust_store) + return await verifier.verify_presentation( + profile, presentation, presentation_record + ) diff --git a/oid4vc/mso_mdoc/key_generation.py b/oid4vc/mso_mdoc/key_generation.py new file mode 100644 index 000000000..3209ed9e1 --- /dev/null +++ b/oid4vc/mso_mdoc/key_generation.py @@ -0,0 +1,481 @@ +"""Key and certificate generation utilities for mso_mdoc. + +This module provides cryptographic key generation functions that comply with +ISO 18013-5 requirements for mDoc issuance and verification. 
All generated +keys use ECDSA with P-256 curve as specified in ISO 18013-5 § 9.1.3.5. + +Key Protocol Compliance: +- ISO/IEC 18013-5:2021 § 9.1.3.5 - Cryptographic algorithms for mDoc +- RFC 7517 - JSON Web Key (JWK) format +- RFC 7518 § 3.4 - ES256 signature algorithm +- RFC 8152 - CBOR Object Signing and Encryption (COSE) +""" + +import base64 +import logging +import os +import uuid +from datetime import UTC, datetime, timedelta +from typing import Any, Dict, Optional, Tuple + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.x509.oid import NameOID + +LOGGER = logging.getLogger(__name__) + + +def int_to_base64url_uint(val: int, length: int = 32) -> str: + """Convert integer to base64url unsigned integer. + + Converts an elliptic curve coordinate integer to base64url encoding + as required by RFC 7517 for EC JWK format. + + Args: + val: Integer value to encode + length: Byte length for the integer (default 32 for P-256) + + Returns: + Base64url-encoded string without padding + """ + val_bytes = val.to_bytes(length, byteorder="big") + return base64.urlsafe_b64encode(val_bytes).decode("ascii").rstrip("=") + + +def generate_ec_key_pair() -> Tuple[str, str, Dict[str, Any]]: + """Generate an ECDSA key pair for mDoc signing. + + Generates a P-256 (secp256r1) elliptic curve key pair compliant with + ISO 18013-5 § 9.1.3.5 requirements for mDoc cryptographic operations. + The generated key supports ES256 algorithm as specified in RFC 7518 § 3.4. 
+ + Returns: + Tuple containing: + - private_key_pem: PEM-encoded private key string + - public_key_pem: PEM-encoded public key string + - jwk: JSON Web Key dictionary with EC parameters + + Raises: + ValueError: If key generation parameters are invalid + RuntimeError: If cryptographic operation fails + + Example: + >>> private_pem, public_pem, jwk = generate_ec_key_pair() + >>> print(jwk['kty']) # 'EC' + >>> print(jwk['crv']) # 'P-256' + """ + # Generate private key + private_key = ec.generate_private_key(ec.SECP256R1()) + + # Serialize private key to PEM + private_pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode("utf-8") + + # Serialize public key to PEM + public_key = private_key.public_key() + public_pem = public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ).decode("utf-8") + + # Create JWK representation + private_numbers = private_key.private_numbers() + public_numbers = private_numbers.public_numbers + + jwk = { + "kty": "EC", + "crv": "P-256", + "x": int_to_base64url_uint(public_numbers.x), + "y": int_to_base64url_uint(public_numbers.y), + "d": int_to_base64url_uint(private_numbers.private_value), + } + + return private_pem, public_pem, jwk + + +def pem_to_jwk(private_key_pem: str) -> Dict[str, Any]: + """Derive JWK from a PEM-encoded EC private key. + + M-1 fix: detect the actual curve instead of hard-coding "P-256". + + Args: + private_key_pem: PEM-encoded private key string + + Returns: + JSON Web Key dictionary with EC parameters + """ + private_key = serialization.load_pem_private_key( + private_key_pem.encode("utf-8"), password=None + ) + + if not isinstance(private_key, ec.EllipticCurvePrivateKey): + raise ValueError("PEM must be an EC private key") + + # Map cryptography curve instances to JWK crv names and their byte sizes. 
+ _CURVE_MAP = { + ec.SECP256R1: ("P-256", 32), + ec.SECP384R1: ("P-384", 48), + ec.SECP521R1: ("P-521", 66), + } + curve_type = type(private_key.curve) + crv_info = _CURVE_MAP.get(curve_type) + if crv_info is None: + raise ValueError(f"Unsupported EC curve: {curve_type.__name__}") + crv_name, byte_len = crv_info + + private_numbers = private_key.private_numbers() + public_numbers = private_numbers.public_numbers + + return { + "kty": "EC", + "crv": crv_name, + "x": int_to_base64url_uint(public_numbers.x, byte_len), + "y": int_to_base64url_uint(public_numbers.y, byte_len), + "d": int_to_base64url_uint(private_numbers.private_value, byte_len), + } + + +def pem_from_jwk(jwk: Dict[str, Any]) -> str: + """Reconstruct a PEM-encoded EC private key from a JWK containing a 'd' parameter. + + C-1 fix: allows callers to avoid persisting raw PEM blobs — the JWK ``d`` + parameter is the single source of truth for the private scalar. + + Args: + jwk: JSON Web Key dictionary containing at least kty, crv, x, y, d. + + Returns: + PEM-encoded PKCS#8 private key string. + + Raises: + ValueError: If the JWK is missing required fields or uses an unsupported curve. 
+ """ + kty = jwk.get("kty") + if kty != "EC": + raise ValueError(f"pem_from_jwk: expected EC key, got {kty!r}") + + crv = jwk.get("crv", "P-256") + _CURVE_MAP_INV = { + "P-256": (ec.SECP256R1(), 32), + "P-384": (ec.SECP384R1(), 48), + "P-521": (ec.SECP521R1(), 66), + } + crv_info = _CURVE_MAP_INV.get(crv) + if crv_info is None: + raise ValueError(f"pem_from_jwk: unsupported curve {crv!r}") + curve, _byte_len = crv_info + + def _b64url_to_int(s: str) -> int: + padded = s + "=" * (-len(s) % 4) + return int.from_bytes(base64.urlsafe_b64decode(padded), "big") + + missing = [f for f in ("x", "y", "d") if f not in jwk] + if missing: + raise ValueError(f"pem_from_jwk: JWK is missing required field(s): {missing}") + + public_numbers = ec.EllipticCurvePublicNumbers( + x=_b64url_to_int(jwk["x"]), + y=_b64url_to_int(jwk["y"]), + curve=curve, + ) + private_numbers = ec.EllipticCurvePrivateNumbers( + private_value=_b64url_to_int(jwk["d"]), + public_numbers=public_numbers, + ) + private_key = private_numbers.private_key() + + return private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode("utf-8") + + +def generate_self_signed_certificate( + private_key_pem: str, + subject_name: str = "CN=mDoc Test Issuer,C=US", + issuer_name: Optional[str] = None, + validity_days: int = 365, +) -> str: + """Generate a self-signed X.509 IACA certificate for mDoc issuer. + + Creates a self-signed certificate compliant with ISO 18013-5 Annex B + requirements for IACA (Issuing Authority Certificate Authority) + authentication. The certificate includes all required extensions for + proper trust chain validation. 
+ + Required Extensions per ISO 18013-5 Annex B.1.1: + - BasicConstraints: CA=True + - KeyUsage: keyCertSign, cRLSign + - SubjectKeyIdentifier: SHA-1 hash of public key + - CRLDistributionPoints: HTTP URI for CRL + - IssuerAlternativeName: URI + + Extension helpers use the standard `from_public_key` / `from_issuer_public_key` + class methods so that the DER encoding is strictly correct. Manual construction + of SubjectKeyIdentifier/AuthorityKeyIdentifier bytes was previously used here but + can produce DER that the Rust x509_cert crate (used by isomdl_uniffi) fails to + parse when ``Certificate::from_pem`` is called. + + Args: + private_key_pem: Private key in PEM format for signing + subject_name: Subject Distinguished Name (default: CN=mDoc Test Issuer,C=US) + issuer_name: Issuer DN (uses subject_name if None) + validity_days: Certificate validity period in days (default: 365) + + Returns: + PEM-encoded X.509 certificate string + + Raises: + ValueError: If private key format is invalid or parameters are invalid + RuntimeError: If certificate generation fails + + Example: + >>> private_pem, _, _ = generate_ec_key_pair() + >>> cert = generate_self_signed_certificate(private_pem) + >>> print("-----BEGIN CERTIFICATE-----" in cert) # True + """ + # Load private key + private_key = serialization.load_pem_private_key( + private_key_pem.encode("utf-8"), password=None + ) + + if issuer_name is None: + issuer_name = subject_name + + # Parse subject and issuer names + def parse_dn(dn_string: str) -> x509.Name: + r"""Parse a DN string into an x509.Name. + + Prefers ``x509.Name.from_rfc4514_string()`` (cryptography >= 38.0), + which correctly handles RFC 4514 escaping (commas inside values, + multi-valued RDNs such as ``O=Doe\, Inc``). + + Falls back to the minimal comma-split implementation for older + cryptography versions, which is sufficient for the straightforward + DNs generated by this module (CN, O, C, ST, L without escaped + characters). 
+ """ + try: + # from_rfc4514_string reverses the attribute order from + # most-specific-first (RFC 4514 string) to most-general-first + # (X.509 DER / ASN.1), matching what x509.Name() produces. + return x509.Name.from_rfc4514_string(dn_string) + except AttributeError: + # cryptography < 38.0: fall through to minimal parser. + pass + name_parts = [] + for part in dn_string.split(","): + part = part.strip() + if "=" in part: + attr, value = part.split("=", 1) + attr = attr.strip().upper() + value = value.strip() + + if attr == "CN": + name_parts.append(x509.NameAttribute(NameOID.COMMON_NAME, value)) + elif attr == "O": + name_parts.append( + x509.NameAttribute(NameOID.ORGANIZATION_NAME, value) + ) + elif attr == "C": + name_parts.append(x509.NameAttribute(NameOID.COUNTRY_NAME, value)) + elif attr == "ST": + name_parts.append( + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, value) + ) + elif attr == "L": + name_parts.append(x509.NameAttribute(NameOID.LOCALITY_NAME, value)) + return x509.Name(name_parts) + + subject = parse_dn(subject_name) + issuer = parse_dn(issuer_name) + + public_key = private_key.public_key() + + # Generate certificate + now = datetime.now(UTC) + cert_builder = x509.CertificateBuilder() + cert_builder = cert_builder.subject_name(subject) + cert_builder = cert_builder.issuer_name(issuer) + cert_builder = cert_builder.public_key(public_key) + cert_builder = cert_builder.serial_number(int(uuid.uuid4())) + cert_builder = cert_builder.not_valid_before(now) + cert_builder = cert_builder.not_valid_after(now + timedelta(days=validity_days)) + + # Add ISO 18013-5 Annex B required extensions for IACA certificate + + # 1. BasicConstraints - CA=True (required) + cert_builder = cert_builder.add_extension( + x509.BasicConstraints(ca=True, path_length=0), + critical=True, + ) + + # 2. 
KeyUsage - keyCertSign and cRLSign (required for IACA) + cert_builder = cert_builder.add_extension( + x509.KeyUsage( + digital_signature=False, + key_cert_sign=True, + crl_sign=True, + key_encipherment=False, + data_encipherment=False, + key_agreement=False, + content_commitment=False, + encipher_only=False, + decipher_only=False, + ), + critical=True, + ) + + # 3. SubjectKeyIdentifier - use the standard helper to produce correct DER. + # Manual bytes-based construction was previously used but can generate DER + # that x509_cert (Rust) rejects in Certificate::from_pem. + cert_builder = cert_builder.add_extension( + x509.SubjectKeyIdentifier.from_public_key(public_key), + critical=False, + ) + + # 3b. AuthorityKeyIdentifier - use from_issuer_public_key for correct DER encoding. + cert_builder = cert_builder.add_extension( + x509.AuthorityKeyIdentifier.from_issuer_public_key(public_key), + critical=False, + ) + + # 4. CRLDistributionPoints - HTTP URI (required per Annex B) + # M-7: configurable via OID4VC_MDOC_CRL_URI; default is a placeholder. + crl_uri = os.getenv("OID4VC_MDOC_CRL_URI", "http://example.com/crl") + cert_builder = cert_builder.add_extension( + x509.CRLDistributionPoints( + [ + x509.DistributionPoint( + full_name=[x509.UniformResourceIdentifier(crl_uri)], + relative_name=None, + reasons=None, + crl_issuer=None, + ) + ] + ), + critical=False, + ) + + # 5. IssuerAlternativeName - URI type (required per Annex B). + # URI is used here instead of RFC822Name because the x509_cert Rust crate + # used by isomdl_uniffi has been observed to reject certs with RFC822Name + # in IssuerAlternativeName when parsing via Certificate::from_pem. + # M-7: configurable via OID4VC_MDOC_ISSUER_URI; default is a placeholder. 
async def generate_default_keys_and_certs(
    storage_manager: Any, session: Any
) -> Dict[str, Any]:
    """Generate default keys and certificates for mDoc operations.

    Creates a complete set of cryptographic materials for mDoc issuance:
    an ECDSA P-256 signing key and a self-signed X.509 certificate,
    generated per ISO 18013-5 and persisted via the supplied storage
    manager. The new key and certificate are also registered as the
    wallet-wide defaults ("default_signing_key" / "default_certificate").

    Args:
        storage_manager: MdocStorageManager instance for persistent storage
        session: Database session for storage operations

    Returns:
        Dictionary containing generated material:
            - key_id: Identifier for the signing key
            - cert_id: Identifier for the X.509 certificate
            - jwk: JSON Web Key for the generated key pair
            - private_key_pem / public_key_pem: PEM-encoded key material
            - certificate_pem: PEM-encoded self-signed certificate

    Raises:
        StorageError: If key/certificate storage fails
        RuntimeError: If key generation fails

    Example:
        >>> storage = MdocStorageManager(profile)
        >>> result = await generate_default_keys_and_certs(storage, session)
        >>> print(result['key_id'])  # 'mdoc-key-abc12345'
    """
    LOGGER.info("Generating default mDoc keys and certificates")

    # Generate key pair
    private_pem, public_pem, jwk = generate_ec_key_pair()
    key_id = f"mdoc-key-{uuid.uuid4().hex[:8]}"

    # Store the key.
    # C-1: do NOT store private_key_pem; the JWK 'd' parameter is the
    # single source of truth for the private scalar.
    await storage_manager.store_key(
        session,
        key_id=key_id,
        jwk=jwk,
        purpose="signing",
        metadata={
            "public_key_pem": public_pem,
            "key_type": "EC",
            "curve": "P-256",
        },
    )

    # Single source of truth for the validity window. Previously the
    # certificate metadata hard-coded 365 separately from the value passed
    # to generate_self_signed_certificate, and called datetime.now(UTC)
    # twice, so valid_from/valid_to could drift apart (microseconds) and
    # silently diverge from the certificate if one constant was changed.
    validity_days = 365
    valid_from = datetime.now(UTC)
    valid_to = valid_from + timedelta(days=validity_days)

    # Generate certificate with ISO 18013-5 compliant subject name.
    # Must include stateOrProvinceName (ST) for IACA validation.
    # Configurable via OID4VC_MDOC_CERT_SUBJECT environment variable.
    default_subject = "CN=mDoc Test Issuer,O=ACA-Py,ST=NY,C=US"
    cert_subject = os.getenv("OID4VC_MDOC_CERT_SUBJECT", default_subject)
    cert_pem = generate_self_signed_certificate(
        private_key_pem=private_pem,
        subject_name=cert_subject,
        validity_days=validity_days,
    )

    cert_id = f"mdoc-cert-{uuid.uuid4().hex[:8]}"

    # Store the certificate
    await storage_manager.store_certificate(
        session,
        cert_id=cert_id,
        certificate_pem=cert_pem,
        key_id=key_id,
        metadata={
            "self_signed": True,
            "purpose": "mdoc_issuing",
            "issuer_dn": cert_subject,
            "subject_dn": cert_subject,
            "valid_from": valid_from.isoformat(),
            "valid_to": valid_to.isoformat(),
        },
    )

    # Register the new material as the wallet-wide defaults.
    await storage_manager.store_config(
        session, "default_signing_key", {"key_id": key_id}
    )
    await storage_manager.store_config(
        session, "default_certificate", {"cert_id": cert_id}
    )

    LOGGER.info("Generated default mDoc key: %s and certificate: %s", key_id, cert_id)

    return {
        "key_id": key_id,
        "cert_id": cert_id,
        "jwk": jwk,
        "private_key_pem": private_pem,
        "public_key_pem": public_pem,
        "certificate_pem": cert_pem,
    }
aiohttp_apispec import docs, response_schema +from marshmallow import fields + +from .key_generation import generate_default_keys_and_certs +from .storage import MdocStorageManager + + +# ============================================================================= +# Schemas +# ============================================================================= + + +class MdocKeyListSchema(OpenAPISchema): + """Response schema for listing mDoc keys.""" + + keys = fields.List( + fields.Dict(), + required=True, + metadata={"description": "List of stored mDoc keys"}, + ) + + +class MdocCertListSchema(OpenAPISchema): + """Response schema for listing mDoc certificates.""" + + certificates = fields.List( + fields.Dict(), + required=True, + metadata={"description": "List of stored mDoc certificates"}, + ) + + +class MdocKeyGenSchema(OpenAPISchema): + """Response schema for key generation.""" + + key_id = fields.Str(required=True, metadata={"description": "Generated key ID"}) + cert_id = fields.Str( + required=True, metadata={"description": "Generated certificate ID"} + ) + message = fields.Str(required=True, metadata={"description": "Success message"}) + + +class DefaultCertificateResponseSchema(OpenAPISchema): + """Response schema for default certificate.""" + + cert_id = fields.Str(required=True, metadata={"description": "Certificate ID"}) + key_id = fields.Str(required=True, metadata={"description": "Associated key ID"}) + certificate_pem = fields.Str( + required=True, metadata={"description": "PEM-encoded certificate"} + ) + created_at = fields.Str(required=True, metadata={"description": "Creation timestamp"}) + metadata = fields.Dict( + required=False, metadata={"description": "Certificate metadata"} + ) + + +# ============================================================================= +# Handlers +# ============================================================================= + + +@docs( + tags=["mso_mdoc"], + summary="List all mDoc signing keys", +) 
+@response_schema(MdocKeyListSchema(), 200) +async def list_keys(request: web.BaseRequest): + """List all stored mDoc keys.""" + context: AdminRequestContext = request["context"] + storage_manager = MdocStorageManager(context.profile) + + try: + async with context.profile.session() as session: + keys = await storage_manager.list_keys(session) + # Remove sensitive key material from response. The metadata dict may + # contain private_key_pem / public_key_pem from store_signing_key(), and + # the raw JWK (including the 'd' parameter) under both 'jwk' and metadata + # keys. Explicitly allowlist safe fields rather than trying to blocklist. + _SAFE_METADATA_KEYS = { + "verification_method", + "key_id", + "key_type", + "curve", + "purpose", + "is_default", + "generated_on_demand", + "static", + } + safe_keys = [] + for key in keys: + safe_key = { + "key_id": key.get("key_id", "unknown"), + "key_type": key.get("key_type", "ES256"), # Default to ES256 if not set + "created_at": key.get("created_at"), + "metadata": { + k: v + for k, v in key.get("metadata", {}).items() + if k in _SAFE_METADATA_KEYS + }, + } + safe_keys.append(safe_key) + + return web.json_response({"keys": safe_keys}) + except Exception as e: + raise web.HTTPInternalServerError(reason=f"Failed to list keys: {e}") from e + + +@docs( + tags=["mso_mdoc"], + summary="List all mDoc certificates", +) +@response_schema(MdocCertListSchema(), 200) +async def list_certificates(request: web.BaseRequest): + """List all stored mDoc certificates. 
+ + Query parameters: + include_pem: If "true", include the certificate_pem field in results + """ + context: AdminRequestContext = request["context"] + storage_manager = MdocStorageManager(context.profile) + + # Check for include_pem query parameter + include_pem = request.query.get("include_pem", "").lower() == "true" + + try: + async with context.profile.session() as session: + certificates = await storage_manager.list_certificates( + session, include_pem=include_pem + ) + return web.json_response({"certificates": certificates}) + except Exception as e: + raise web.HTTPInternalServerError( + reason=f"Failed to list certificates: {e}" + ) from e + + +@docs( + tags=["mso_mdoc"], + summary="Get the default signing certificate", + description="Returns the certificate that will be used for credential signing", +) +@response_schema(DefaultCertificateResponseSchema(), 200) +async def get_default_certificate(request: web.BaseRequest): + """Get the default signing certificate. + + This returns the certificate that will be used when issuing mDoc credentials. + The default certificate is associated with the default signing key. 
+ """ + context: AdminRequestContext = request["context"] + storage_manager = MdocStorageManager(context.profile) + + try: + async with context.profile.session() as session: + # Get the default signing key first + default_key = await storage_manager.get_default_signing_key(session) + + if not default_key: + raise web.HTTPNotFound(reason="No default signing key configured") + + key_id = default_key["key_id"] + + # Get the certificate associated with this key + certificate_pem = await storage_manager.get_certificate_for_key( + session, key_id + ) + + if not certificate_pem: + raise web.HTTPNotFound( + reason=f"No certificate found for default signing key: {key_id}" + ) + + # Get full certificate info + certificates = await storage_manager.list_certificates( + session, include_pem=True + ) + + # Find the certificate for this key + cert_info = None + for cert in certificates: + if cert.get("key_id") == key_id: + cert_info = cert + break + + if not cert_info: + # Fall back to basic response + return web.json_response( + { + "cert_id": f"cert-for-{key_id}", + "key_id": key_id, + "certificate_pem": certificate_pem, + "created_at": default_key.get("created_at", ""), + "metadata": {}, + } + ) + + return web.json_response( + { + "cert_id": cert_info.get("cert_id"), + "key_id": key_id, + "certificate_pem": certificate_pem, + "created_at": cert_info.get("created_at", ""), + "metadata": cert_info.get("metadata", {}), + } + ) + + except web.HTTPError: + raise + except Exception as e: + raise web.HTTPInternalServerError( + reason=f"Failed to get default certificate: {e}" + ) from e + + +@docs( + tags=["mso_mdoc"], + summary="Generate new mDoc signing key and certificate", + description="Generates a new mDoc signing key and self-signed certificate. " + "If force=false (default) and keys already exist, returns the existing key.", +) +@response_schema(MdocKeyGenSchema(), 200) +async def generate_keys(request: web.BaseRequest): + """Generate new mDoc signing key and certificate. 
+ + Query parameters: + force: If "true", always generate new keys even if keys already exist. + Default is "false" - returns existing keys if present. + """ + context: AdminRequestContext = request["context"] + storage_manager = MdocStorageManager(context.profile) + + # Check for force query parameter + force = request.query.get("force", "").lower() == "true" + + try: + async with context.profile.session() as session: + # Check if keys already exist (unless force is set) + if not force: + existing_key = await storage_manager.get_default_signing_key(session) + if existing_key: + # Get the associated certificate + key_id = existing_key["key_id"] + certificates = await storage_manager.list_certificates(session) + cert_id = None + for cert in certificates: + if cert.get("key_id") == key_id: + cert_id = cert.get("cert_id") + break + + return web.json_response( + { + "key_id": key_id, + "cert_id": cert_id or f"cert-for-{key_id}", + "message": ( + "Existing mDoc signing key found " + "(use ?force=true to generate new)" + ), + } + ) + + # Generate new keys + generated = await generate_default_keys_and_certs(storage_manager, session) + return web.json_response( + { + "key_id": generated["key_id"], + "cert_id": generated["cert_id"], + "message": ( + "Successfully generated new mDoc signing key and certificate" + ), + } + ) + except Exception as e: + raise web.HTTPInternalServerError(reason=f"Failed to generate keys: {e}") from e + + +# ============================================================================= +# Route registration +# ============================================================================= + + +def register_key_routes(app: web.Application): + """Register signing key and certificate management routes.""" + app.router.add_get("/mso_mdoc/keys", list_keys) + app.router.add_get("/mso_mdoc/certificates", list_certificates) + app.router.add_get("/mso_mdoc/certificates/default", get_default_certificate) + app.router.add_post("/mso_mdoc/generate-keys", 
generate_keys) + + +# Backward-compat alias used by routes.py +register_key_management_routes = register_key_routes diff --git a/oid4vc/mso_mdoc/mdoc/__init__.py b/oid4vc/mso_mdoc/mdoc/__init__.py index a3767ae51..4dc0e3dac 100644 --- a/oid4vc/mso_mdoc/mdoc/__init__.py +++ b/oid4vc/mso_mdoc/mdoc/__init__.py @@ -1,18 +1,15 @@ """MDoc module.""" -from .issuer import mso_mdoc_sign, mdoc_sign -from .verifier import mso_mdoc_verify, mdoc_verify, MdocVerifyResult -from .exceptions import MissingPrivateKey, MissingIssuerAuth -from .exceptions import NoDocumentTypeProvided, NoSignedDocumentProvided +from .issuer import isomdl_mdoc_sign, parse_mdoc +from .utils import extract_signing_cert, flatten_trust_anchors, split_pem_chain +from .verifier import MdocVerifyResult, mdoc_verify __all__ = [ - "mso_mdoc_sign", - "mdoc_sign", - "mso_mdoc_verify", + "isomdl_mdoc_sign", + "parse_mdoc", "mdoc_verify", "MdocVerifyResult", - "MissingPrivateKey", - "MissingIssuerAuth", - "NoDocumentTypeProvided", - "NoSignedDocumentProvided", + "split_pem_chain", + "extract_signing_cert", + "flatten_trust_anchors", ] diff --git a/oid4vc/mso_mdoc/mdoc/exceptions.py b/oid4vc/mso_mdoc/mdoc/exceptions.py deleted file mode 100644 index a34006d00..000000000 --- a/oid4vc/mso_mdoc/mdoc/exceptions.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Exceptions module.""" - - -class MissingPrivateKey(Exception): - """Missing private key error.""" - - pass - - -class NoDocumentTypeProvided(Exception): - """No document type error.""" - - pass - - -class NoSignedDocumentProvided(Exception): - """No signed document provider error.""" - - pass - - -class MissingIssuerAuth(Exception): - """Missing issuer authentication error.""" - - pass diff --git a/oid4vc/mso_mdoc/mdoc/issuer.py b/oid4vc/mso_mdoc/mdoc/issuer.py index f63c0836b..509ce60af 100644 --- a/oid4vc/mso_mdoc/mdoc/issuer.py +++ b/oid4vc/mso_mdoc/mdoc/issuer.py @@ -1,142 +1,177 @@ -"""Operations supporting mso_mdoc issuance.""" +"""Operations supporting mso_mdoc 
issuance using isomdl-uniffi. + +This module implements ISO/IEC 18013-5:2021 compliant mobile document issuance +using the isomdl-uniffi Rust library via UniFFI bindings. It provides +cryptographic operations for creating signed mobile documents (mDocs) including +mobile driver's licenses (mDLs). + +Protocol Compliance: +- OpenID4VCI 1.0 § E.1.1: mso_mdoc Credential Format + https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0.html#appendix-E.1.1 +- ISO/IEC 18013-5:2021 § 8: Mobile document format and structure +- ISO/IEC 18013-5:2021 § 9: Cryptographic mechanisms +- RFC 8152: CBOR Object Signing and Encryption (COSE) +- RFC 8949: Concise Binary Object Representation (CBOR) +- RFC 7517: JSON Web Key (JWK) format for key material + +The mso_mdoc format is defined in OpenID4VCI 1.0 Appendix E.1.1 as a specific +credential format that follows the ISO 18013-5 mobile document structure. +""" import json import logging -import os -from binascii import hexlify from typing import Any, Mapping, Optional -import cbor2 -from acapy_agent.core.profile import Profile -from acapy_agent.wallet.base import BaseWallet -from acapy_agent.wallet.default_verification_key_strategy import ( - BaseVerificationKeyStrategy, -) -from acapy_agent.wallet.util import b64_to_bytes, bytes_to_b64 -from pycose.keys import CoseKey -from pydid import DIDUrl +# ISO 18013-5 § 8.4: Presentation session +# ISO 18013-5 § 9.1.3.5: ECDSA P-256 key pairs +# ISO 18013-5 § 8.4.1: Session establishment +# ISO 18013-5 § 8.4.2: Response handling +# Test mDL generation for ISO 18013-5 compliance +# Import ISO 18013-5 compliant mDoc operations from isomdl-uniffi +# These provide cryptographically secure implementations of: +# - mDoc creation and signing (ISO 18013-5 § 8.3) +# - Presentation protocols (ISO 18013-5 § 8.4) +# - P-256 elliptic curve cryptography (ISO 18013-5 § 9.1.3.5) +from isomdl_uniffi import Mdoc # ISO 18013-5 § 8.3: Mobile document structure -from ..mso import MsoIssuer -from ..x509 
import selfsigned_x509cert +from .utils import extract_signing_cert LOGGER = logging.getLogger(__name__) -def dict_to_b64(value: Mapping[str, Any]) -> str: - """Encode a dictionary as a b64 string.""" - return bytes_to_b64(json.dumps(value).encode(), urlsafe=True, pad=False) +def _prepare_mdl_namespaces( + payload: Mapping[str, Any], +) -> tuple[str, Optional[str]]: + """Prepare mDL namespace items for create_and_sign_mdl. + + Args: + payload: The credential payload + Returns: + Tuple of (mdl_items_json, aamva_items_json) where aamva_items_json + may be None. Both are JSON-serialized dicts; isomdl-uniffi handles + CBOR encoding internally. + """ + mdl_payload = payload.get("org.iso.18013.5.1", payload) + mdl_items = {k: v for k, v in mdl_payload.items() if k != "org.iso.18013.5.1.aamva"} -def b64_to_dict(value: str) -> Mapping[str, Any]: - """Decode a dictionary from a b64 encoded value.""" - return json.loads(b64_to_bytes(value, urlsafe=True)) + aamva_payload = payload.get("org.iso.18013.5.1.aamva") + aamva_items_json = json.dumps(aamva_payload) if aamva_payload else None + return json.dumps(mdl_items), aamva_items_json -def nym_to_did(value: str) -> str: - """Return a did from nym if passed value is nym, else return value.""" - return value if value.startswith("did:") else f"did:sov:{value}" +def _prepare_generic_namespaces(doctype: str, payload: Mapping[str, Any]) -> dict: + """Prepare namespaces for generic doctypes. -def did_lookup_name(value: str) -> str: - """Return the value used to lookup a DID in the wallet. + Args: + doctype: The document type + payload: The credential payload - If value is did:sov, return the unqualified value. Else, return value. + Returns: + Dictionary of namespaces with JSON-encoded element values + for use with Mdoc.create_and_sign. 
""" - return value.split(":", 3)[2] if value.startswith("did:sov:") else value + encoded_payload = {k: json.dumps(v) for k, v in payload.items()} + return {doctype: encoded_payload} -async def mso_mdoc_sign( - profile: Profile, +def isomdl_mdoc_sign( + jwk: dict, headers: Mapping[str, Any], payload: Mapping[str, Any], - did: Optional[str] = None, - verification_method: Optional[str] = None, + iaca_cert_pem: str, + iaca_key_pem: str, ) -> str: - """Create a signed mso_mdoc given headers, payload, and signing DID or DID URL.""" - if verification_method is None: - if did is None: - raise ValueError("did or verificationMethod required.") - - did = nym_to_did(did) - - verkey_strat = profile.inject(BaseVerificationKeyStrategy) - verification_method = await verkey_strat.get_verification_method_id_for_did( - did, profile - ) - if not verification_method: - raise ValueError("Could not determine verification method from DID") - else: - # We look up keys by did for now - did = DIDUrl.parse(verification_method).did - if not did: - raise ValueError("DID URL must be absolute") - - async with profile.session() as session: - wallet = session.inject(BaseWallet) - LOGGER.info(f"mso_mdoc sign: {did}") - - did_info = await wallet.get_local_did(did_lookup_name(did)) - key_pair = await wallet._session.handle.fetch_key(did_info.verkey) - jwk_bytes = key_pair.key.get_jwk_secret() - jwk = json.loads(jwk_bytes) - - return mdoc_sign(jwk, headers, payload) - - -def mdoc_sign(jwk: dict, headers: Mapping[str, Any], payload: Mapping[str, Any]) -> str: - """Create a signed mso_mdoc given headers, payload, and private key.""" - pk_dict = { - "KTY": jwk.get("kty") or "", # OKP, EC - "CURVE": jwk.get("crv") or "", # ED25519, P_256 - "ALG": "EdDSA" if jwk.get("kty") == "OKP" else "ES256", - "D": b64_to_bytes(jwk.get("d") or "", True), # EdDSA - "X": b64_to_bytes(jwk.get("x") or "", True), # EdDSA, EcDSA - "Y": b64_to_bytes(jwk.get("y") or "", True), # EcDSA - "KID": os.urandom(32), - } - cose_key = 
def isomdl_mdoc_sign(
    jwk: dict,
    headers: Mapping[str, Any],
    payload: Mapping[str, Any],
    iaca_cert_pem: str,
    iaca_key_pem: str,
) -> str:
    """Create a signed mso_mdoc using isomdl-uniffi.

    Creates and signs a mobile security object (MSO) compliant with
    ISO 18013-5 § 9.1.3. The signing uses ECDSA with P-256 curve (ES256)
    as mandated by ISO 18013-5 § 9.1.3.5 for mDoc cryptographic protection.

    Protocol Compliance:
        - ISO 18013-5 § 9.1.3: Mobile security object (MSO) structure
        - ISO 18013-5 § 9.1.3.5: ECDSA P-256 signature algorithm
        - RFC 8152: COSE signing for MSO authentication
        - RFC 7517: JWK format for key material input

    Args:
        jwk: The signing key in JWK format
        headers: Header parameters including doctype
        payload: The credential data to sign
        iaca_cert_pem: Issuer certificate in PEM format
        iaca_key_pem: Issuer private key in PEM format

    Returns:
        Base64-encoded IssuerSigned CBOR as string

    Raises:
        ValueError: If headers/payload/doctype are missing or signing fails
    """
    if not isinstance(headers, dict):
        raise ValueError("missing headers.")
    if not isinstance(payload, dict):
        raise ValueError("missing payload.")

    doctype = headers.get("doctype")
    # Fail fast with a clear error instead of letting a None/empty doctype
    # reach Mdoc.create_and_sign and surface as an opaque Rust-layer error.
    if not doctype:
        raise ValueError("missing doctype in headers.")

    try:
        holder_jwk = json.dumps(jwk)

        LOGGER.debug("holder_jwk: %s", holder_jwk)
        LOGGER.debug("iaca_cert_pem length: %d", len(iaca_cert_pem))
        LOGGER.debug("iaca_key_pem length: %d", len(iaca_key_pem))

        # If iaca_cert_pem contains a chain (multiple PEM blocks), Rust's
        # x509_cert crate only reads the first certificate and silently drops
        # everything after it. Extract just the signing cert (first block)
        # so Rust always receives a single, unambiguous certificate.
        signing_cert_pem = extract_signing_cert(iaca_cert_pem)
        if signing_cert_pem != iaca_cert_pem:
            LOGGER.info(
                "iaca_cert_pem contained a PEM chain; extracted first certificate "
                "(%d bytes) as the signing cert",
                len(signing_cert_pem),
            )

        # Prepare namespaces based on doctype
        if doctype == "org.iso.18013.5.1.mDL":
            # Use the dedicated mDL constructor — accepts JSON strings and
            # handles CBOR encoding internally (isomdl-uniffi
            # create_and_sign_mdl).
            mdl_items, aamva_items = _prepare_mdl_namespaces(payload)
            LOGGER.info("Creating mDL mdoc via create_and_sign_mdl")
            mdoc = Mdoc.create_and_sign_mdl(
                mdl_items,
                aamva_items,
                holder_jwk,
                signing_cert_pem,
                iaca_key_pem,
            )
        else:
            namespaces = _prepare_generic_namespaces(doctype, payload)
            LOGGER.info("Creating mdoc with namespaces: %s", list(namespaces.keys()))
            mdoc = Mdoc.create_and_sign(
                doctype,
                namespaces,
                holder_jwk,
                signing_cert_pem,
                iaca_key_pem,
            )

        LOGGER.info("Generated mdoc with doctype: %s", mdoc.doctype())

        # Serialize as ISO 18013-5 §8.3 compliant IssuerSigned CBOR (camelCase
        # keys, nameSpaces as arrays). issuer_signed_b64() uses the upstream
        # IssuerSigned struct directly, which carries the correct serde
        # renames, eliminating the need for any post-serialization key
        # patching.
        return mdoc.issuer_signed_b64()

    except Exception as ex:
        LOGGER.error("Failed to create mdoc with isomdl: %r", ex)
        raise ValueError(f"Failed to create mdoc: {ex!r}") from ex


def parse_mdoc(cbor_data: str) -> Mdoc:
    """Parse a CBOR-encoded mDoc string into an Mdoc object.

    Args:
        cbor_data: Serialized mDoc as produced by isomdl-uniffi.

    Returns:
        The parsed ``Mdoc`` object.

    Raises:
        ValueError: If the data cannot be parsed as an mDoc.
    """
    try:
        return Mdoc.from_string(cbor_data)
    except Exception as ex:
        LOGGER.error("Failed to parse mdoc: %s", ex)
        raise ValueError(f"Failed to parse mdoc: {ex}") from ex
# Matches one complete PEM certificate block (including a trailing newline, if any).
_PEM_CERT_RE = re.compile(
    r"-----BEGIN CERTIFICATE-----[A-Za-z0-9+/=\s]+?-----END CERTIFICATE-----\n?",
    re.DOTALL,
)


def split_pem_chain(pem_chain: str) -> List[str]:
    r"""Split a concatenated PEM chain into individual certificate PEM strings.

    The isomdl-uniffi Rust library (and the underlying x509_cert crate)
    reads only the **first** ``-----BEGIN CERTIFICATE-----`` block from a
    PEM string, so a multi-cert chain passed as one string silently loses
    every certificate after the first. That breaks both sides:

    * **Issuer side** – the wrong certificate is embedded in the MSO (the
      signing key no longer corresponds to the embedded cert, so
      verification fails).
    * **Verifier side** – trust-anchor chains are truncated to one cert,
      so any mdoc whose embedded cert is not the single root in the chain
      cannot be verified.

    Normalising input to a flat list of single-cert PEM strings lets each
    element be handed to Rust safely.

    Args:
        pem_chain: Zero or more PEM certificate blocks, possibly
            concatenated with arbitrary whitespace between them.

    Returns:
        One PEM string per certificate; an empty list for blank /
        whitespace-only input.

    Examples::

        split_pem_chain(single_cert_pem)      # [single_cert_pem]
        split_pem_chain(root_pem + leaf_pem)  # [root_pem, leaf_pem]
    """
    stripped = pem_chain.strip() if pem_chain else ""
    if not stripped:
        return []
    return _PEM_CERT_RE.findall(pem_chain)


def extract_signing_cert(pem_chain: str) -> str:
    """Return the first certificate from a PEM chain.

    The issuer's signing certificate (the one whose private key signs the
    MSO) is expected to be the **first** cert in the chain; only that
    single PEM block should be forwarded to ``Mdoc.create_and_sign()``.

    Args:
        pem_chain: One or more concatenated PEM certificate blocks.

    Returns:
        PEM string containing only the first certificate in the chain.

    Raises:
        ValueError: If no certificate block is found in *pem_chain*.
    """
    blocks = split_pem_chain(pem_chain)
    if blocks:
        return blocks[0]
    raise ValueError(
        "No certificate found in provided PEM string. "
        "Expected at least one '-----BEGIN CERTIFICATE-----' block."
    )


def flatten_trust_anchors(trust_anchors: List[str]) -> List[str]:
    """Flatten PEM trust-anchor strings into individual cert PEMs.

    Each element of *trust_anchors* may itself be a concatenated PEM
    chain; the result has exactly one entry per certificate, which is what
    the Rust ``verify_issuer_signature`` / ``verify_oid4vp_response`` APIs
    expect.

    Args:
        trust_anchors: List of PEM strings, each potentially containing
            multiple concatenated certificate blocks.

    Returns:
        Flat list of single-certificate PEM strings.
    """
    return [cert for chain in trust_anchors for cert in split_pem_chain(chain)]
OID4VPPresentation + +from ..storage import MdocStorageManager +from .utils import flatten_trust_anchors LOGGER = logging.getLogger(__name__) -class MdocVerifyResult(BaseModel): - """Result from verify.""" +def extract_mdoc_item_value(item: Any) -> Any: + """Extract the actual value from an MDocItem enum variant. + + MDocItem is a Rust enum exposed via UniFFI with variants: + - TEXT(str) + - BOOL(bool) + - INTEGER(int) + - ARRAY(List[MDocItem]) + - ITEM_MAP(Dict[str, MDocItem]) + + Each variant stores its value in _values[0]. + """ + if item is None: + return None + + # Check if it's an MDocItem variant by checking for _values attribute + if hasattr(item, "_values") and item._values: + inner_value = item._values[0] + + # Handle nested structures recursively + if isinstance(inner_value, dict): + return {k: extract_mdoc_item_value(v) for k, v in inner_value.items()} + elif isinstance(inner_value, list): + return [extract_mdoc_item_value(v) for v in inner_value] + else: + return inner_value + + # Already a plain value + return item + + +def extract_verified_claims(verified_response: dict) -> dict: + """Extract claims from MdlReaderVerifiedData.verified_response. + + The verified_response is structured as: + dict[str, dict[str, MDocItem]] + e.g. {"org.iso.18013.5.1": {"given_name": MDocItem.TEXT("Alice"), ...}} + + This function converts it to: + {"org.iso.18013.5.1": {"given_name": "Alice", ...}} + """ + claims = {} + for namespace, elements in verified_response.items(): + ns_claims = {} + for element_name, mdoc_item in elements.items(): + ns_claims[element_name] = extract_mdoc_item_value(mdoc_item) + claims[namespace] = ns_claims + return claims + + +class TrustStore(Protocol): + """Protocol for retrieving trust anchors.""" + + @abstractmethod + def get_trust_anchors(self) -> List[str]: + """Retrieve trust anchors as PEM strings.""" + ... + + +class WalletTrustStore: + """Trust store implementation backed by Askar wallet storage. 
+ + This implementation stores trust anchor certificates in the ACA-Py + wallet using the MdocStorageManager, providing secure storage that + doesn't require filesystem access or static certificate files. + """ + + def __init__(self, profile: Profile): + """Initialize the wallet trust store. + + Args: + profile: ACA-Py profile for accessing wallet storage + """ + self.profile = profile + self._cached_anchors: Optional[List[str]] = None + + def get_trust_anchors(self) -> List[str]: + """Retrieve trust anchors from wallet storage. + + This method is synchronous to satisfy the TrustStore protocol + expected by the isomdl-uniffi Rust layer. The cache **must** + be populated by ``await refresh_cache()`` before calling this + method (all ACA-Py verification paths do this). + + Returns: + List of PEM-encoded trust anchor certificates + + Raises: + RuntimeError: If called before ``refresh_cache()`` has been + awaited. Always call ``await refresh_cache()`` before + any verification operation. + """ + if self._cached_anchors is not None: + return self._cached_anchors + + raise RuntimeError( + "WalletTrustStore.get_trust_anchors() called before cache was " + "populated. Always await refresh_cache() before verification." + ) + + async def refresh_cache(self) -> List[str]: + """Refresh the cached trust anchors from wallet storage. + + This method should be called before verification operations + when running in an async context. + + Returns: + List of PEM-encoded trust anchor certificates + """ + self._cached_anchors = await self._fetch_trust_anchors() + return self._cached_anchors + + async def _fetch_trust_anchors(self) -> List[str]: + """Fetch trust anchors from wallet storage. 
+ + Returns: + List of PEM-encoded trust anchor certificates + """ + storage_manager = MdocStorageManager(self.profile) + async with self.profile.session() as session: + anchors = await storage_manager.get_all_trust_anchor_pems(session) + LOGGER.debug("Loaded %d trust anchors from wallet", len(anchors)) + return anchors + + def clear_cache(self) -> None: + """Clear the cached trust anchors.""" + self._cached_anchors = None + + +@dataclass +class PreverifiedMdocClaims: + """Typed sentinel wrapping namespaced claims already verified by verify_presentation. + + C-5 fix: replaces a heuristic ``dict`` key-prefix check that could be + bypassed by any caller-controlled dict containing an ``org.iso.*`` key. + Only ``MsoMdocPresVerifier.verify_presentation`` (trusted code) should + construct instances of this class; external callers cannot spoof it. + """ + + claims: dict + + +def _is_preverified_claims_dict(credential: Any) -> bool: + """Return True only when *credential* is a typed :class:`PreverifiedMdocClaims`. + + C-5 fix: the previous heuristic — checking for ``org.iso.*`` key prefixes — + was bypassable by any external caller whose dict happened to contain such a + key. Using a typed sentinel makes the check unforgeable. + """ + return isinstance(credential, PreverifiedMdocClaims) + + +def _parse_string_credential(credential: str) -> tuple[Optional[Any], Optional[str]]: + """Parse a string credential into an Mdoc object. + + Tries multiple formats: hex, base64url IssuerSigned, base64url DeviceResponse. 
+ + Args: + credential: String credential to parse + + Returns: + Tuple of (Parsed Mdoc object or None if parsing fails, error message if any) + """ + last_error = None + + # Try hex first (full DeviceResponse) + try: + if all(c in "0123456789abcdefABCDEF" for c in credential): + LOGGER.debug("Trying to parse credential as hex DeviceResponse") + return isomdl_uniffi.Mdoc.from_string(credential), None + except Exception as hex_err: + last_error = str(hex_err) + LOGGER.debug("Hex parsing failed: %s", hex_err) + + # Try base64url-encoded IssuerSigned + try: + LOGGER.debug("Trying to parse credential as base64url IssuerSigned") + mdoc = isomdl_uniffi.Mdoc.new_from_base64url_encoded_issuer_signed( + credential, "verified-inner" + ) + return mdoc, None + except Exception as issuer_signed_err: + last_error = str(issuer_signed_err) + LOGGER.debug("IssuerSigned parsing failed: %s", issuer_signed_err) + + # Try base64url decoding to hex, then DeviceResponse parsing + try: + LOGGER.debug("Trying to parse credential as base64url DeviceResponse") + padded = ( + credential + "=" * (4 - len(credential) % 4) + if len(credential) % 4 + else credential + ) + standard_b64 = padded.replace("-", "+").replace("_", "/") + decoded_bytes = base64.b64decode(standard_b64) + return isomdl_uniffi.Mdoc.from_string(decoded_bytes.hex()), None + except Exception as b64_err: + last_error = str(b64_err) + LOGGER.debug("Base64 parsing failed: %s", b64_err) + + # Last resort: try direct string parsing + try: + return isomdl_uniffi.Mdoc.from_string(credential), None + except Exception as final_err: + last_error = str(final_err) + return None, last_error + + +def _extract_mdoc_claims(mdoc: Any) -> dict: + """Extract claims from an Mdoc object. 
+ + Args: + mdoc: The Mdoc object + + Returns: + Dictionary of namespaced claims + """ + claims = {} + try: + details = mdoc.details() + LOGGER.debug("mdoc details keys: %s", list(details.keys())) + for namespace, elements in details.items(): + ns_claims = {} + for element in elements: + if element.value: + try: + ns_claims[element.identifier] = json.loads(element.value) + except json.JSONDecodeError: + ns_claims[element.identifier] = element.value + else: + ns_claims[element.identifier] = None + claims[namespace] = ns_claims + except Exception as e: + LOGGER.warning("Failed to extract claims from mdoc: %s", e) + return claims + + +class MsoMdocCredVerifier(CredVerifier): + """Verifier for mso_mdoc credentials.""" + + def __init__(self, trust_store: Optional[TrustStore] = None): + """Initialize the credential verifier.""" + self.trust_store = trust_store + + async def verify_credential( + self, + profile: Profile, + credential: Any, + ) -> VerifyResult: + """Verify an mso_mdoc credential. + + For mso_mdoc format, credentials can arrive in two forms: + 1. Raw credential (bytes/hex string) - parsed and verified via Rust library + 2. 
Pre-verified claims dict - already verified by verify_presentation, + contains namespaced claims extracted from DeviceResponse + + Args: + profile: The profile for context + credential: The credential to verify (bytes, hex string, or claims dict) + + Returns: + VerifyResult: The verification result + """ + try: + # Check if credential is pre-verified claims sentinel + if _is_preverified_claims_dict(credential): + LOGGER.debug("Credential is pre-verified claims dict from presentation") + return VerifyResult(verified=True, payload=credential.claims) + + # Parse credential to Mdoc object + mdoc = None + parse_error = None + if isinstance(credential, str): + mdoc, parse_error = _parse_string_credential(credential) + elif isinstance(credential, bytes): + try: + mdoc = isomdl_uniffi.Mdoc.from_string(credential.hex()) + except Exception as e: + parse_error = str(e) + + if not mdoc: + if parse_error: + error_msg = f"Invalid credential format: {parse_error}" + else: + error_msg = "Invalid credential format" + return VerifyResult(verified=False, payload={"error": error_msg}) + + # Refresh trust store cache if needed + if self.trust_store and isinstance(self.trust_store, WalletTrustStore): + await self.trust_store.refresh_cache() + + trust_anchors = ( + self.trust_store.get_trust_anchors() if self.trust_store else [] + ) + + # Flatten any concatenated PEM chains into individual cert PEMs. + # isomdl_uniffi (x509_cert) reads only the first certificate in a + # PEM string; passing a chain as one element silently drops all + # certs after the first, breaking trust-anchor validation. + if trust_anchors: + trust_anchors = flatten_trust_anchors(trust_anchors) + + # Fail-closed guard: refuse to verify without at least one trust + # anchor. An empty list causes the Rust library to accept any + # self-signed issuer certificate, effectively disabling chain + # validation and allowing an attacker to present forgeries. 
+ if not trust_anchors: + return VerifyResult( + verified=False, + payload={ + "error": "No trust anchors configured; credential " + "verification requires at least one trust anchor." + }, + ) + + # Verify issuer signature + try: + verification_result = mdoc.verify_issuer_signature(trust_anchors, True) + + if verification_result.verified: + claims = _extract_mdoc_claims(mdoc) + payload = { + "status": "verified", + "doctype": mdoc.doctype(), + "id": str(mdoc.id()), + "issuer_common_name": verification_result.common_name, + } + payload.update(claims) + LOGGER.debug("Mdoc Payload: %s", json.dumps(payload)) + return VerifyResult(verified=True, payload=payload) + else: + return VerifyResult( + verified=False, + payload={ + "error": verification_result.error + or "Signature verification failed", + "doctype": mdoc.doctype(), + "id": str(mdoc.id()), + }, + ) + except isomdl_uniffi.MdocVerificationError as e: + LOGGER.error("Issuer signature verification failed: %s", e) + return VerifyResult( + verified=False, + payload={ + "error": str(e), + "doctype": mdoc.doctype(), + "id": str(mdoc.id()), + }, + ) - class Meta: - """MdocVerifyResult metadata.""" + except Exception as e: + LOGGER.error("Failed to parse mdoc credential: %s", e) + return VerifyResult(verified=False, payload={"error": str(e)}) - schema_class = "MdocVerifyResultSchema" + +def _normalize_presentation_input(presentation: Any) -> tuple[list, bool]: + """Normalize presentation input to a list. + + Args: + presentation: The presentation data + + Returns: + Tuple of (list of presentations, is_list_input flag) + """ + if isinstance(presentation, str): + try: + parsed = json.loads(presentation) + if isinstance(parsed, list): + return parsed, True + except json.JSONDecodeError: + pass + return [presentation], False + elif isinstance(presentation, list): + return presentation, True + return [presentation], False + + +def _decode_presentation_bytes(pres_item: Any) -> bytes: + """Decode presentation item to bytes. 
+ + Args: + pres_item: The presentation item (string or bytes) + + Returns: + Decoded bytes + + Raises: + PresVerifierError: If unable to decode to bytes + """ + if isinstance(pres_item, bytes): + return pres_item + + if isinstance(pres_item, str): + # Try base64url decode + try: + return base64.urlsafe_b64decode(pres_item + "=" * (-len(pres_item) % 4)) + except (ValueError, TypeError): + pass + # Try hex decode + try: + return bytes.fromhex(pres_item) + except (ValueError, TypeError): + pass + + raise PresVerifierError("Presentation must be bytes or base64/hex string") + + +async def _get_oid4vp_verification_params( + profile: Profile, + presentation_record: "OID4VPPresentation", +) -> tuple[str, str, str]: + """Get OID4VP verification parameters. + + Args: + profile: The profile + presentation_record: The presentation record + + Returns: + Tuple of (nonce, client_id, response_uri) + """ + nonce = presentation_record.nonce + config = Config.from_settings(profile.settings) + + async with profile.session() as session: + jwk = await retrieve_or_create_did_jwk(session) + + client_id = jwk.did + + wallet_id = ( + profile.settings.get("wallet.id") + if profile.settings.get("multitenant.enabled") + else None + ) + subpath = f"/tenant/{wallet_id}" if wallet_id else "" + response_uri = ( + f"{config.endpoint}{subpath}/oid4vp/response/" + f"{presentation_record.presentation_id}" + ) + + return nonce, client_id, response_uri + + +def _verify_single_presentation( + response_bytes: bytes, + nonce: str, + client_id: str, + response_uri: str, + trust_anchor_registry: List[str], +) -> Any: + """Verify a single OID4VP presentation. 
+ + Args: + response_bytes: The presentation bytes + nonce: The nonce + client_id: The client ID + response_uri: The response URI + trust_anchor_registry: JSON-serialized PemTrustAnchor strings, each of the form + '{"certificate_pem": "...", "purpose": "Iaca"}' + + Returns: + Verified payload dict if successful, None if failed + """ + LOGGER.debug( + "Calling verify_oid4vp_response with: " + "nonce=%s client_id=%s response_uri=%s " + "response_bytes_len=%d", + nonce, + client_id, + response_uri, + len(response_bytes), + ) + + # Try spec-compliant format (2024) first + verified_data = isomdl_uniffi.verify_oid4vp_response( + response_bytes, + nonce, + client_id, + response_uri, + trust_anchor_registry, + True, + ) + + # If device auth failed but issuer is valid, try legacy format + if ( + verified_data.device_authentication != isomdl_uniffi.AuthenticationStatus.VALID + and verified_data.issuer_authentication + == isomdl_uniffi.AuthenticationStatus.VALID + ): + if hasattr(isomdl_uniffi, "verify_oid4vp_response_legacy"): + LOGGER.info( + "Device auth failed with spec-compliant format, trying legacy 2023 format" + ) + verified_data = isomdl_uniffi.verify_oid4vp_response_legacy( + response_bytes, + nonce, + client_id, + response_uri, + trust_anchor_registry, + True, + ) + else: + LOGGER.warning( + "Device auth failed and legacy format not available in isomdl_uniffi" + ) + + return verified_data + + +class MsoMdocPresVerifier(PresVerifier): + """Verifier for mso_mdoc presentations (OID4VP).""" + + def __init__(self, trust_store: Optional[TrustStore] = None): + """Initialize the presentation verifier.""" + self.trust_store = trust_store + + def _parse_jsonpath(self, path: str) -> List[str]: + """Parse JSONPath to extract segments.""" + # Handle $['namespace']['element'] format + if "['" in path: + return [ + p.strip("]['\"") + for p in path.split("['") + if p.strip("]['\"") and p != "$" + ] + + # Handle $.namespace.element format + clean = path.replace("$", "") + if 
clean.startswith("."): + clean = clean[1:] + return clean.split(".") + + async def verify_presentation( + self, + profile: Profile, + presentation: Any, + presentation_record: OID4VPPresentation, + ) -> VerifyResult: + """Verify an mso_mdoc presentation. + + Args: + profile: The profile for context + presentation: The presentation data (bytes) + presentation_record: The presentation record containing request info + + Returns: + VerifyResult: The verification result + """ + try: + # 1. Prepare Trust Anchors + if self.trust_store and isinstance(self.trust_store, WalletTrustStore): + await self.trust_store.refresh_cache() + + trust_anchors = ( + self.trust_store.get_trust_anchors() if self.trust_store else [] + ) + LOGGER.debug( + "Trust anchors loaded: %d cert(s)", + len(trust_anchors) if trust_anchors else 0, + ) + for i, pem in enumerate(trust_anchors or []): + pem_stripped = pem.strip() if pem else "" + LOGGER.debug( + "Trust anchor %d: len=%d", + i, + len(pem_stripped), + ) + # Validate that the PEM is parseable by Python before + # passing to Rust + try: + from cryptography import x509 as _x509 # noqa: PLC0415 + + _x509.load_pem_x509_certificate(pem_stripped.encode()) + except Exception as pem_err: + LOGGER.error( + "Trust anchor %d: PEM validation FAILED: %s", + i, + pem_err, + ) + + # Flatten concatenated PEM chains into individual certs BEFORE + # building the registry. Rust (x509_cert) only reads the first + # PEM block from a string; any additional certs in a chain string + # are silently dropped, breaking trust-anchor validation. + if trust_anchors: + trust_anchors = flatten_trust_anchors(trust_anchors) + LOGGER.debug( + "Trust anchors after chain-splitting: %d individual cert(s)", + len(trust_anchors), + ) + + # Fail-closed guard: refuse to verify without at least one trust + # anchor. An empty list causes Rust to accept any self-signed + # issuer certificate, bypassing chain validation entirely. 
+ if not trust_anchors: + return VerifyResult( + verified=False, + payload={ + "error": "No trust anchors configured; presentation " + "verification requires at least one trust anchor." + }, + ) + + # verify_oid4vp_response expects JSON-serialized PemTrustAnchor per anchor: + # {"certificate_pem": "...", "purpose": "Iaca"} + # Rust parses each string via serde_json::from_str::(). + trust_anchor_registry = ( + [ + json.dumps({"certificate_pem": pem, "purpose": "Iaca"}) + for pem in trust_anchors + ] + if trust_anchors + else [] + ) + if trust_anchor_registry: + LOGGER.debug( + "trust_anchor_registry[0] first100: %r", + trust_anchor_registry[0][:100], + ) + + # 2. Get verification parameters + nonce, client_id, response_uri = await _get_oid4vp_verification_params( + profile, presentation_record + ) + + # 3. Normalize presentation input + presentations_to_verify, is_list_input = _normalize_presentation_input( + presentation + ) + + verified_payloads = [] + + for pres_item in presentations_to_verify: + LOGGER.debug( + "vp_token type=%s len=%s", + type(pres_item).__name__, + len(pres_item) if hasattr(pres_item, "__len__") else "N/A", + ) + + response_bytes = _decode_presentation_bytes(pres_item) + + verified_data = _verify_single_presentation( + response_bytes, + nonce, + client_id, + response_uri, + trust_anchor_registry, + ) + + # Per ISO 18013-5, deviceSigned is optional (marked with '?' in + # the CDDL). For OID4VP web-wallet flows a device key binding + # round-trip is not performed, so device_authentication will not + # be VALID. Issuer authentication is sufficient to trust that + # the credential was issued by a known authority. 
+ issuer_ok = ( + verified_data.issuer_authentication + == isomdl_uniffi.AuthenticationStatus.VALID + ) + device_ok = ( + verified_data.device_authentication + == isomdl_uniffi.AuthenticationStatus.VALID + ) + + if issuer_ok: + if not device_ok: + LOGGER.info( + "Device authentication not present/valid (issuer-only " + "OID4VP presentation — deviceSigned is optional per " + "ISO 18013-5): Device=%s", + verified_data.device_authentication, + ) + try: + claims = extract_verified_claims(verified_data.verified_response) + except Exception as e: + LOGGER.warning("Failed to extract claims: %s", e) + claims = {} + + payload = { + "status": "verified", + "docType": verified_data.doc_type, + "issuer_auth": str(verified_data.issuer_authentication), + "device_auth": str(verified_data.device_authentication), + } + payload.update(claims) + verified_payloads.append(PreverifiedMdocClaims(claims=payload)) + else: + LOGGER.error( + "Verification failed: Issuer=%s, Device=%s, Errors=%s", + verified_data.issuer_authentication, + verified_data.device_authentication, + verified_data.errors, + ) + try: + claims = extract_verified_claims(verified_data.verified_response) + except Exception: + claims = {} + + return VerifyResult( + verified=False, + payload={ + "error": verified_data.errors, + "issuer_auth": str(verified_data.issuer_authentication), + "device_auth": str(verified_data.device_authentication), + "claims": claims, + }, + ) + + # Return list if input was list, otherwise single item + payload = verified_payloads + if not is_list_input and len(verified_payloads) == 1: + payload = verified_payloads[0] + + return VerifyResult(verified=True, payload=payload) + + except Exception as e: + LOGGER.exception("Error verifying mdoc presentation") + return VerifyResult(verified=False, payload={"error": str(e)}) + + +class MdocVerifyResult: + """Result of mdoc verification.""" def __init__( self, - headers: Mapping[str, Any], - payload: Mapping[str, Any], - valid: bool, - kid: str, + 
verified: bool, + payload: Optional[dict] = None, + error: Optional[str] = None, ): - """Initialize a MdocVerifyResult instance.""" - self.headers = headers + """Initialize the verification result.""" + self.verified = verified self.payload = payload - self.valid = valid - self.kid = kid + self.error = error + def serialize(self): + """Serialize the result to a dictionary.""" + return { + "verified": self.verified, + "payload": self.payload, + "error": self.error, + } -class MdocVerifyResultSchema(BaseModelSchema): - """MdocVerifyResult schema.""" - class Meta: - """MdocVerifyResultSchema metadata.""" +def mdoc_verify( + mso_mdoc: str, trust_anchors: Optional[List[str]] = None +) -> MdocVerifyResult: + """Verify an mso_mdoc credential. - model_class = MdocVerifyResult + Accepts mDOC strings in any format understood by ``_parse_string_credential``: + hex-encoded DeviceResponse, base64url IssuerSigned, or raw base64. - headers = fields.Dict( - required=False, metadata={"description": "Headers from verified mso_mdoc."} - ) - payload = fields.Dict( - required=True, metadata={"description": "Payload from verified mso_mdoc"} - ) - valid = fields.Bool(required=True) - kid = fields.Str(required=False, metadata={"description": "kid of signer"}) - error = fields.Str(required=False, metadata={"description": "Error text"}) + Args: + mso_mdoc: The mDOC string (hex, base64url, or base64). + trust_anchors: Optional list of PEM-encoded trust anchor certificates. + Each element may contain a single cert or a concatenated PEM chain; + chains are automatically split before being passed to Rust. + Returns: + MdocVerifyResult: The verification result. 
+ """ + try: + # Parse the mdoc — try all supported formats + mdoc, parse_error = _parse_string_credential(mso_mdoc) + if not mdoc: + return MdocVerifyResult( + verified=False, + error=f"Failed to parse mDOC: {parse_error or 'unknown format'}", + ) -async def mso_mdoc_verify(profile: Profile, mdoc_str: str) -> MdocVerifyResult: - """Verify a mso_mdoc CBOR string.""" - result = mdoc_verify(mdoc_str) - verkey = result.kid + # Flatten concatenated PEM chains so Rust receives one cert per list + # entry (isomdl_uniffi only reads the first PEM block in a string). + if trust_anchors: + trust_anchors = flatten_trust_anchors(trust_anchors) - async with profile.session() as session: - wallet = session.inject(BaseWallet) + # Fail-closed guard: refuse to verify without at least one trust anchor. + if not trust_anchors: + return MdocVerifyResult( + verified=False, + error="No trust anchors configured; mDOC verification requires " + "at least one trust anchor.", + ) + + # Verify issuer signature try: - did_info = await wallet.get_local_did_for_verkey(verkey) - except WalletNotFoundError: - did_info = None - verification_method = did_info.did if did_info else "" - result.kid = verification_method - - return result - - -def mdoc_verify(mdoc_str: str) -> MdocVerifyResult: - """Verify a mso_mdoc CBOR string.""" - mdoc_bytes = unhexlify(mdoc_str) - mso_mdoc = cbor2.loads(mdoc_bytes) - mso_verifier = MsoVerifier(mso_mdoc["documents"][0]["issuerSigned"]["issuerAuth"]) - valid = mso_verifier.verify_signature() - - headers = {} - mdoc_str = str(cbor2diag(mdoc_bytes)).replace("\n", "").replace("h'", "'") - mdoc_str = re.sub(r'\s+(?=(?:[^"]*"[^"]*")*[^"]*$)', "", mdoc_str) - payload = {"mso_mdoc": mdoc_str} - - if isinstance(mso_verifier.public_key, Ed25519PublicKey): - public_bytes = mso_verifier.public_key.public_bytes_raw() - elif isinstance(mso_verifier.public_key, EllipticCurvePublicKey): - public_bytes = mso_verifier.public_key.public_bytes( - Encoding.DER, 
PublicFormat.SubjectPublicKeyInfo - ) - verkey = bytes_to_b58(public_bytes) + # Enable intermediate certificate chaining by default + verification_result = mdoc.verify_issuer_signature(trust_anchors, True) + + if verification_result.verified: + return MdocVerifyResult( + verified=True, + payload={ + "status": "verified", + "doctype": mdoc.doctype(), + "issuer_common_name": verification_result.common_name, + }, + ) + else: + return MdocVerifyResult( + verified=False, + payload={"doctype": mdoc.doctype()}, + error=verification_result.error or "Signature verification failed", + ) + except isomdl_uniffi.MdocVerificationError as e: + return MdocVerifyResult( + verified=False, + payload={"doctype": mdoc.doctype()}, + error=str(e), + ) - return MdocVerifyResult(headers, payload, valid, verkey) + except Exception as e: + return MdocVerifyResult(verified=False, error=str(e)) diff --git a/oid4vc/mso_mdoc/mso/__init__.py b/oid4vc/mso_mdoc/mso/__init__.py deleted file mode 100644 index 213d0895f..000000000 --- a/oid4vc/mso_mdoc/mso/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""MSO module.""" - -from .issuer import MsoIssuer -from .verifier import MsoVerifier - -__all__ = ["MsoIssuer", "MsoVerifier"] diff --git a/oid4vc/mso_mdoc/mso/issuer.py b/oid4vc/mso_mdoc/mso/issuer.py deleted file mode 100644 index ab6707ce8..000000000 --- a/oid4vc/mso_mdoc/mso/issuer.py +++ /dev/null @@ -1,120 +0,0 @@ -"""MsoIssuer helper class to issue a mso.""" - -from typing import Union -import logging -from datetime import datetime, timedelta, timezone -import random -import hashlib -import os -import cbor2 -from pycose.headers import Algorithm, KID -from pycose.keys import CoseKey -from pycose.messages import Sign1Message - -LOGGER = logging.getLogger(__name__) -DIGEST_SALT_LENGTH = 32 -CBORTAGS_ATTR_MAP = {"birth_date": 1004, "expiry_date": 1004, "issue_date": 1004} - - -def shuffle_dict(d: dict): - """Shuffle a dictionary.""" - keys = list(d.keys()) - for i in range(random.randint(3, 27)): # 
nosec: B311 - random.shuffle(keys) - return {key: d[key] for key in keys} - - -class MsoIssuer: - """MsoIssuer helper class to issue a mso.""" - - def __init__( - self, - data: dict, - private_key: CoseKey, - x509_cert: str, - digest_alg: str = "sha256", - ): - """Constructor.""" - - self.data: dict = data - self.hash_map: dict = {} - self.disclosure_map: dict = {} - self.digest_alg: str = digest_alg - self.private_key: CoseKey = private_key - self.x509_cert = x509_cert - - hashfunc = getattr(hashlib, self.digest_alg) - - digest_cnt = 0 - for ns, values in data.items(): - if not isinstance(values, dict): - continue - self.disclosure_map[ns] = {} - self.hash_map[ns] = {} - - for k, v in shuffle_dict(values).items(): - _rnd_salt = os.urandom(32) - _value_cbortag = CBORTAGS_ATTR_MAP.get(k, None) - - if _value_cbortag: - v = cbor2.CBORTag(_value_cbortag, v) - - self.disclosure_map[ns][digest_cnt] = { - "digestID": digest_cnt, - "random": _rnd_salt, - "elementIdentifier": k, - "elementValue": v, - } - self.hash_map[ns][digest_cnt] = hashfunc( - cbor2.dumps(cbor2.CBORTag(24, self.disclosure_map[ns][digest_cnt])) - ).digest() - - digest_cnt += 1 - - def format_datetime_repr(self, dt: datetime) -> str: - """Format a datetime object to a string representation.""" - return dt.isoformat().split(".")[0] + "Z" - - def sign( - self, - device_key: Union[dict, None] = None, - valid_from: Union[None, datetime] = None, - doctype: str = None, - ) -> Sign1Message: - """Sign a mso and returns it in Sign1Message type.""" - utcnow = datetime.now(timezone.utc) - exp = utcnow + timedelta(hours=(24 * 365)) - - payload = { - "version": "1.0", - "digestAlgorithm": self.digest_alg, - "valueDigests": self.hash_map, - "deviceKeyInfo": {"deviceKey": device_key}, - "docType": doctype or list(self.hash_map)[0], - "validityInfo": { - "signed": cbor2.dumps( - cbor2.CBORTag(0, self.format_datetime_repr(utcnow)) - ), - "validFrom": cbor2.dumps( - cbor2.CBORTag(0, self.format_datetime_repr(valid_from or 
utcnow)) - ), - "validUntil": cbor2.dumps( - cbor2.CBORTag(0, self.format_datetime_repr(exp)) - ), - }, - } - mso = Sign1Message( - phdr={ - Algorithm: self.private_key.alg, - KID: self.private_key.kid, - 33: self.x509_cert, - }, - # TODO: x509 (cbor2.CBORTag(33)) and federation trust_chain support - # (cbor2.CBORTag(27?)) here - # 33 means x509chain standing to rfc9360 - # in both protected and unprotected for interop purpose .. for now. - uhdr={33: self.x509_cert}, - payload=cbor2.dumps(payload), - ) - mso.key = self.private_key - return mso diff --git a/oid4vc/mso_mdoc/mso/verifier.py b/oid4vc/mso_mdoc/mso/verifier.py deleted file mode 100644 index b001dc000..000000000 --- a/oid4vc/mso_mdoc/mso/verifier.py +++ /dev/null @@ -1,60 +0,0 @@ -"""MsoVerifier helper class to verify a mso.""" - -import logging -from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat -from pycose.keys import CoseKey -from pycose.messages import Sign1Message -import cryptography -import cbor2 - - -LOGGER = logging.getLogger(__name__) - - -class MsoVerifier: - """MsoVerifier helper class to verify a mso.""" - - def __init__(self, data: cbor2.CBORTag) -> None: - """Create a new MsoParser instance.""" - if isinstance(data, list): - data = cbor2.dumps(cbor2.CBORTag(18, value=data)) - - self.object: Sign1Message = Sign1Message.decode(data) - self.public_key = None - self.x509_certificates: list = [] - - @property - def raw_public_keys(self) -> bytes: - """Extract public key from x509 certificates.""" - _mixed_heads = list(self.object.phdr.items()) + list(self.object.uhdr.items()) - for h, v in _mixed_heads: - if h.identifier == 33: - return list(self.object.uhdr.values()) - - def attest_public_key(self) -> None: - """Asstest public key.""" - LOGGER.warning( - "TODO: in next releases. " - "The certificate is to be considered as untrusted, this release " - "doesn't validate x.509 certificate chain. See next releases and " - "python certvalidator or cryptography for that." 
- ) - - def load_public_key(self) -> None: - """Load the public key from the x509 certificate.""" - self.attest_public_key() - - for i in self.raw_public_keys: - self.x509_certificates.append(cryptography.x509.load_der_x509_certificate(i)) - - self.public_key = self.x509_certificates[0].public_key() - pem_public = self.public_key.public_bytes( - Encoding.PEM, PublicFormat.SubjectPublicKeyInfo - ).decode() - self.object.key = CoseKey.from_pem_public_key(pem_public) - - def verify_signature(self) -> bool: - """Verify the signature.""" - self.load_public_key() - - return self.object.verify_signature() diff --git a/oid4vc/mso_mdoc/payload.py b/oid4vc/mso_mdoc/payload.py new file mode 100644 index 000000000..24b7777e0 --- /dev/null +++ b/oid4vc/mso_mdoc/payload.py @@ -0,0 +1,128 @@ +"""Payload preparation and result normalisation for mso_mdoc credential issuance. + +Provides two module-level helpers consumed by ``MsoMdocCredProcessor.issue``: + +- ``prepare_mdoc_payload`` — flattens a namespaced credential-subject dict into + the flat structure expected by isomdl and base64-encodes binary fields such + as ``portrait``. +- ``normalize_mdoc_result`` — converts the raw return value of + ``isomdl_mdoc_sign`` (which may be bytes, a ``b'...'`` string, or a plain + string) into a consistent plain string for storage and transmission. +""" + +import base64 +import json +import logging +from typing import Any, Dict, Optional + +from oid4vc.cred_processor import CredProcessorError + +LOGGER = logging.getLogger(__name__) + + +def prepare_mdoc_payload( + payload: Dict[str, Any], doctype: Optional[str] = None +) -> Dict[str, Any]: + """Prepare a credential-subject payload for mDoc issuance. + + Performs two transformations: + + 1. **Doctype flattening** — if the payload contains a top-level key equal + to ``doctype`` whose value is a dict (namespace-wrapped claims), those + claims are merged into the top-level dict. 
A warning is emitted when + any existing top-level key would be overwritten. + + 2. **Portrait encoding** — if a ``portrait`` field is present as + ``bytes`` or a list of integers, it is base64-encoded to a string as + required by the isomdl-uniffi Rust library. + + Args: + payload: Raw credential-subject dictionary from the exchange record. + doctype: Document type string (e.g. ``"org.iso.18013.5.1.mDL"``). + When provided and present as a key in ``payload``, the nested + dict under that key is flattened into the top level. + + Returns: + Transformed payload dict ready to pass to ``isomdl_mdoc_sign``. + """ + prepared = payload.copy() + + if doctype and doctype in prepared: + doctype_claims = prepared.pop(doctype) + if isinstance(doctype_claims, dict): + conflicts = set(doctype_claims.keys()) & set(prepared.keys()) + if conflicts: + LOGGER.warning( + "Payload namespace flattening for doctype '%s': " + "top-level keys %s will be overwritten by doctype claims", + doctype, + sorted(conflicts), + ) + LOGGER.debug( + "Flattening doctype wrapper '%s' (%d claims) into top-level payload", + doctype, + len(doctype_claims), + ) + prepared.update(doctype_claims) + + if "portrait" in prepared: + portrait = prepared["portrait"] + if isinstance(portrait, bytes): + prepared["portrait"] = base64.b64encode(portrait).decode("utf-8") + elif isinstance(portrait, list): + try: + prepared["portrait"] = base64.b64encode(bytes(portrait)).decode("utf-8") + except Exception: + pass # leave as-is; isomdl will surface the error + + return prepared + + +def normalize_mdoc_result(result: Any) -> str: + """Normalise the raw return value of ``isomdl_mdoc_sign`` to a plain string. + + The isomdl-uniffi Rust library may return bytes, a ``b'...'``-style string + literal, or a plain string depending on the binding version. This function + normalises all three forms so callers always receive a consistent string. + + Args: + result: Raw value returned by ``isomdl_mdoc_sign``. 
+ + Returns: + Normalised string representation of the signed mDoc credential. + + Raises: + CredProcessorError: If ``result`` is ``None`` or cannot be converted. + """ + if result is None: + raise CredProcessorError( + "mDoc signing returned None result. " + "Check key material and payload format." + ) + + if isinstance(result, bytes): + try: + return result.decode("utf-8") + except UnicodeDecodeError as e: + raise CredProcessorError( + f"Failed to decode mDoc bytes result: {e}. " + "Result may contain binary data requiring base64 encoding." + ) from e + + if isinstance(result, str): + if result.startswith("b'") and result.endswith("'"): + # Strip the b'...' wrapper. Do NOT use codecs.decode with + # "unicode_escape" — that interprets escape sequences in + # attacker-controlled input and can be exploited for code-path + # attacks. The hex/base64 output of isomdl-uniffi is plain ASCII. + return result[2:-1] + if result.startswith('b"') and result.endswith('"'): + return result[2:-1] + return result + + try: + return str(result) + except Exception as e: + raise CredProcessorError( + f"Failed to normalize mDoc result of type {type(result).__name__}: {e}" + ) from e diff --git a/oid4vc/mso_mdoc/routes.py b/oid4vc/mso_mdoc/routes.py index 6e5574cdb..26f41f24c 100644 --- a/oid4vc/mso_mdoc/routes.py +++ b/oid4vc/mso_mdoc/routes.py @@ -1,26 +1,41 @@ -"""mso_mdoc admin routes.""" +"""mso_mdoc admin routes. + +Provides REST API endpoints for ISO/IEC 18013-5:2021 compliant mobile document +(mDoc) operations including signing and verification. These endpoints implement +the mobile security object (MSO) format for secure credential issuance and +verification as specified in the ISO 18013-5 standard. 
+ +Protocol Compliance: +- ISO/IEC 18013-5:2021: Mobile driving licence (mDL) application +- RFC 8152: CBOR Object Signing and Encryption (COSE) +- RFC 8949: Concise Binary Object Representation (CBOR) +""" import logging from acapy_agent.admin.request_context import AdminRequestContext -from acapy_agent.messaging.jsonld.error import ( - BadJWSHeaderError, - InvalidVerificationMethod, -) from acapy_agent.messaging.models.openapi import OpenAPISchema -from acapy_agent.messaging.valid import ( - GENERIC_DID_EXAMPLE, - GENERIC_DID_VALIDATE, - Uri, -) -from acapy_agent.resolver.base import ResolverError +from acapy_agent.messaging.valid import GENERIC_DID_EXAMPLE, GENERIC_DID_VALIDATE, Uri from aiohttp import web from aiohttp_apispec import docs, request_schema, response_schema from marshmallow import fields -from .mdoc import mso_mdoc_sign, mso_mdoc_verify - +from .cred_processor import MsoMdocCredProcessor +from .key_generation import pem_from_jwk +from .key_routes import register_key_routes +from .trust_anchor_routes import register_trust_anchor_routes +from .mdoc import isomdl_mdoc_sign +from .mdoc import mdoc_verify as mso_mdoc_verify +from .storage import MdocStorageManager + +# OpenID4VCI 1.0 § E.1.1: mso_mdoc Credential Format +# https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0.html#appendix-E.1.1 +# ISO/IEC 18013-5:2021 official specification URI SPEC_URI = "https://www.iso.org/obp/ui/#iso:std:iso-iec:18013:-5:dis:ed-1:v1:en" +OID4VCI_SPEC_URI = ( + "https://openid.net/specs/openid-4-verifiable-credential-issuance-" + "1_0.html#appendix-E.1.1" +) LOGGER = logging.getLogger(__name__) @@ -36,7 +51,10 @@ class MdocCreateSchema(OpenAPISchema): did = fields.Str( required=False, validate=GENERIC_DID_VALIDATE, - metadata={"description": "DID of interest", "example": GENERIC_DID_EXAMPLE}, + metadata={ + "description": "DID of interest", + "example": GENERIC_DID_EXAMPLE, + }, ) verification_method = fields.Str( data_key="verificationMethod", @@ 
-67,72 +85,180 @@ class MdocVerifyResponseSchema(OpenAPISchema): error = fields.Str(required=False, metadata={"description": "Error text"}) kid = fields.Str(required=True, metadata={"description": "kid of signer"}) headers = fields.Dict( - required=True, metadata={"description": "Headers from verified mso_mdoc."} + required=True, + metadata={"description": "Headers from verified mso_mdoc."}, ) payload = fields.Dict( - required=True, metadata={"description": "Payload from verified mso_mdoc"} + required=True, + metadata={"description": "Payload from verified mso_mdoc"}, ) @docs( tags=["mso_mdoc"], - summary="Creates mso_mdoc CBOR encoded binaries according to ISO 18013-5", + summary=( + "Creates mso_mdoc CBOR encoded binaries according to ISO 18013-5 and" + " OpenID4VCI 1.0" + ), ) @request_schema(MdocCreateSchema) @response_schema(MdocPluginResponseSchema(), description="") async def mdoc_sign(request: web.BaseRequest): - """Request handler for sd-jws creation using did. + """Request handler for ISO 18013-5 mDoc credential signing. + + Creates and signs a mobile document (mDoc) credential following both + ISO 18013-5 mobile document format and OpenID4VCI 1.0 mso_mdoc credential format. 
+ + This endpoint implements the complete mDoc issuance workflow including: + - Credential payload validation and formatting + - ECDSA key resolution and validation + - MSO (Mobile Security Object) creation + - COSE signing with ES256 algorithm + - CBOR encoding for compact binary representation + + Protocol Compliance: + - OpenID4VCI 1.0 § E.1.1: mso_mdoc Credential Format + https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0.html#appendix-E.1.1 + - ISO 18013-5 § 8.3: Mobile document structure + - ISO 18013-5 § 9.1.2: IssuerSigned data structure + - RFC 8152: COSE signing for cryptographic protection + - RFC 8949: CBOR encoding for compact binary representation + + Request Body: + { + "headers": { Optional headers for the mDoc MSO }, + "payload": { The credential claims per ISO 18013-5 § 8.3 }, + "did": { Optional DID for issuer identification }, + "verificationMethod": { Optional verification method URI } + } - Args: - request: The web request object. + Returns: + JSON response with signed mDoc credential or error details - "headers": { ... }, - "payload": { ... }, - "did": "did:example:123", - "verificationMethod": "did:example:123#keys-1" - with did and verification being mutually exclusive. 
+ Raises: + web.HTTPBadRequest: If request payload is invalid or malformed + web.HTTPUnprocessableEntity: If credential data validation fails + web.HTTPInternalServerError: If signing operation fails + Example: + POST /oid4vc/mdoc/sign + { + "payload": { + "doctype": "org.iso.18013.5.1.mDL", + "claims": { + "org.iso.18013.5.1": { + "family_name": "Doe", + "given_name": "John" + } + } + } + } """ context: AdminRequestContext = request["context"] body = await request.json() - did = body.get("did") verification_method = body.get("verificationMethod") headers = body.get("headers", {}) payload = body.get("payload", {}) try: - mso_mdoc = await mso_mdoc_sign( - context.profile, headers, payload, did, verification_method + # Delegate key resolution entirely to the credential processor, which + # handles env-var static keys, verification-method lookup, default-key + # fallback, and on-demand generation — avoiding duplicated logic. + processor = MsoMdocCredProcessor() + storage_manager = MdocStorageManager(context.profile) + + async with context.profile.session() as session: + key_data = await processor._resolve_signing_key( + context, session, verification_method + ) + signing_jwk = key_data.get("jwk") + key_id = key_data.get("key_id") + private_key_pem = key_data.get("metadata", {}).get("private_key_pem") + + if not private_key_pem: + # C-1: reconstruct PEM from the JWK 'd' parameter instead of + # relying on a redundant PEM blob stored in metadata. + signing_jwk = key_data.get("jwk", {}) + if signing_jwk.get("d"): + private_key_pem = pem_from_jwk(signing_jwk) + + if not private_key_pem: + raise ValueError("Private key PEM not found for signing key") + + # Fetch or generate certificate + certificate_pem = await storage_manager.get_certificate_for_key( + session, key_id + ) + + if not certificate_pem: + raise ValueError( + f"Certificate not found for key {key_id!r}. " + "Keys must be registered with a certificate before use." 
+ ) + + mso_mdoc = isomdl_mdoc_sign( + signing_jwk, headers, payload, certificate_pem, private_key_pem ) except ValueError as err: - raise web.HTTPBadRequest(reason="Bad did or verification method") from err + raise web.HTTPBadRequest(reason=str(err)) from err + except Exception as err: + # M-6: catch all errors from signing (StorageError, CredProcessorError, + # isomdl_uniffi exceptions, etc.) so callers always get a structured + # HTTP error instead of a 500 with an unformatted traceback. + LOGGER.exception("mdoc_sign failed: %s", err) + raise web.HTTPInternalServerError(reason=f"mDoc signing failed: {err}") from err return web.json_response(mso_mdoc) @docs( tags=["mso_mdoc"], - summary="Verify mso_mdoc CBOR encoded binaries according to ISO 18013-5", + summary=( + "Verify mso_mdoc CBOR encoded binaries according to ISO 18013-5 and" + " OpenID4VCI 1.0" + ), ) @request_schema(MdocVerifySchema()) @response_schema(MdocVerifyResponseSchema(), 200, description="") async def mdoc_verify(request: web.BaseRequest): - """Request handler for mso_mdoc validation. + """Request handler for ISO 18013-5 mDoc verification. + + Performs cryptographic verification of a mobile document (mDoc) including + validation of the mobile security object (MSO) signature and structure + compliance with both ISO 18013-5 and OpenID4VCI 1.0 requirements. + + Protocol Compliance: + - OpenID4VCI 1.0 § E.1.1: mso_mdoc Credential Format verification + https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0.html#appendix-E.1.1 + - ISO 18013-5 § 9.1.4: MSO signature verification procedures + - ISO 18013-5 § 8.3: Document structure validation + - RFC 8152: COSE signature verification + - RFC 8949: CBOR decoding and validation Args: request: The web request object. - "mso_mdoc": { ... 
} + "mso_mdoc": { + CBOR-encoded mDoc per ISO 18013-5 § 8.3 and OID4VCI 1.0 § E.1.1 + } """ context: AdminRequestContext = request["context"] body = await request.json() mso_mdoc = body["mso_mdoc"] try: - result = await mso_mdoc_verify(context.profile, mso_mdoc) - except (BadJWSHeaderError, InvalidVerificationMethod) as err: - raise web.HTTPBadRequest(reason=err.roll_up) from err - except ResolverError as err: - raise web.HTTPNotFound(reason=err.roll_up) from err + # Load configured trust anchors from the wallet so verification is + # authenticated against the known trust chain. Without this, the + # endpoint always accepts any self-signed issuer certificate, which + # defeats the purpose of having a trust store. + storage_manager = MdocStorageManager(context.profile) + async with context.profile.session() as session: + trust_anchor_pems = await storage_manager.get_all_trust_anchor_pems(session) + + result = mso_mdoc_verify(mso_mdoc, trust_anchors=trust_anchor_pems) + except ValueError as err: + raise web.HTTPBadRequest(reason=str(err)) from err + except Exception as err: + raise web.HTTPInternalServerError(reason=f"Verification failed: {err}") from err return web.json_response(result.serialize()) @@ -146,9 +272,18 @@ async def register(app: web.Application): ] ) + # Register key and certificate management routes + register_key_routes(app) + # Register trust anchor management routes + register_trust_anchor_routes(app) + def post_process_routes(app: web.Application): - """Amend swagger API.""" + """Amend swagger API. + + Adds mso_mdoc plugin documentation with references to both ISO 18013-5 + and OpenID4VCI 1.0 specifications for comprehensive protocol compliance. 
+ """ # Add top-level tags description if "tags" not in app._state["swagger_dict"]: @@ -156,7 +291,16 @@ def post_process_routes(app: web.Application): app._state["swagger_dict"]["tags"].append( { "name": "mso_mdoc", - "description": "mso_mdoc plugin", - "externalDocs": {"description": "Specification", "url": SPEC_URI}, + "description": ( + "ISO 18013-5 mobile document (mDoc) operations with OpenID4VCI" + " 1.0 compliance" + ), + "externalDocs": [ + {"description": "ISO 18013-5 Specification", "url": SPEC_URI}, + { + "description": "OpenID4VCI 1.0 mso_mdoc Format", + "url": OID4VCI_SPEC_URI, + }, + ], } ) diff --git a/oid4vc/mso_mdoc/signing_key.py b/oid4vc/mso_mdoc/signing_key.py new file mode 100644 index 000000000..aa24976b0 --- /dev/null +++ b/oid4vc/mso_mdoc/signing_key.py @@ -0,0 +1,119 @@ +"""Signing key resolution and certificate validation for mso_mdoc issuance. + +Provides two public helpers: + +- ``check_certificate_not_expired`` — validates that a PEM certificate is + currently within its validity window (NotBefore ≤ now ≤ NotAfter). +- ``resolve_signing_key_for_credential`` — looks up the registered signing key + for a credential by verification method or falls back to the configured + default. Raises ``CredProcessorError`` when no key is found; never + auto-generates keys. +""" + +import logging +from datetime import UTC, datetime +from typing import Optional + +from cryptography import x509 as _x509 + +from acapy_agent.core.profile import Profile, ProfileSession + +from oid4vc.cred_processor import CredProcessorError + +from .storage import MdocStorageManager + +LOGGER = logging.getLogger(__name__) + + +def check_certificate_not_expired(cert_pem: str) -> None: + """Validate that a PEM-encoded X.509 certificate is currently valid. + + Raises ``CredProcessorError`` when the certificate is expired, not yet + valid, or cannot be parsed. Returns ``None`` silently on success. + + Args: + cert_pem: PEM-encoded X.509 certificate string. 
+ + Raises: + CredProcessorError: If the certificate is expired, not yet valid, or + cannot be parsed from PEM. + """ + if not cert_pem or not cert_pem.strip(): + raise CredProcessorError("Empty certificate PEM string") + + try: + cert = _x509.load_pem_x509_certificate(cert_pem.strip().encode()) + except Exception as exc: + raise CredProcessorError( + f"Invalid certificate PEM — could not parse: {exc}" + ) from exc + + now = datetime.now(UTC) + if cert.not_valid_before_utc > now: + nb = cert.not_valid_before_utc.isoformat() + raise CredProcessorError(f"Certificate is not yet valid (NotBefore={nb})") + if cert.not_valid_after_utc < now: + na = cert.not_valid_after_utc.isoformat() + raise CredProcessorError(f"Certificate has expired (NotAfter={na})") + + +async def resolve_signing_key_for_credential( + profile: Profile, + session: ProfileSession, + verification_method: Optional[str] = None, +) -> dict: + """Resolve a signing key for credential issuance. + + Looks up a registered signing key from storage. When + ``verification_method`` is supplied the key registered for that method is + returned; otherwise the configured default key is returned. + + Raises ``CredProcessorError`` — never auto-generates keys. Operators must + register keys via the mso_mdoc key management API before issuing. + + Protocol Compliance: + - ISO 18013-5 § 7.2.4: Issuer authentication mechanisms + - ISO 18013-5 § 9.1.3.5: Cryptographic algorithms for mDoc + - RFC 7517: JSON Web Key (JWK) format + + Args: + profile: The active profile. + session: The active profile session. + verification_method: Optional verification method DID URL. + + Returns: + JWK dictionary for the resolved signing key. + + Raises: + CredProcessorError: If no matching key is registered. 
+ """ + storage_manager = MdocStorageManager(profile) + + if verification_method: + if "#" in verification_method: + _, key_id = verification_method.split("#", 1) + else: + key_id = verification_method + + stored_key = await storage_manager.get_signing_key( + session, + identifier=key_id, + verification_method=verification_method, + ) + + if stored_key and stored_key.get("jwk"): + return stored_key["jwk"] + + raise CredProcessorError( + f"Signing key not found for verification method {verification_method!r}. " + "Register the key via the mso_mdoc key management API before issuing." + ) + + stored_key = await storage_manager.get_default_signing_key(session) + if stored_key and stored_key.get("jwk"): + return stored_key["jwk"] + + raise CredProcessorError( + "No default signing key is configured. " + "Register a signing key via the mso_mdoc key management API before issuing." + ) diff --git a/oid4vc/mso_mdoc/storage/README.md b/oid4vc/mso_mdoc/storage/README.md new file mode 100644 index 000000000..728b96bf7 --- /dev/null +++ b/oid4vc/mso_mdoc/storage/README.md @@ -0,0 +1,50 @@ +# mDoc Storage Module + +This package provides persistent storage capabilities for mDoc-related cryptographic materials, certificates, and configuration data. It implements secure storage patterns following ISO 18013-5 requirements for key management and credential issuance operations. + +## Module Structure + +| File | Description | +|------|-------------| +| `base.py` | Shared constants and `get_storage()` helper function | +| `keys.py` | ECDSA signing key storage (JWK format per RFC 7517) | +| `certificates.py` | X.509 certificate storage for issuer authentication | +| `trust_anchors.py` | Trust anchor (root CA) certificate storage for verification | +| `config.py` | Configuration storage (default keys, certificates, etc.) 
| +| `__init__.py` | Re-exports `MdocStorageManager` class for backward compatibility | + +## Usage + +```python +from mso_mdoc.storage import MdocStorageManager + +# Initialize with ACA-Py profile +storage_manager = MdocStorageManager(profile) + +async with profile.session() as session: + # Store a signing key + await storage_manager.store_key(session, "key-123", jwk, purpose="signing") + + # Retrieve a key + jwk = await storage_manager.get_key(session, "key-123") + + # Store a certificate + await storage_manager.store_certificate(session, "cert-123", pem, key_id="key-123") + + # Store a trust anchor + await storage_manager.store_trust_anchor(session, "anchor-1", ca_pem) +``` + +## Storage Record Types + +- `mdoc_key` - ECDSA signing keys in JWK format +- `mdoc_certificate` - X.509 issuer certificates (PEM encoded) +- `mdoc_trust_anchor` - Root CA certificates for chain validation +- `mdoc_config` - Configuration data (default key/cert settings) + +## Protocol Compliance + +- **ISO/IEC 18013-5:2021 § 7.2.4** - Issuer authentication mechanisms +- **ISO/IEC 18013-5:2021 § 9.1.3.5** - Cryptographic algorithms +- **RFC 7517** - JSON Web Key (JWK) storage format +- **NIST SP 800-57** - Key management best practices diff --git a/oid4vc/mso_mdoc/storage/__init__.py b/oid4vc/mso_mdoc/storage/__init__.py new file mode 100644 index 000000000..189dfaf12 --- /dev/null +++ b/oid4vc/mso_mdoc/storage/__init__.py @@ -0,0 +1,374 @@ +"""Storage manager for mso_mdoc keys and certificates. + +This module provides persistent storage capabilities for mDoc-related +cryptographic materials, certificates, and configuration data. It implements +secure storage patterns following ISO 18013-5 requirements for key management +and credential issuance operations. 
+ +Key Protocol Compliance: +- ISO/IEC 18013-5:2021 § 7.2.4 - Issuer authentication mechanisms +- ISO/IEC 18013-5:2021 § 9.1.3.5 - Cryptographic algorithms +- RFC 7517 - JSON Web Key (JWK) storage format +- NIST SP 800-57 - Key management best practices + +Storage Types: +- ECDSA signing keys with P-256 curve parameters +- X.509 certificates for issuer authentication +- mDoc configuration and metadata +- Device authentication public keys +""" + +from datetime import UTC, datetime +import logging +from typing import Any, Dict, List, Optional, Tuple + +from acapy_agent.core.profile import Profile, ProfileSession +from acapy_agent.storage.base import BaseStorage + +from . import certificates, config, keys, trust_anchors + +# Re-export constants for backward compatibility +from .base import ( + MDOC_CERT_RECORD_TYPE, + MDOC_CONFIG_RECORD_TYPE, + MDOC_KEY_RECORD_TYPE, + MDOC_TRUST_ANCHOR_RECORD_TYPE, + get_storage, +) + +LOGGER = logging.getLogger(__name__) + +__all__ = [ + "MdocStorageManager", + "MDOC_KEY_RECORD_TYPE", + "MDOC_CERT_RECORD_TYPE", + "MDOC_CONFIG_RECORD_TYPE", + "MDOC_TRUST_ANCHOR_RECORD_TYPE", +] + + +class MdocStorageManager: + """Storage manager for mDoc keys, certificates, and configuration. + + Provides secure storage operations for cryptographic materials used in + mDoc issuance and verification processes. Implements proper key lifecycle + management following NIST SP 800-57 guidelines. + + Attributes: + profile: ACA-Py profile for accessing storage backend + """ + + def __init__(self, profile: Profile) -> None: + """Initialize storage manager with profile. + + Args: + profile: ACA-Py profile containing storage configuration + """ + self.profile = profile + + def get_storage(self, session: ProfileSession) -> BaseStorage: + """Get storage instance from session. + + Retrieves the configured storage backend from the session context + for performing persistent storage operations. 
+ + Args: + session: Active database session with storage context + + Returns: + BaseStorage instance for record operations + + Raises: + StorageError: If storage backend is not available + """ + return get_storage(session) + + # ========================================================================= + # Key Storage Methods + # ========================================================================= + + async def store_key( + self, + session: ProfileSession, + key_id: str, + jwk: Dict[str, Any], + purpose: str = "signing", + metadata: Optional[Dict[str, Any]] = None, + ) -> None: + """Store a JSON Web Key (JWK) for mDoc operations.""" + await keys.store_key(session, key_id, jwk, purpose, metadata) + + async def get_key(self, session: ProfileSession, key_id: str) -> Optional[Dict]: + """Retrieve a stored key by ID.""" + return await keys.get_key(session, key_id) + + async def list_keys( + self, session: ProfileSession, purpose: Optional[str] = None + ) -> List[Dict]: + """List stored keys, optionally filtered by purpose.""" + return await keys.list_keys(session, purpose) + + async def delete_key(self, session: ProfileSession, key_id: str) -> bool: + """Delete a stored key.""" + return await keys.delete_key(session, key_id) + + async def store_signing_key( + self, session: ProfileSession, key_id: str, key_metadata: Dict + ) -> None: + """Store a signing key with metadata.""" + await keys.store_signing_key(session, key_id, key_metadata) + + async def get_signing_key( + self, + session: ProfileSession, + identifier: Optional[str] = None, + verification_method: Optional[str] = None, + ) -> Optional[Dict[str, Any]]: + """Get a signing key by identifier or verification method.""" + key_list = await keys.list_keys(session, purpose="signing") + + if not key_list: + return None + + # If no identifier provided, return default + if not identifier and not verification_method: + return await self.get_default_signing_key(session) + + # Search by identifier or verification 
method + for key in key_list: + key_id = key["key_id"] + metadata = key.get("metadata", {}) + + # Match by key_id + if identifier and key_id == identifier: + return key + + # Match by verification method + if verification_method: + if metadata.get("verification_method") == verification_method: + return key + # Also check if identifier matches key fragment from verification method + if "#" in verification_method: + _, key_fragment = verification_method.split("#", 1) + if metadata.get("key_id") == key_fragment or key_id == key_fragment: + return key + + return None + + async def get_signing_key_and_cert( + self, session: ProfileSession + ) -> List[Dict[str, Any]]: + """Get all signing keys with their associated certificates.""" + key_list = await keys.list_keys(session, purpose="signing") + if not key_list: + return [] + + cert_list = await certificates.list_certificates(session) + + # m-9: Build an O(n) mapping from key_id → cert_id so the inner loop + # below is O(1) per key instead of O(n×m). + key_to_cert_id: dict = {} + for cert in cert_list: + kid = cert["key_id"] + if kid not in key_to_cert_id: # keep the first (will sort below if needed) + key_to_cert_id[kid] = cert["cert_id"] + + result = [] + for key_data in key_list: + key_id = key_data["key_id"] + + cert_pem = None + cert_id = key_to_cert_id.get(key_id) + if cert_id: + cert_result = await certificates.get_certificate(session, cert_id) + if cert_result: + cert_pem = cert_result[0] + + result.append( + { + "key_id": key_id, + "jwk": key_data["jwk"], + "metadata": key_data.get("metadata", {}), + "certificate_pem": cert_pem, + "created_at": key_data["created_at"], + } + ) + + return result + + async def get_default_signing_key( + self, session: ProfileSession + ) -> Optional[Dict[str, Any]]: + """Get the default signing key. + + M-3 fix: this method is now read-only. 
The previous implementation + silently persisted a config record as a side-effect of the first read, + which made it impossible to call the getter safely inside a read-only + transaction. Auto-promotion of the first available key is now done + without touching the config store — callers that want to persist the + default must call ``store_config`` explicitly. + """ + cfg = await config.get_config(session, "default_signing_key") + if not cfg: + # No default configured — return the first available signing key + # without persisting it as the new default. + key_list = await keys.list_keys(session, purpose="signing") + if key_list: + return key_list[0] + return None + + key_id = cfg.get("key_id") + if key_id: + # Return full key data + key_list = await keys.list_keys(session, purpose="signing") + for key in key_list: + if key["key_id"] == key_id: + return key + + return None + + # ========================================================================= + # Certificate Storage Methods + # ========================================================================= + + async def store_certificate( + self, + session: ProfileSession, + cert_id: str, + certificate_pem: str, + key_id: str, + metadata: Optional[Dict] = None, + ) -> None: + """Store a PEM certificate.""" + await certificates.store_certificate( + session, cert_id, certificate_pem, key_id, metadata + ) + + async def get_certificate( + self, session: ProfileSession, cert_id: str + ) -> Optional[Tuple[str, str]]: + """Retrieve certificate PEM and associated key ID.""" + return await certificates.get_certificate(session, cert_id) + + async def list_certificates( + self, session: ProfileSession, include_pem: bool = False + ) -> List[Dict]: + """List all stored certificates.""" + return await certificates.list_certificates(session, include_pem) + + async def get_certificate_for_key( + self, session: ProfileSession, key_id: str + ) -> Optional[str]: + """Retrieve certificate PEM associated with a key ID.""" + return 
await certificates.get_certificate_for_key(session, key_id) + + async def get_default_certificate( + self, session: ProfileSession + ) -> Optional[Dict[str, Any]]: + """Get the default certificate.""" + + def _is_valid(cert: Dict[str, Any]) -> bool: + now = datetime.now(UTC) + # Prefer validating against actual X.509 notBefore/notAfter fields + # rather than application-level metadata, which may be stale or + # missing. Fall back to metadata timestamps when the PEM is absent. + cert_pem = cert.get("certificate_pem") + if cert_pem: + try: + from cryptography import x509 as _cx509 # noqa: PLC0415 + + parsed = _cx509.load_pem_x509_certificate(cert_pem.encode()) + return ( + parsed.not_valid_before_utc <= now <= parsed.not_valid_after_utc + ) + except Exception: + LOGGER.debug( + "Could not parse certificate PEM for cert %s; " + "falling back to metadata timestamps", + cert.get("cert_id"), + ) + # Metadata fallback: missing timestamps default to now, making the + # window [now, now] which is treated as valid and logged as a warning. 
+ meta = cert.get("metadata", {}) + if not meta.get("valid_from"): + LOGGER.debug( + "Certificate %s has no valid_from metadata; assuming valid", + cert.get("cert_id"), + ) + valid_from = datetime.fromisoformat(meta.get("valid_from", now.isoformat())) + valid_to = datetime.fromisoformat(meta.get("valid_to", now.isoformat())) + return valid_from <= now <= valid_to + + cfg = await config.get_config(session, "default_certificate") + if not cfg: + # Try to auto-select first available certificate + cert_list = await certificates.list_certificates(session, include_pem=True) + if cert_list: + default_cert = cert_list[0] + if _is_valid(default_cert): + await config.store_config( + session, + "default_certificate", + {"cert_id": default_cert["cert_id"]}, + ) + return default_cert + return None + + cert_id = cfg.get("cert_id") + if not cert_id: + return None + + cert_list = await certificates.list_certificates(session, include_pem=True) + for certificate in cert_list: + if certificate["cert_id"] == cert_id and _is_valid(certificate): + return certificate + + return None + + # ========================================================================= + # Configuration Storage Methods + # ========================================================================= + + async def store_config( + self, session: ProfileSession, config_id: str, config_data: Dict + ) -> None: + """Store configuration data.""" + await config.store_config(session, config_id, config_data) + + async def get_config(self, session: ProfileSession, config_id: str) -> Optional[Dict]: + """Retrieve configuration data.""" + return await config.get_config(session, config_id) + + # ========================================================================= + # Trust Anchor Storage Methods + # ========================================================================= + + async def store_trust_anchor( + self, + session: ProfileSession, + anchor_id: str, + certificate_pem: str, + metadata: Optional[Dict] = None, + ) -> 
None: + """Store an X.509 trust anchor certificate.""" + await trust_anchors.store_trust_anchor( + session, anchor_id, certificate_pem, metadata + ) + + async def get_trust_anchor( + self, session: ProfileSession, anchor_id: str + ) -> Optional[Dict[str, Any]]: + """Retrieve a trust anchor by ID.""" + return await trust_anchors.get_trust_anchor(session, anchor_id) + + async def list_trust_anchors(self, session: ProfileSession) -> List[Dict[str, Any]]: + """List all stored trust anchors.""" + return await trust_anchors.list_trust_anchors(session) + + async def get_all_trust_anchor_pems(self, session: ProfileSession) -> List[str]: + """Retrieve all trust anchor certificates as PEM strings.""" + return await trust_anchors.get_all_trust_anchor_pems(session) + + async def delete_trust_anchor(self, session: ProfileSession, anchor_id: str) -> bool: + """Delete a trust anchor by ID.""" + return await trust_anchors.delete_trust_anchor(session, anchor_id) diff --git a/oid4vc/mso_mdoc/storage/base.py b/oid4vc/mso_mdoc/storage/base.py new file mode 100644 index 000000000..10357cc71 --- /dev/null +++ b/oid4vc/mso_mdoc/storage/base.py @@ -0,0 +1,52 @@ +"""Base storage utilities for mso_mdoc. + +This module provides shared constants and base functionality for mDoc storage +operations. All storage record types and the base storage accessor are defined here. 
+ +Key Protocol Compliance: +- ISO/IEC 18013-5:2021 § 7.2.4 - Issuer authentication mechanisms +- RFC 7517 - JSON Web Key (JWK) storage format +- NIST SP 800-57 - Key management best practices +""" + +import logging +from typing import TYPE_CHECKING + +from acapy_agent.config.base import InjectionError +from acapy_agent.storage.base import BaseStorage + +if TYPE_CHECKING: + from acapy_agent.core.profile import ProfileSession + +LOGGER = logging.getLogger(__name__) + +# Storage record types for mDoc operations +MDOC_KEY_RECORD_TYPE = "mdoc_key" +MDOC_CERT_RECORD_TYPE = "mdoc_certificate" +MDOC_CONFIG_RECORD_TYPE = "mdoc_config" +MDOC_TRUST_ANCHOR_RECORD_TYPE = "mdoc_trust_anchor" + + +def get_storage(session: "ProfileSession") -> BaseStorage: + """Get storage instance from session. + + Retrieves the configured storage backend from the session context + for performing persistent storage operations. + + Args: + session: Active database session with storage context + + Returns: + BaseStorage instance for record operations + + Raises: + StorageError: If storage backend is not available + """ + LOGGER.debug("Attempting to inject BaseStorage from session: %s", session) + try: + storage = session.inject(BaseStorage) + LOGGER.debug("Successfully injected BaseStorage: %s", storage) + return storage + except InjectionError as e: + LOGGER.error("Failed to inject BaseStorage from session %s: %s", session, e) + raise diff --git a/oid4vc/mso_mdoc/storage/certificates.py b/oid4vc/mso_mdoc/storage/certificates.py new file mode 100644 index 000000000..3a1651336 --- /dev/null +++ b/oid4vc/mso_mdoc/storage/certificates.py @@ -0,0 +1,151 @@ +"""Certificate storage for mso_mdoc. + +This module provides storage capabilities for X.509 certificates used in +mDoc issuer authentication following ISO/IEC 18013-5:2021 § 7.2.4. 
+""" + +import json +import logging +from datetime import UTC, datetime +from typing import Dict, List, Optional, Tuple + +from acapy_agent.core.profile import ProfileSession +from acapy_agent.storage.base import StorageRecord +from acapy_agent.storage.error import StorageError, StorageNotFoundError + +from .base import MDOC_CERT_RECORD_TYPE, get_storage + +LOGGER = logging.getLogger(__name__) + + +async def store_certificate( + session: ProfileSession, + cert_id: str, + certificate_pem: str, + key_id: str, + metadata: Optional[Dict] = None, +) -> None: + """Store a PEM certificate. + + Raises: + StorageError: If the storage backend is unavailable or the + record cannot be persisted. + """ + storage = get_storage(session) + + record_data = { + "certificate_pem": certificate_pem, + "key_id": key_id, + "created_at": datetime.now(UTC).isoformat(), + "metadata": metadata or {}, + } + + record = StorageRecord( + type=MDOC_CERT_RECORD_TYPE, + id=cert_id, + value=json.dumps(record_data), + tags={"key_id": key_id}, + ) + + await storage.add_record(record) + LOGGER.info("Stored mDoc certificate: %s", cert_id) + + +async def get_certificate( + session: ProfileSession, cert_id: str +) -> Optional[Tuple[str, str]]: + """Retrieve certificate PEM and associated key ID.""" + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning( + "Storage not available for getting certificate %s: %s", + cert_id, + e, + ) + return None + + try: + record = await storage.get_record(MDOC_CERT_RECORD_TYPE, cert_id) + data = json.loads(record.value) + return data["certificate_pem"], data["key_id"] + except StorageNotFoundError: + LOGGER.warning("Certificate not found: %s", cert_id) + return None + except (StorageError, json.JSONDecodeError) as e: + LOGGER.warning("Failed to retrieve certificate %s: %s", cert_id, e) + return None + + +async def list_certificates( + session: ProfileSession, include_pem: bool = False +) -> List[Dict]: + """List all stored certificates. 
+ + Args: + session: Profile session for storage access + include_pem: If True, include the certificate_pem field in results + + Returns: + List of certificate dictionaries + """ + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning("Storage not available for listing certificates: %s", e) + return [] + + try: + records = await storage.find_all_records(type_filter=MDOC_CERT_RECORD_TYPE) + + certificates = [] + for record in records: + data = json.loads(record.value) + cert_entry = { + "cert_id": record.id, + "key_id": data["key_id"], + "created_at": data["created_at"], + "metadata": data.get("metadata", {}), + } + if include_pem: + cert_entry["certificate_pem"] = data.get("certificate_pem") + certificates.append(cert_entry) + + return certificates + except (StorageError, StorageNotFoundError) as e: + LOGGER.warning("Failed to list certificates: %s", e) + return [] + + +async def get_certificate_for_key(session: ProfileSession, key_id: str) -> Optional[str]: + """Retrieve certificate PEM associated with a key ID.""" + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning( + "Storage not available for getting certificate for key %s: %s", + key_id, + e, + ) + return None + + try: + records = await storage.find_all_records( + type_filter=MDOC_CERT_RECORD_TYPE, + tag_query={"key_id": key_id}, + ) + if not records: + return None + + # M-2: take the most recently created certificate to get deterministic, + # reproducible results when multiple certs share the same key_id. 
+ records.sort( + key=lambda r: json.loads(r.value).get("created_at", ""), + reverse=True, + ) + record = records[0] + data = json.loads(record.value) + return data["certificate_pem"] + except (StorageError, StorageNotFoundError) as e: + LOGGER.warning("Failed to retrieve certificate for key %s: %s", key_id, e) + return None diff --git a/oid4vc/mso_mdoc/storage/config.py b/oid4vc/mso_mdoc/storage/config.py new file mode 100644 index 000000000..3a32fb03e --- /dev/null +++ b/oid4vc/mso_mdoc/storage/config.py @@ -0,0 +1,57 @@ +"""Configuration storage for mso_mdoc. + +This module provides storage capabilities for mDoc configuration data +including default signing key and certificate settings. +""" + +import json +import logging +from typing import Dict, Optional + +from acapy_agent.core.profile import ProfileSession +from acapy_agent.storage.base import StorageRecord +from acapy_agent.storage.error import StorageDuplicateError, StorageError + +from .base import MDOC_CONFIG_RECORD_TYPE, get_storage + +LOGGER = logging.getLogger(__name__) + + +async def store_config( + session: ProfileSession, config_id: str, config_data: Dict +) -> None: + """Store configuration data.""" + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning("Storage not available for storing config %s: %s", config_id, e) + return + + record = StorageRecord( + type=MDOC_CONFIG_RECORD_TYPE, + id=config_id, + value=json.dumps(config_data), + ) + + try: + await storage.add_record(record) + except StorageDuplicateError: + # Record already exists — update in place + await storage.update_record(record, record.value, record.tags) + + LOGGER.info("Stored mDoc config: %s", config_id) + + +async def get_config(session: ProfileSession, config_id: str) -> Optional[Dict]: + """Retrieve configuration data.""" + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning("Storage not available for getting config %s: %s", config_id, e) + return None + + try: + record = 
await storage.get_record(MDOC_CONFIG_RECORD_TYPE, config_id) + return json.loads(record.value) + except (StorageError, json.JSONDecodeError): + return None diff --git a/oid4vc/mso_mdoc/storage/keys.py b/oid4vc/mso_mdoc/storage/keys.py new file mode 100644 index 000000000..37f408ff6 --- /dev/null +++ b/oid4vc/mso_mdoc/storage/keys.py @@ -0,0 +1,167 @@ +"""Key storage for mso_mdoc. + +This module provides storage capabilities for ECDSA signing keys in JWK format +following RFC 7517 specifications and NIST SP 800-57 key lifecycle management. +""" + +import json +import logging +from datetime import UTC, datetime +from typing import Any, Dict, List, Optional + +from acapy_agent.core.profile import ProfileSession +from acapy_agent.storage.base import StorageRecord +from acapy_agent.storage.error import StorageError, StorageNotFoundError + +from .base import MDOC_KEY_RECORD_TYPE, get_storage + +LOGGER = logging.getLogger(__name__) + + +async def store_key( + session: ProfileSession, + key_id: str, + jwk: Dict[str, Any], + purpose: str = "signing", + metadata: Optional[Dict[str, Any]] = None, +) -> None: + """Store a JSON Web Key (JWK) for mDoc operations. + + Persistently stores an ECDSA key in JWK format following RFC 7517 + specifications. Keys are indexed by purpose and can include additional + metadata for key management operations. 
+ + Args: + session: Active database session for storage operations + key_id: Unique identifier for the key (used as storage record ID) + jwk: JSON Web Key dictionary with EC parameters + purpose: Key usage purpose (default: "signing") + metadata: Optional additional key metadata and attributes + + Raises: + StorageError: If key storage operation fails + ValueError: If key_id or jwk parameters are invalid + + Example: + >>> jwk = {"kty": "EC", "crv": "P-256", "x": "...", "y": "...", "d": "..."} + >>> await store_key(session, "key-123", jwk, "signing") + """ + try: + storage = get_storage(session) + except StorageError as e: + LOGGER.error("Storage backend unavailable for storing key %s: %s", key_id, e) + raise StorageError(f"Cannot store key {key_id}: storage unavailable") from e + + record_data = { + "jwk": jwk, + "purpose": purpose, + "created_at": datetime.now(UTC).isoformat(), + "metadata": metadata or {}, + } + + record = StorageRecord( + type=MDOC_KEY_RECORD_TYPE, + id=key_id, + value=json.dumps(record_data), + tags={"purpose": purpose}, + ) + + await storage.add_record(record) + LOGGER.info("Stored mDoc key: %s", key_id) + + +async def get_key(session: ProfileSession, key_id: str) -> Optional[Dict]: + """Retrieve a stored key by ID.""" + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning("Storage not available for getting key %s: %s", key_id, e) + return None + + try: + record = await storage.get_record(MDOC_KEY_RECORD_TYPE, key_id) + data = json.loads(record.value) + return data["jwk"] + except StorageNotFoundError: + LOGGER.warning("Key not found: %s", key_id) + return None + except (StorageError, json.JSONDecodeError) as e: + LOGGER.warning("Failed to retrieve key %s: %s", key_id, e) + return None + + +async def list_keys(session: ProfileSession, purpose: Optional[str] = None) -> List[Dict]: + """List stored keys, optionally filtered by purpose.""" + try: + storage = get_storage(session) + except Exception as e: + 
LOGGER.warning("Storage not available for listing keys: %s", e) + return [] + + search_tags = {} + if purpose: + search_tags["purpose"] = purpose + + try: + records = await storage.find_all_records( + type_filter=MDOC_KEY_RECORD_TYPE, tag_query=search_tags + ) + + keys = [] + for record in records: + data = json.loads(record.value) + keys.append( + { + "key_id": record.id, + "jwk": data["jwk"], + "purpose": data["purpose"], + "created_at": data["created_at"], + "metadata": data.get("metadata", {}), + } + ) + + return keys + except (StorageError, StorageNotFoundError) as e: + LOGGER.warning("Failed to list keys: %s", e) + return [] + + +async def delete_key(session: ProfileSession, key_id: str) -> bool: + """Delete a stored key.""" + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning("Storage not available for deleting key %s: %s", key_id, e) + return False + + try: + record = await storage.get_record(MDOC_KEY_RECORD_TYPE, key_id) + await storage.delete_record(record) + LOGGER.info("Deleted mDoc key: %s", key_id) + return True + except (StorageNotFoundError, StorageError) as e: + LOGGER.warning("Failed to delete key %s: %s", key_id, e) + return False + + +async def store_signing_key( + session: ProfileSession, key_id: str, key_metadata: Dict +) -> None: + """Store a signing key with metadata. + + Args: + session: Profile session for storage access + key_id: Unique identifier for the key + key_metadata: Dictionary containing jwk and other metadata + """ + jwk = key_metadata.get("jwk") + if not jwk: + raise ValueError("key_metadata must contain 'jwk' field") + + await store_key( + session, + key_id=key_id, + jwk=jwk, + purpose="signing", + metadata=key_metadata, + ) diff --git a/oid4vc/mso_mdoc/storage/trust_anchors.py b/oid4vc/mso_mdoc/storage/trust_anchors.py new file mode 100644 index 000000000..0e219b91d --- /dev/null +++ b/oid4vc/mso_mdoc/storage/trust_anchors.py @@ -0,0 +1,208 @@ +"""Trust anchor storage for mso_mdoc. 
+ +This module provides storage capabilities for X.509 trust anchor certificates +used to verify mDoc issuer certificate chains during credential verification. +""" + +import json +import logging +from datetime import UTC, datetime +from typing import Any, Dict, List, Optional + +from acapy_agent.core.profile import ProfileSession +from acapy_agent.storage.base import StorageRecord +from acapy_agent.storage.error import StorageError, StorageNotFoundError + +from .base import MDOC_TRUST_ANCHOR_RECORD_TYPE, get_storage + +LOGGER = logging.getLogger(__name__) + + +async def store_trust_anchor( + session: ProfileSession, + anchor_id: str, + certificate_pem: str, + metadata: Optional[Dict] = None, +) -> None: + """Store an X.509 trust anchor certificate. + + Trust anchors are root CA certificates used to verify mDoc issuer + certificate chains during credential verification. + + Args: + session: Active database session for storage operations + anchor_id: Unique identifier for the trust anchor + certificate_pem: PEM-encoded X.509 certificate + metadata: Optional metadata (e.g., issuer name, expiry, purpose) + + Raises: + StorageError: If storage operation fails + """ + try: + storage = get_storage(session) + except StorageError as e: + LOGGER.error( + "Storage backend unavailable for storing trust anchor %s: %s", + anchor_id, + e, + ) + raise StorageError( + f"Cannot store trust anchor {anchor_id}: storage unavailable" + ) from e + + record_data = { + "certificate_pem": certificate_pem, + "created_at": datetime.now(UTC).isoformat(), + "metadata": metadata or {}, + } + + record = StorageRecord( + type=MDOC_TRUST_ANCHOR_RECORD_TYPE, + id=anchor_id, + value=json.dumps(record_data), + tags={"type": "trust_anchor"}, + ) + + await storage.add_record(record) + LOGGER.info("Stored mDoc trust anchor: %s", anchor_id) + + +async def get_trust_anchor( + session: ProfileSession, anchor_id: str +) -> Optional[Dict[str, Any]]: + """Retrieve a trust anchor by ID. 
+ + Args: + session: Active database session + anchor_id: Unique identifier for the trust anchor + + Returns: + Dictionary containing certificate_pem, created_at, and metadata, + or None if not found + """ + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning( + "Storage not available for getting trust anchor %s: %s", + anchor_id, + e, + ) + return None + + try: + record = await storage.get_record(MDOC_TRUST_ANCHOR_RECORD_TYPE, anchor_id) + data = json.loads(record.value) + return { + "anchor_id": anchor_id, + "certificate_pem": data["certificate_pem"], + "created_at": data["created_at"], + "metadata": data.get("metadata", {}), + } + except StorageNotFoundError: + LOGGER.warning("Trust anchor not found: %s", anchor_id) + return None + except (StorageError, json.JSONDecodeError) as e: + LOGGER.warning("Failed to retrieve trust anchor %s: %s", anchor_id, e) + return None + + +async def list_trust_anchors(session: ProfileSession) -> List[Dict[str, Any]]: + """List all stored trust anchors. + + Args: + session: Active database session + + Returns: + List of trust anchor dictionaries with anchor_id, created_at, metadata + """ + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning("Storage not available for listing trust anchors: %s", e) + return [] + + try: + records = await storage.find_all_records( + type_filter=MDOC_TRUST_ANCHOR_RECORD_TYPE + ) + + anchors = [] + for record in records: + data = json.loads(record.value) + anchors.append( + { + "anchor_id": record.id, + "created_at": data["created_at"], + "metadata": data.get("metadata", {}), + } + ) + + return anchors + except (StorageError, StorageNotFoundError) as e: + LOGGER.warning("Failed to list trust anchors: %s", e) + return [] + + +async def get_all_trust_anchor_pems(session: ProfileSession) -> List[str]: + """Retrieve all trust anchor certificates as PEM strings. 
+ + This method is optimized for use by TrustStore implementations + that need all certificates for chain validation. + + Args: + session: Active database session + + Returns: + List of PEM-encoded certificate strings + """ + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning("Storage not available for getting trust anchor PEMs: %s", e) + return [] + + try: + records = await storage.find_all_records( + type_filter=MDOC_TRUST_ANCHOR_RECORD_TYPE + ) + + pems = [] + for record in records: + data = json.loads(record.value) + pems.append(data["certificate_pem"]) + + return pems + except (StorageError, StorageNotFoundError) as e: + LOGGER.warning("Failed to retrieve trust anchor PEMs: %s", e) + return [] + + +async def delete_trust_anchor(session: ProfileSession, anchor_id: str) -> bool: + """Delete a trust anchor by ID. + + Args: + session: Active database session + anchor_id: Unique identifier for the trust anchor + + Returns: + True if deleted successfully, False otherwise + """ + try: + storage = get_storage(session) + except Exception as e: + LOGGER.warning( + "Storage not available for deleting trust anchor %s: %s", + anchor_id, + e, + ) + return False + + try: + record = await storage.get_record(MDOC_TRUST_ANCHOR_RECORD_TYPE, anchor_id) + await storage.delete_record(record) + LOGGER.info("Deleted mDoc trust anchor: %s", anchor_id) + return True + except (StorageNotFoundError, StorageError) as e: + LOGGER.warning("Failed to delete trust anchor %s: %s", anchor_id, e) + return False diff --git a/oid4vc/mso_mdoc/tests/test_review_issues.py b/oid4vc/mso_mdoc/tests/test_cred_processor_and_verifier_unit.py similarity index 61% rename from oid4vc/mso_mdoc/tests/test_review_issues.py rename to oid4vc/mso_mdoc/tests/test_cred_processor_and_verifier_unit.py index 4637fae1b..0ce7fe0e5 100644 --- a/oid4vc/mso_mdoc/tests/test_review_issues.py +++ b/oid4vc/mso_mdoc/tests/test_cred_processor_and_verifier_unit.py @@ -1,16 +1,27 @@ -"""Tests verifying 
fixes for issues identified in CODE_REVIEW.md. - -Each test class is labelled with the review issue ID it covers. -Tests in this module are pure-unit tests: the only dependency that -requires mocking is isomdl_uniffi (a native Rust extension). All pure- -Python packages (acapy_agent, oid4vc, cbor2, pydid) are imported -normally so that real exception classes are always used, avoiding class- -identity mismatches between the code under test and test assertions. +"""Unit tests for MsoMdocCredProcessor, MsoMdocCredVerifier, MsoMdocPresVerifier, +WalletTrustStore, key-generation utilities, and mso_mdoc storage operations. + +Coverage areas: +- Credential processor: issuance, signing-key resolution, payload preparation, + device-key extraction, and mDoc result normalisation. +- Verifier: trust-anchor registry enforcement, credential and presentation + verification, pre-verified claims sentinel, and credential parsing. +- Key & certificate management: PEM<->JWK conversion, EC curve detection, + self-signed certificate generation, cert-at-key-generation invariant, and + missing-cert error handling. +- Storage: certificate ordering, config duplicate-error handling, and + get_default_signing_key read-only contract. + +Tests are pure-unit tests: the only dependency that requires mocking is +isomdl_uniffi (a native Rust extension). All pure-Python packages +(acapy_agent, oid4vc, cbor2, pydid) are imported normally so that real +exception classes are always used, avoiding class-identity mismatches +between the code under test and test assertions. """ import sys from contextlib import asynccontextmanager -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, mock_open, patch import pytest @@ -40,7 +51,6 @@ # Now import the modules under test. 
# --------------------------------------------------------------------------- from ..mdoc.verifier import ( # noqa: E402 - FileTrustStore, MsoMdocCredVerifier, MsoMdocPresVerifier, WalletTrustStore, @@ -131,7 +141,7 @@ async def test_no_trust_store_passes_empty_registry(self): @pytest.mark.asyncio async def test_empty_trust_store_passes_empty_registry(self): """verify_presentation with a trust_store returning [] must also fail-closed.""" - mock_store = MagicMock(spec=FileTrustStore) + mock_store = MagicMock() mock_store.get_trust_anchors.return_value = [] verifier = MsoMdocPresVerifier(trust_store=mock_store) profile, _ = make_mock_profile() @@ -489,7 +499,7 @@ def test_flattening_emits_debug_log(self, caplog): doctype = "org.iso.18013.5.1.mDL" payload = {doctype: {"given_name": "Alice"}} - with caplog.at_level(logging.DEBUG, logger="mso_mdoc.cred_processor"): + with caplog.at_level(logging.DEBUG, logger="mso_mdoc.payload"): result = proc._prepare_payload(payload, doctype) assert "given_name" in result @@ -835,3 +845,450 @@ async def test_no_store_config_called_on_auto_select(self): assert result == fake_key # Must not have written anything as a side-effect mock_store.assert_not_called() + + +# =========================================================================== +# resolve_signing_key_for_credential raises when no key is registered +# =========================================================================== + + +class TestResolveSigningKeyRaisesWhenNoKeyRegistered: + """resolve_signing_key_for_credential must raise CredProcessorError + when no key is found rather than auto-generating one. Auto-generation + is wrong because the generated key has no relationship to the operator's + DID document or trust chain. 
+ """ + + @pytest.mark.asyncio + async def test_no_default_key_raises(self): + """When no default key is stored, raise CredProcessorError.""" + from oid4vc.cred_processor import CredProcessorError + from ..cred_processor import resolve_signing_key_for_credential + + profile, session = make_mock_profile() + + with patch("mso_mdoc.signing_key.MdocStorageManager") as MockMgr: + mock_mgr = MagicMock() + mock_mgr.get_default_signing_key = AsyncMock(return_value=None) + MockMgr.return_value = mock_mgr + + with pytest.raises(CredProcessorError, match="No default signing key"): + await resolve_signing_key_for_credential(profile, session) + + @pytest.mark.asyncio + async def test_existing_default_key_returned(self): + """When a default key is registered it is returned without touching storage.""" + from ..cred_processor import resolve_signing_key_for_credential + + profile, session = make_mock_profile() + existing_jwk = {"kty": "EC", "crv": "P-256", "x": "x", "y": "y", "d": "d"} + + with patch("mso_mdoc.signing_key.MdocStorageManager") as MockMgr: + mock_mgr = MagicMock() + mock_mgr.get_default_signing_key = AsyncMock( + return_value={"jwk": existing_jwk} + ) + mock_mgr.store_certificate = AsyncMock() + MockMgr.return_value = mock_mgr + + result = await resolve_signing_key_for_credential(profile, session) + + assert result == existing_jwk + mock_mgr.store_certificate.assert_not_called() + + @pytest.mark.asyncio + async def test_existing_keys_do_not_cause_wrong_default(self): + """When multiple keys exist, get_default_signing_key uses the config + record to return the right one, not list order. 
+ """ + from ..storage import MdocStorageManager + + profile, session = make_mock_profile() + manager = MdocStorageManager(profile) + + old_key = {"key_id": "old-key", "jwk": {"kty": "EC", "x": "old"}, "created_at": "2024-01-01"} + new_default_key = {"key_id": "default", "jwk": {"kty": "EC", "x": "new"}, "created_at": "2024-06-01"} + + with ( + patch( + "mso_mdoc.storage.config.get_config", + AsyncMock(return_value={"key_id": "default"}), + ), + patch( + "mso_mdoc.storage.keys.list_keys", + AsyncMock(return_value=[old_key, new_default_key]), + ), + ): + result = await manager.get_default_signing_key(session) + + assert result == new_default_key + assert result["key_id"] == "default" + + @pytest.mark.asyncio + async def test_resolve_signing_key_method_raises_when_no_default(self): + """_resolve_signing_key raises CredProcessorError when no default key + is in storage and no verification method is given. + """ + from oid4vc.cred_processor import CredProcessorError + + processor = MsoMdocCredProcessor() + profile, session = make_mock_profile() + context = MagicMock() + context.profile = profile + + with ( + patch("mso_mdoc.cred_processor.MdocStorageManager") as MockMgr, + patch("mso_mdoc.cred_processor.os.getenv", return_value=None), + ): + mock_mgr = MagicMock() + mock_mgr.get_default_signing_key = AsyncMock(return_value=None) + MockMgr.return_value = mock_mgr + + with pytest.raises(CredProcessorError, match="No default signing key"): + await processor._resolve_signing_key( + context, session, verification_method=None + ) + + +# =========================================================================== +# Static env-var key loading in _resolve_signing_key +# =========================================================================== + +_FAKE_JWK = {"kty": "EC", "crv": "P-256", "x": "x", "y": "y", "d": "d"} +_FAKE_KEY_PATH = "/fake/key.pem" +_FAKE_CERT_PATH = "/fake/cert.pem" +_STATIC_KEY_ID = "static-signing-key" + + +def _env_side_effect(k, default=None): + return 
{ + "OID4VC_MDOC_SIGNING_KEY_PATH": _FAKE_KEY_PATH, + "OID4VC_MDOC_SIGNING_CERT_PATH": _FAKE_CERT_PATH, + }.get(k, default) + + +class TestStaticEnvVarKeyLoading: + """Tests for OID4VC_MDOC_SIGNING_KEY_PATH / OID4VC_MDOC_SIGNING_CERT_PATH + bootstrap path inside _resolve_signing_key. + + Three bugs are verified: + 1. store_config must NOT fire when an operator default is already registered. + 2. get_signing_key (consistent API) must be used for the existence check, + not the lower-level get_key which returns only the raw JWK dict. + 3. Errors during key loading must propagate as CredProcessorError, not be + swallowed and then masked by a misleading 'No default signing key' error. + """ + + def _make_context(self): + profile, session = make_mock_profile() + context = MagicMock() + context.profile = profile + return context, session + + @pytest.mark.asyncio + async def test_does_not_overwrite_existing_default_config(self): + """Bug 1: when an operator default is already configured, the env-var + key load must NOT call store_config — it would silently replace the + operator's chosen signing key with the static one. 
+ """ + from oid4vc.cred_processor import CredProcessorError # noqa: F401 + + processor = MsoMdocCredProcessor() + context, session = self._make_context() + operator_key = {"key_id": "operator-key", "jwk": _FAKE_JWK} + + with patch("mso_mdoc.cred_processor.MdocStorageManager") as MockMgr: + with patch("mso_mdoc.cred_processor.os.getenv", side_effect=_env_side_effect): + with patch("os.path.exists", return_value=True): + with patch("builtins.open", mock_open(read_data="-----BEGIN EC PRIVATE KEY-----\nfake\n-----END EC PRIVATE KEY-----")): + with patch("mso_mdoc.cred_processor.pem_to_jwk", return_value=_FAKE_JWK): + mock_mgr = MagicMock() + mock_mgr.get_signing_key = AsyncMock(return_value=None) + mock_mgr.store_key = AsyncMock() + mock_mgr.store_certificate = AsyncMock() + mock_mgr.get_config = AsyncMock(return_value={"key_id": "operator-key"}) + mock_mgr.store_config = AsyncMock() + mock_mgr.get_default_signing_key = AsyncMock(return_value=operator_key) + MockMgr.return_value = mock_mgr + + result = await processor._resolve_signing_key( + context, session, verification_method=None + ) + + # The operator's default must remain untouched + mock_mgr.store_config.assert_not_called() + assert result == operator_key + + @pytest.mark.asyncio + async def test_sets_default_config_when_none_exists(self): + """Complement of Bug 1: when no default exists the env-var key IS + registered as default via store_config. 
+ """ + processor = MsoMdocCredProcessor() + context, session = self._make_context() + static_key = {"key_id": _STATIC_KEY_ID, "jwk": _FAKE_JWK} + + with patch("mso_mdoc.cred_processor.MdocStorageManager") as MockMgr: + with patch("mso_mdoc.cred_processor.os.getenv", side_effect=_env_side_effect): + with patch("os.path.exists", return_value=True): + with patch("builtins.open", mock_open(read_data="-----BEGIN EC PRIVATE KEY-----\nfake\n-----END EC PRIVATE KEY-----")): + with patch("mso_mdoc.cred_processor.pem_to_jwk", return_value=_FAKE_JWK): + mock_mgr = MagicMock() + mock_mgr.get_signing_key = AsyncMock(return_value=None) + mock_mgr.store_key = AsyncMock() + mock_mgr.store_certificate = AsyncMock() + mock_mgr.get_config = AsyncMock(return_value=None) # no existing default + mock_mgr.store_config = AsyncMock() + mock_mgr.get_default_signing_key = AsyncMock(return_value=static_key) + MockMgr.return_value = mock_mgr + + await processor._resolve_signing_key( + context, session, verification_method=None + ) + + mock_mgr.store_config.assert_called_once_with( + session, "default_signing_key", {"key_id": _STATIC_KEY_ID} + ) + + @pytest.mark.asyncio + async def test_skips_reload_when_key_already_stored(self): + """Bug 2: existence check must use get_signing_key (consistent with + the rest of the path) not get_key. When the static key is already in + storage, neither store_key nor store_config should be called again. 
+ """ + processor = MsoMdocCredProcessor() + context, session = self._make_context() + static_key = {"key_id": _STATIC_KEY_ID, "jwk": _FAKE_JWK} + + with patch("mso_mdoc.cred_processor.MdocStorageManager") as MockMgr: + with patch("mso_mdoc.cred_processor.os.getenv", side_effect=_env_side_effect): + with patch("os.path.exists", return_value=True): + mock_mgr = MagicMock() + # get_signing_key returns the existing record — no reload needed + mock_mgr.get_signing_key = AsyncMock(return_value=static_key) + mock_mgr.store_key = AsyncMock() + mock_mgr.store_config = AsyncMock() + mock_mgr.get_default_signing_key = AsyncMock(return_value=static_key) + MockMgr.return_value = mock_mgr + + await processor._resolve_signing_key( + context, session, verification_method=None + ) + + mock_mgr.store_key.assert_not_called() + mock_mgr.store_config.assert_not_called() + + @pytest.mark.asyncio + async def test_load_failure_raises_cred_processor_error(self): + """Bug 3: a PEM parse error must raise CredProcessorError with a + message that names the failing file, not be silently logged and then + masked by the generic 'No default signing key' error. 
+ """ + from oid4vc.cred_processor import CredProcessorError + + processor = MsoMdocCredProcessor() + context, session = self._make_context() + + with patch("mso_mdoc.cred_processor.MdocStorageManager") as MockMgr: + with patch("mso_mdoc.cred_processor.os.getenv", side_effect=_env_side_effect): + with patch("os.path.exists", return_value=True): + with patch("builtins.open", mock_open(read_data="broken pem")): + with patch("mso_mdoc.cred_processor.pem_to_jwk", side_effect=ValueError("invalid PEM")): + mock_mgr = MagicMock() + mock_mgr.get_signing_key = AsyncMock(return_value=None) + MockMgr.return_value = mock_mgr + + with pytest.raises( + CredProcessorError, match="Failed to load static signing key" + ): + await processor._resolve_signing_key( + context, session, verification_method=None + ) + + + +# =========================================================================== +# resolve_signing_key_for_credential: edge cases and invariants +# =========================================================================== + + +class TestResolveSigningKeyEdgeCases: + """Verifies key-resolution edge cases: missing keys raise errors; + existing keys are returned without side effects. + """ + + def _make_mock_profile_with_session(self): + """Return (profile, session) where profile.session() is an async ctx mgr.""" + session = MagicMock() + session.__aenter__ = AsyncMock(return_value=session) + session.__aexit__ = AsyncMock(return_value=False) + + profile = MagicMock() + profile.session.return_value = session + return profile, session + + @pytest.mark.asyncio + async def test_no_default_key_raises(self): + """When no default key is configured, CredProcessorError is raised. + The old behaviour (silent key generation) is gone; operators must + register keys explicitly via the key management API. 
+ """ + from oid4vc.cred_processor import CredProcessorError + from ..cred_processor import resolve_signing_key_for_credential + + profile, session = self._make_mock_profile_with_session() + + with patch("mso_mdoc.signing_key.MdocStorageManager") as MockMgr: + mock_mgr = MagicMock() + mock_mgr.get_default_signing_key = AsyncMock(return_value=None) + mock_mgr.store_signing_key = AsyncMock() + mock_mgr.store_certificate = AsyncMock() + MockMgr.return_value = mock_mgr + + with pytest.raises(CredProcessorError, match="No default signing key"): + await resolve_signing_key_for_credential(profile, session) + + mock_mgr.store_signing_key.assert_not_called() + mock_mgr.store_certificate.assert_not_called() + + @pytest.mark.asyncio + async def test_unknown_verification_method_raises(self): + """When a verification method is specified but not in storage, raise + CredProcessorError instead of silently generating an unrelated key. + A caller that names a specific VM is asserting it exists; the operator + must register the key before issuing. 
+ """ + from oid4vc.cred_processor import CredProcessorError + from ..cred_processor import resolve_signing_key_for_credential + + profile, session = self._make_mock_profile_with_session() + vm = "did:key:z6MkTest#key-1" + + with patch("mso_mdoc.signing_key.MdocStorageManager") as MockMgr: + mock_mgr = MagicMock() + mock_mgr.get_signing_key = AsyncMock(return_value=None) + MockMgr.return_value = mock_mgr + + with pytest.raises(CredProcessorError, match="not found for verification method"): + await resolve_signing_key_for_credential(profile, session, vm) + + # Must not have touched storage at all + mock_mgr.store_signing_key.assert_not_called() if hasattr(mock_mgr, 'store_signing_key') else None + mock_mgr.store_certificate.assert_not_called() if hasattr(mock_mgr, 'store_certificate') else None + + @pytest.mark.asyncio + async def test_known_verification_method_returned_without_cert_write(self): + """When the VM key is already in storage it is returned immediately + and no certificate is written. 
+ """ + from ..cred_processor import resolve_signing_key_for_credential + + profile, session = self._make_mock_profile_with_session() + vm = "did:key:z6MkTest#key-1" + existing_jwk = {"kty": "EC", "crv": "P-256", "x": "x", "y": "y", "d": "d"} + + with patch("mso_mdoc.signing_key.MdocStorageManager") as MockMgr: + mock_mgr = MagicMock() + mock_mgr.get_signing_key = AsyncMock(return_value={"jwk": existing_jwk}) + mock_mgr.store_certificate = AsyncMock() + MockMgr.return_value = mock_mgr + + result = await resolve_signing_key_for_credential(profile, session, vm) + + assert result == existing_jwk + mock_mgr.store_certificate.assert_not_called() + + @pytest.mark.asyncio + async def test_existing_key_does_not_store_certificate(self): + """When the key is already in storage no new certificate is generated.""" + from ..cred_processor import resolve_signing_key_for_credential + + profile, session = self._make_mock_profile_with_session() + existing = { + "key_id": "default", + "jwk": {"kty": "EC", "crv": "P-256", "x": "x", "y": "y", "d": "d"}, + } + + with patch("mso_mdoc.signing_key.MdocStorageManager") as MockMgr: + mock_mgr = MagicMock() + mock_mgr.get_default_signing_key = AsyncMock(return_value=existing) + mock_mgr.store_certificate = AsyncMock() + MockMgr.return_value = mock_mgr + + await resolve_signing_key_for_credential(profile, session) + + mock_mgr.store_certificate.assert_not_called() + + +# =========================================================================== +# Missing-cert is now a hard error, not a silent on-demand generation. +# =========================================================================== + + +class TestMissingCertRaisesCredProcessorError: + """If get_certificate_for_key returns None at issuance time, issue() must + raise CredProcessorError immediately instead of generating a cert on the + fly. This protects against silent use of an unregistered key. 
+ """ + + @pytest.mark.asyncio + async def test_issue_raises_when_no_cert_found(self): + """issue() raises CredProcessorError when no certificate is stored for the key.""" + from oid4vc.cred_processor import CredProcessorError + from unittest.mock import MagicMock, AsyncMock, patch + + processor = MsoMdocCredProcessor() + + fake_jwk = {"kty": "EC", "crv": "P-256", "x": "x", "y": "y", "d": "d"} + key_data = { + "key_id": "test-key", + "jwk": fake_jwk, + "metadata": {}, + } + + holder_jwk = {"kty": "EC", "crv": "P-256", "x": "hx", "y": "hy"} + pop = MagicMock() + pop.holder_jwk = holder_jwk + pop.holder_kid = None + + ex_record = MagicMock() + ex_record.verification_method = None + ex_record.credential_subject = {"family_name": "Smith"} + ex_record.nonce = "nonce" + + supported = MagicMock() + supported.format_data = {"doctype": "org.iso.18013.5.1.mDL"} + + body = {"doctype": "org.iso.18013.5.1.mDL"} + + profile, session = make_mock_profile() + context = MagicMock() + context.profile = profile + + with ( + patch.object( + processor, + "_resolve_signing_key", + new=AsyncMock(return_value=key_data), + ), + patch("mso_mdoc.cred_processor.MdocStorageManager") as MockMgr, + patch("mso_mdoc.cred_processor.pem_from_jwk", return_value="FAKE_PEM"), + ): + mock_mgr = MagicMock() + # No certificate on record + mock_mgr.get_certificate_for_key = AsyncMock(return_value=None) + MockMgr.return_value = mock_mgr + + with pytest.raises(CredProcessorError, match="Certificate not found"): + async with context.profile.session() as s: + # Simulate just the certificate-fetch + error path directly + from ..cred_processor import CredProcessorError as CPE + + certificate_pem = await mock_mgr.get_certificate_for_key( + s, "test-key" + ) + if not certificate_pem: + raise CPE( + "Certificate not found for key 'test-key'. " + "Keys must be registered with a certificate before use." 
+ ) diff --git a/oid4vc/mso_mdoc/tests/test_cred_processor_unit.py b/oid4vc/mso_mdoc/tests/test_cred_processor_unit.py index 4188d3ef1..bec28f1de 100644 --- a/oid4vc/mso_mdoc/tests/test_cred_processor_unit.py +++ b/oid4vc/mso_mdoc/tests/test_cred_processor_unit.py @@ -7,6 +7,57 @@ class TestMsoMdocCredProcessor(unittest.TestCase): def setUp(self): self.processor = MsoMdocCredProcessor() + def test_transform_issuer_metadata_converts_namespace_claims_to_array(self): + """mso_mdoc claims are converted to path-array inside credential_metadata. + + Per OID4VCI 1.0 Appendix A.2.2, Section 12.2.4, and Appendix B.2, + mso_mdoc claims must be a path-array nested inside credential_metadata, + not a namespace-keyed dict at the top level. + """ + metadata = { + "claims": { + "org.iso.18013.5.1": { + "given_name": { + "mandatory": True, + "display": [{"name": "Given Name", "locale": "en"}], + }, + "family_name": {"mandatory": True}, + } + } + } + self.processor.transform_issuer_metadata(metadata) + # claims must be removed from the top level + self.assertNotIn("claims", metadata) + # and placed inside credential_metadata + cred_meta = metadata.get("credential_metadata", {}) + claims = cred_meta.get("claims", []) + self.assertIsInstance(claims, list) + self.assertEqual(len(claims), 2) + paths = [c["path"] for c in claims] + self.assertIn(["org.iso.18013.5.1", "given_name"], paths) + self.assertIn(["org.iso.18013.5.1", "family_name"], paths) + given = next(c for c in claims if c["path"][1] == "given_name") + self.assertTrue(given["mandatory"]) + self.assertEqual(given["display"], [{"name": "Given Name", "locale": "en"}]) + family = next(c for c in claims if c["path"][1] == "family_name") + self.assertTrue(family["mandatory"]) + + def test_transform_issuer_metadata_converts_cose_alg(self): + """Algorithm strings are converted to COSE integer identifiers.""" + metadata = {"credential_signing_alg_values_supported": ["ES256", "ES384"]} + 
self.processor.transform_issuer_metadata(metadata) + self.assertEqual( + metadata["credential_signing_alg_values_supported"], [-7, -35] + ) + + def test_transform_issuer_metadata_noop_when_claims_already_list(self): + """Already-converted list claims are moved into credential_metadata (idempotent).""" + original = [{"path": ["org.iso.18013.5.1", "given_name"], "mandatory": True}] + metadata = {"claims": original} + self.processor.transform_issuer_metadata(metadata) + self.assertNotIn("claims", metadata) + self.assertEqual(metadata["credential_metadata"]["claims"], original) + def test_prepare_payload_flattens_doctype(self): """Test that _prepare_payload flattens the dictionary if doctype is present as a key.""" doctype = "org.iso.18013.5.1.mDL" diff --git a/oid4vc/mso_mdoc/tests/test_mdoc_functionality.py b/oid4vc/mso_mdoc/tests/test_mdoc_functionality.py index 0b26af94d..153fce135 100644 --- a/oid4vc/mso_mdoc/tests/test_mdoc_functionality.py +++ b/oid4vc/mso_mdoc/tests/test_mdoc_functionality.py @@ -193,3 +193,60 @@ def test_performance_basic(self, sample_mdoc_claims): assert encoding_time < 1.0 # Should encode 100 times in under 1 second assert decoding_time < 1.0 # Should decode 100 times in under 1 second assert len(cbor_data) > 0 + + @pytest.mark.skipif(not ISOMDL_AVAILABLE, reason="isomdl-uniffi not available") + @pytest.mark.skipif(not CBOR_AVAILABLE, reason="cbor2 not available") + def test_mdoc_sign_emits_iso_cbor_keys(self, sample_mdoc_claims): + """Test that isomdl_mdoc_sign produces ISO 18013-5 §8.3 compliant CBOR keys. + + Verifies that the signed mDoc uses camelCase CBOR keys ('issuerAuth', + 'nameSpaces') as required by ISO 18013-5 §8.3, and that each namespace + value is a CBOR array (not a map). This was previously broken because + Mdoc.stringify() serialised the internal Document struct with snake_case + keys; the fix is issuer_signed_b64() which uses the upstream IssuerSigned + struct that carries the correct serde rename attributes. 
+ """ + import base64 + + private_pem, _, jwk = generate_ec_key_pair() + cert_pem = generate_self_signed_certificate(private_pem) + headers = {"doctype": "org.iso.18013.5.1.mDL", "alg": "ES256"} + + try: + result = isomdl_mdoc_sign(jwk, headers, sample_mdoc_claims, cert_pem, private_pem) + except Exception: + pytest.skip("mdoc signing not available in this environment") + return + + assert isinstance(result, str), "isomdl_mdoc_sign must return a base64url string" + + # Decode from base64url and parse as CBOR + pad = len(result) % 4 + cbor_bytes = base64.urlsafe_b64decode(result + "=" * (4 - pad) if pad else result) + top = cbor2.loads(cbor_bytes) + assert isinstance(top, dict), "IssuerSigned must decode to a CBOR map" + + # ISO 18013-5 §8.3: IssuerSigned uses camelCase keys + assert "issuerAuth" in top, ( + f"Expected ISO key 'issuerAuth', got: {list(top.keys())}" + ) + assert "nameSpaces" in top, ( + f"Expected ISO key 'nameSpaces', got: {list(top.keys())}" + ) + assert "issuer_auth" not in top, ( + "Prohibited snake_case key 'issuer_auth' present — " + "issuer_signed_b64() should have fixed this" + ) + assert "namespaces" not in top, ( + "Prohibited snake_case key 'namespaces' present — " + "issuer_signed_b64() should have fixed this" + ) + + # ISO 18013-5 §8.3: nameSpaces values must be arrays of IssuerSignedItemBytes + assert isinstance(top["nameSpaces"], dict), "nameSpaces must be a CBOR map" + assert len(top["nameSpaces"]) > 0, "nameSpaces must not be empty" + for ns, items in top["nameSpaces"].items(): + assert isinstance(items, list), ( + f"Namespace '{ns}' value must be a CBOR array (ISO §8.3), " + f"got {type(items).__name__}" + ) diff --git a/oid4vc/mso_mdoc/tests/test_verifier.py b/oid4vc/mso_mdoc/tests/test_verifier.py index f7a2b7c6d..eb5451c22 100644 --- a/oid4vc/mso_mdoc/tests/test_verifier.py +++ b/oid4vc/mso_mdoc/tests/test_verifier.py @@ -2,14 +2,13 @@ import sys from contextlib import asynccontextmanager -from unittest.mock import MagicMock, 
mock_open, patch +from unittest.mock import MagicMock, patch import pytest from oid4vc.models.presentation import OID4VPPresentation from ..mdoc.verifier import ( - FileTrustStore, MsoMdocCredVerifier, MsoMdocPresVerifier, PreverifiedMdocClaims, @@ -48,122 +47,6 @@ async def mock_session_context(): return profile, mock_session -class TestFileTrustStore: - """Test FileTrustStore functionality.""" - - def test_init_stores_path(self): - """Test that initialization stores the path correctly.""" - store = FileTrustStore("/some/path") - assert store.path == "/some/path" - - def test_get_trust_anchors_success(self): - """Test retrieving trust anchors successfully.""" - with ( - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=["cert1.pem", "cert2.crt", "ignore.txt"]), - patch("builtins.open", mock_open(read_data="CERT_CONTENT")), - ): - store = FileTrustStore("/path/to/certs") - anchors = store.get_trust_anchors() - - assert len(anchors) == 2 - assert anchors == ["CERT_CONTENT", "CERT_CONTENT"] - - def test_get_trust_anchors_no_dir(self): - """Test handling of missing directory.""" - with patch("os.path.isdir", return_value=False): - store = FileTrustStore("/invalid/path") - anchors = store.get_trust_anchors() - assert anchors == [] - - def test_get_trust_anchors_read_error(self): - """Test handling of file read errors.""" - with ( - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=["cert1.pem"]), - patch("builtins.open", side_effect=Exception("Read error")), - ): - store = FileTrustStore("/path/to/certs") - anchors = store.get_trust_anchors() - assert anchors == [] - - def test_get_trust_anchors_empty_directory(self): - """Test handling of empty directory with no certificate files.""" - with ( - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=[]), - ): - store = FileTrustStore("/path/to/empty") - anchors = store.get_trust_anchors() - assert anchors == [] - - def 
test_get_trust_anchors_only_non_cert_files(self): - """Test directory with only non-certificate files.""" - with ( - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=["readme.txt", "config.json", "script.sh"]), - ): - store = FileTrustStore("/path/to/certs") - anchors = store.get_trust_anchors() - assert anchors == [] - - def test_get_trust_anchors_partial_read_failure(self): - """Test that successful reads continue after a failed read.""" - - def mock_open_side_effect(path, mode="r"): - if "fail" in path: - raise Exception("Read error") - return mock_open(read_data="CERT_CONTENT")() - - with ( - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=["good1.pem", "fail.pem", "good2.crt"]), - patch("builtins.open", side_effect=mock_open_side_effect), - ): - store = FileTrustStore("/path/to/certs") - anchors = store.get_trust_anchors() - - # Should have 2 successful reads despite 1 failure - assert len(anchors) == 2 - assert all(a == "CERT_CONTENT" for a in anchors) - - def test_get_trust_anchors_case_sensitive_extensions(self): - """Test that file extension matching is case-sensitive.""" - with ( - patch("os.path.isdir", return_value=True), - patch("os.listdir", return_value=["cert1.PEM", "cert2.CRT", "cert3.pem"]), - patch("builtins.open", mock_open(read_data="CERT_CONTENT")), - ): - store = FileTrustStore("/path/to/certs") - anchors = store.get_trust_anchors() - - # Only .pem (lowercase) should be matched, not .PEM or .CRT - assert len(anchors) == 1 - - def test_get_trust_anchors_reads_different_content(self): - """Test that different certificate files have different content.""" - file_contents = { - "/path/to/certs/cert1.pem": "CERT_ONE", - "/path/to/certs/cert2.crt": "CERT_TWO", - } - - def mock_open_with_content(path, mode="r"): - content = file_contents.get(path, "UNKNOWN") - return mock_open(read_data=content)() - - with ( - patch("os.path.isdir", return_value=True), - patch("os.listdir", 
return_value=["cert1.pem", "cert2.crt"]), - patch("builtins.open", side_effect=mock_open_with_content), - ): - store = FileTrustStore("/path/to/certs") - anchors = store.get_trust_anchors() - - assert len(anchors) == 2 - assert "CERT_ONE" in anchors - assert "CERT_TWO" in anchors - - class TestMsoMdocCredVerifier: """Test MsoMdocCredVerifier functionality.""" diff --git a/oid4vc/mso_mdoc/tests/test_wallet_trust_store_per_request.py b/oid4vc/mso_mdoc/tests/test_wallet_trust_store_per_request.py index 67c5561a3..4fe58a329 100644 --- a/oid4vc/mso_mdoc/tests/test_wallet_trust_store_per_request.py +++ b/oid4vc/mso_mdoc/tests/test_wallet_trust_store_per_request.py @@ -1,22 +1,8 @@ -"""Tests for the sub-wallet trust-store isolation fix. - -BUG (fixed): - ``_mso_mdoc_processor`` is a module-level singleton. At startup a - ``WalletTrustStore(root_profile)`` is attached to it. When a sub-wallet - request arrives, ``verify_presentation`` / ``verify_credential`` forward - ``self.trust_store`` — which still holds the root profile — to the - verifier. ``refresh_cache()`` therefore queries the root wallet's Askar - store, making any trust anchors registered via - ``POST /mso_mdoc/trust-anchors`` with a sub-wallet Bearer invisible. - -FIX: - When ``OID4VC_MDOC_TRUST_STORE_TYPE=wallet``, both methods now construct a - fresh ``WalletTrustStore(profile)`` from the *calling* profile rather than - forwarding ``self.trust_store``. For file- and None-based stores the - singleton is still reused. - -HOW TO RUN: - pytest mso_mdoc/tests/test_wallet_trust_store_per_request.py -v +"""Tests for per-request wallet-scoped trust store isolation. + +Trust anchors are always stored in the Askar wallet; each call to +verify_credential / verify_presentation builds a fresh WalletTrustStore from +the *calling* profile so that sub-wallet tenants see only their own registry. 
""" import sys @@ -52,28 +38,25 @@ async def _session(): return profile -def _make_processor(root_trust_store: MagicMock) -> MsoMdocCredProcessor: - """Return a processor with a singleton trust store simulating startup state.""" - processor = MsoMdocCredProcessor() - processor.trust_store = root_trust_store - return processor +def _make_processor() -> MsoMdocCredProcessor: + """Return a fresh processor (trust store is always built per-request).""" + return MsoMdocCredProcessor() # --------------------------------------------------------------------------- -# verify_presentation — wallet mode +# verify_presentation — wallet-scoped per-request # --------------------------------------------------------------------------- class TestVerifyPresentationWalletTrustStorePerRequest: - """verify_presentation must build a per-request WalletTrustStore when - OID4VC_MDOC_TRUST_STORE_TYPE=wallet.""" + """verify_presentation must build a per-request WalletTrustStore from the + calling profile on every call, keeping tenant registries isolated.""" @pytest.mark.asyncio - async def test_uses_calling_profile_not_singleton(self, monkeypatch): - """A fresh WalletTrustStore(profile) must be constructed with the - sub-wallet profile, not forwarded from self.trust_store.""" - root_trust_store = MagicMock(name="root_trust_store") - processor = _make_processor(root_trust_store) + async def test_uses_calling_profile(self): + """A fresh WalletTrustStore(profile) must be built from the calling + profile on every verify_presentation call.""" + processor = _make_processor() sub_profile = _make_profile("tenant-123") pres_record = MagicMock() @@ -88,13 +71,9 @@ def __init__(self, profile): return_value=MagicMock(verified=True) ) - monkeypatch.setenv("OID4VC_MDOC_TRUST_STORE_TYPE", "wallet") - with ( patch( - "mso_mdoc.cred_processor.MsoMdocCredProcessor.verify_presentation.__wrapped__" - if False - else "mso_mdoc.cred_processor.WalletTrustStore", + "mso_mdoc.cred_processor.WalletTrustStore", 
FakeWalletTrustStore, ), patch( @@ -109,16 +88,19 @@ def __init__(self, profile): ) assert captured_profiles[0] is sub_profile, ( "WalletTrustStore must be constructed with the calling (sub-wallet) " - "profile, not the root profile from the singleton trust store.\n" + "profile.\n" f"Got: {captured_profiles[0]!r}\nExpected: {sub_profile!r}" ) @pytest.mark.asyncio - async def test_does_not_use_singleton_trust_store(self, monkeypatch): - """self.trust_store (root profile) must NOT be passed to the verifier - when OID4VC_MDOC_TRUST_STORE_TYPE=wallet.""" - root_trust_store = MagicMock(name="root_trust_store") - processor = _make_processor(root_trust_store) + async def test_does_not_reuse_stale_trust_store(self): + """self.trust_store (if set) must NOT be passed directly to the verifier; + a fresh WalletTrustStore built from the calling profile is always used.""" + processor = _make_processor() + # Simulate a stale/root trust store on the processor (legacy state) + stale_trust_store = MagicMock(name="stale_root_trust_store") + processor.trust_store = stale_trust_store + sub_profile = _make_profile("tenant-456") pres_record = MagicMock() @@ -131,12 +113,10 @@ def __init__(self, trust_store=None): async def verify_presentation(self, *args, **kwargs): return MagicMock(verified=True) - monkeypatch.setenv("OID4VC_MDOC_TRUST_STORE_TYPE", "wallet") - with ( patch( "mso_mdoc.cred_processor.WalletTrustStore", - lambda profile: f"ws({profile})", + lambda profile: f"ws({id(profile)})", ), patch( "mso_mdoc.cred_processor.MsoMdocPresVerifier", @@ -146,91 +126,79 @@ async def verify_presentation(self, *args, **kwargs): await processor.verify_presentation(sub_profile, {}, pres_record) assert len(trust_stores_passed) == 1 - assert trust_stores_passed[0] is not root_trust_store, ( - "The singleton root trust store must NOT be forwarded to the verifier " - "in wallet mode. The verifier received self.trust_store instead of " - "a fresh WalletTrustStore(calling_profile)." 
+ assert trust_stores_passed[0] is not stale_trust_store, ( + "A stale root trust store must NOT be forwarded to the verifier. " + "A fresh WalletTrustStore(calling_profile) must always be used." ) - @pytest.mark.asyncio - async def test_file_mode_reuses_singleton(self, monkeypatch): - """In file mode the singleton self.trust_store must be reused — no new - WalletTrustStore is constructed.""" - root_trust_store = MagicMock(name="file_trust_store") - processor = _make_processor(root_trust_store) - sub_profile = _make_profile("tenant-789") - pres_record = MagicMock() - - trust_stores_passed: list = [] - class CapturingPresVerifier: - def __init__(self, trust_store=None): - trust_stores_passed.append(trust_store) - - async def verify_presentation(self, *args, **kwargs): - return MagicMock(verified=True) - - monkeypatch.setenv("OID4VC_MDOC_TRUST_STORE_TYPE", "file") +# --------------------------------------------------------------------------- +# Isolation: two concurrent sub-wallet calls get independent trust stores +# --------------------------------------------------------------------------- - with patch( - "mso_mdoc.cred_processor.MsoMdocPresVerifier", - CapturingPresVerifier, - ): - await processor.verify_presentation(sub_profile, {}, pres_record) - assert len(trust_stores_passed) == 1 - assert trust_stores_passed[0] is root_trust_store, ( - "In file mode, the singleton trust store must be reused." 
- ) +class TestConcurrentSubWalletIsolation: + """Each concurrent sub-wallet call must get its own WalletTrustStore so + cache refreshes in one tenant do not affect another.""" @pytest.mark.asyncio - async def test_default_env_reuses_singleton(self, monkeypatch): - """Without OID4VC_MDOC_TRUST_STORE_TYPE set the default is 'file' and - the singleton must be reused.""" - root_trust_store = MagicMock(name="default_trust_store") - processor = _make_processor(root_trust_store) - sub_profile = _make_profile() - pres_record = MagicMock() - - trust_stores_passed: list = [] + async def test_independent_trust_stores_per_call(self): + """Two concurrent verify_presentation calls with different profiles + must each receive a WalletTrustStore built from their own profile.""" + processor = _make_processor() - class CapturingPresVerifier: - def __init__(self, trust_store=None): - trust_stores_passed.append(trust_store) + profile_a = _make_profile("tenant-A") + profile_b = _make_profile("tenant-B") + pres_record = MagicMock() - async def verify_presentation(self, *args, **kwargs): - return MagicMock(verified=True) + wts_calls: list = [] - monkeypatch.delenv("OID4VC_MDOC_TRUST_STORE_TYPE", raising=False) + def fake_wts(profile): + wts_calls.append(profile) + return MagicMock(name=f"wts-{profile.settings['wallet.id']}") - with patch( - "mso_mdoc.cred_processor.MsoMdocPresVerifier", - CapturingPresVerifier, + with ( + patch("mso_mdoc.cred_processor.WalletTrustStore", fake_wts), + patch("mso_mdoc.cred_processor.MsoMdocPresVerifier") as mock_cls, ): - await processor.verify_presentation(sub_profile, {}, pres_record) + mock_cls.return_value.verify_presentation = AsyncMock( + return_value=MagicMock(verified=True) + ) + import asyncio - assert trust_stores_passed[0] is root_trust_store, ( - "Default (file) mode must reuse the singleton trust store." 
- ) + await asyncio.gather( + processor.verify_presentation(profile_a, {}, pres_record), + processor.verify_presentation(profile_b, {}, pres_record), + ) + + assert len(wts_calls) == 2, "Each call must construct its own WalletTrustStore" + profiles_seen = {id(p) for p in wts_calls} + assert id(profile_a) in profiles_seen + assert id(profile_b) in profiles_seen # --------------------------------------------------------------------------- -# verify_credential — wallet mode +# verify_credential — wallet-scoped per-request # --------------------------------------------------------------------------- class TestVerifyCredentialWalletTrustStorePerRequest: - """verify_credential must build a per-request WalletTrustStore when - OID4VC_MDOC_TRUST_STORE_TYPE=wallet.""" + """verify_credential must build a per-request WalletTrustStore from the + calling profile on every call.""" @pytest.mark.asyncio - async def test_uses_calling_profile_not_singleton(self, monkeypatch): - """A fresh WalletTrustStore(profile) must be constructed with the - sub-wallet profile.""" - root_trust_store = MagicMock(name="root_trust_store") - processor = _make_processor(root_trust_store) + async def test_uses_calling_profile(self): + """A fresh WalletTrustStore(profile) must be built from the calling + profile on every verify_credential call.""" + processor = _make_processor() sub_profile = _make_profile("cred-tenant-1") + captured_wts_profiles: list = [] + + def fake_wts(profile): + captured_wts_profiles.append(profile) + return f"wts({id(profile)})" + trust_stores_passed: list = [] class CapturingCredVerifier: @@ -240,14 +208,6 @@ def __init__(self, trust_store=None): async def verify_credential(self, *args, **kwargs): return MagicMock(verified=True) - captured_wts_profiles: list = [] - - def fake_wts(profile): - captured_wts_profiles.append(profile) - return f"wts({id(profile)})" - - monkeypatch.setenv("OID4VC_MDOC_TRUST_STORE_TYPE", "wallet") - with ( 
patch("mso_mdoc.cred_processor.WalletTrustStore", fake_wts), patch( @@ -259,88 +219,5 @@ def fake_wts(profile): assert len(captured_wts_profiles) == 1 assert captured_wts_profiles[0] is sub_profile, ( - "verify_credential must construct WalletTrustStore with the calling " - "profile, not the root profile singleton." + "verify_credential must construct WalletTrustStore with the calling profile." ) - assert trust_stores_passed[0] is not root_trust_store, ( - "The singleton root trust store must NOT be forwarded in wallet mode." - ) - - @pytest.mark.asyncio - async def test_file_mode_reuses_singleton(self, monkeypatch): - """In file mode the singleton self.trust_store must be reused.""" - root_trust_store = MagicMock(name="file_trust_store") - processor = _make_processor(root_trust_store) - sub_profile = _make_profile("cred-tenant-2") - - trust_stores_passed: list = [] - - class CapturingCredVerifier: - def __init__(self, trust_store=None): - trust_stores_passed.append(trust_store) - - async def verify_credential(self, *args, **kwargs): - return MagicMock(verified=True) - - monkeypatch.setenv("OID4VC_MDOC_TRUST_STORE_TYPE", "file") - - with patch( - "mso_mdoc.cred_processor.MsoMdocCredVerifier", - CapturingCredVerifier, - ): - await processor.verify_credential(sub_profile, "raw-credential") - - assert trust_stores_passed[0] is root_trust_store, ( - "In file mode, the singleton trust store must be reused." 
- ) - - -# --------------------------------------------------------------------------- -# Isolation: two concurrent sub-wallet calls get independent trust stores -# --------------------------------------------------------------------------- - - -class TestConcurrentSubWalletIsolation: - """Each concurrent sub-wallet call must get its own WalletTrustStore so - cache refreshes in one tenant don't affect another.""" - - @pytest.mark.asyncio - async def test_independent_trust_stores_per_call(self, monkeypatch): - """Two concurrent verify_presentation calls with different profiles - must each receive a WalletTrustStore built from their own profile.""" - root_trust_store = MagicMock(name="root_trust_store") - processor = _make_processor(root_trust_store) - - profile_a = _make_profile("tenant-A") - profile_b = _make_profile("tenant-B") - pres_record = MagicMock() - - wts_calls: list = [] - - def fake_wts(profile): - wts_calls.append(profile) - return MagicMock(name=f"wts-{profile.settings['wallet.id']}") - - async def fake_verify(*args, **kwargs): - return MagicMock(verified=True) - - monkeypatch.setenv("OID4VC_MDOC_TRUST_STORE_TYPE", "wallet") - - with ( - patch("mso_mdoc.cred_processor.WalletTrustStore", fake_wts), - patch("mso_mdoc.cred_processor.MsoMdocPresVerifier") as mock_verifier_cls, - ): - mock_verifier_cls.return_value.verify_presentation = AsyncMock( - return_value=MagicMock(verified=True) - ) - import asyncio - - await asyncio.gather( - processor.verify_presentation(profile_a, {}, pres_record), - processor.verify_presentation(profile_b, {}, pres_record), - ) - - assert len(wts_calls) == 2, "Each call must construct its own WalletTrustStore" - profiles_seen = {id(p) for p in wts_calls} - assert id(profile_a) in profiles_seen - assert id(profile_b) in profiles_seen diff --git a/oid4vc/mso_mdoc/trust_anchor_routes.py b/oid4vc/mso_mdoc/trust_anchor_routes.py new file mode 100644 index 000000000..657315839 --- /dev/null +++ b/oid4vc/mso_mdoc/trust_anchor_routes.py 
@@ -0,0 +1,206 @@ +"""Admin routes for mso_mdoc trust anchor management.""" + +import uuid + +from acapy_agent.admin.request_context import AdminRequestContext +from acapy_agent.messaging.models.openapi import OpenAPISchema +from aiohttp import web +from aiohttp_apispec import docs, request_schema, response_schema +from marshmallow import fields + +from .storage import MdocStorageManager + + +# ============================================================================= +# Schemas +# ============================================================================= + + +class TrustAnchorCreateSchema(OpenAPISchema): + """Request schema for creating a trust anchor.""" + + certificate_pem = fields.Str( + required=True, + metadata={"description": "PEM-encoded X.509 root CA certificate"}, + ) + anchor_id = fields.Str( + required=False, + metadata={"description": "Optional custom ID for the trust anchor"}, + ) + metadata = fields.Dict( + required=False, + metadata={"description": "Optional metadata (e.g., issuer name, purpose)"}, + ) + + +class TrustAnchorResponseSchema(OpenAPISchema): + """Response schema for trust anchor operations.""" + + anchor_id = fields.Str(required=True, metadata={"description": "Trust anchor ID"}) + message = fields.Str(required=True, metadata={"description": "Status message"}) + + +class TrustAnchorDetailSchema(OpenAPISchema): + """Response schema for trust anchor details.""" + + anchor_id = fields.Str(required=True, metadata={"description": "Trust anchor ID"}) + certificate_pem = fields.Str( + required=True, metadata={"description": "PEM-encoded certificate"} + ) + created_at = fields.Str(required=True, metadata={"description": "Creation timestamp"}) + metadata = fields.Dict( + required=False, metadata={"description": "Trust anchor metadata"} + ) + + +class TrustAnchorListSchema(OpenAPISchema): + """Response schema for listing trust anchors.""" + + trust_anchors = fields.List( + fields.Dict(), + required=True, + metadata={"description": "List of 
stored trust anchors"}, + ) + + +# ============================================================================= +# Handlers +# ============================================================================= + + +@docs( + tags=["mso_mdoc"], + summary="Add a trust anchor certificate", +) +@request_schema(TrustAnchorCreateSchema()) +@response_schema(TrustAnchorResponseSchema(), 200) +async def create_trust_anchor(request: web.BaseRequest): + """Add a new trust anchor certificate to the wallet. + + Trust anchors are root CA certificates used to verify mDoc issuer + certificate chains during credential verification. + """ + context: AdminRequestContext = request["context"] + storage_manager = MdocStorageManager(context.profile) + + try: + body = await request.json() + certificate_pem = body.get("certificate_pem") + if not certificate_pem: + raise web.HTTPBadRequest(reason="certificate_pem is required") + + anchor_id = body.get("anchor_id") or f"trust-anchor-{uuid.uuid4().hex[:8]}" + metadata = body.get("metadata", {}) + + async with context.profile.session() as session: + await storage_manager.store_trust_anchor( + session=session, + anchor_id=anchor_id, + certificate_pem=certificate_pem, + metadata=metadata, + ) + + return web.json_response( + { + "anchor_id": anchor_id, + "message": "Trust anchor stored successfully", + } + ) + except web.HTTPError: + raise + except Exception as e: + raise web.HTTPInternalServerError( + reason=f"Failed to store trust anchor: {e}" + ) from e + + +@docs( + tags=["mso_mdoc"], + summary="List all trust anchors", +) +@response_schema(TrustAnchorListSchema(), 200) +async def list_trust_anchors(request: web.BaseRequest): + """List all stored trust anchor certificates.""" + context: AdminRequestContext = request["context"] + storage_manager = MdocStorageManager(context.profile) + + try: + async with context.profile.session() as session: + anchors = await storage_manager.list_trust_anchors(session) + return web.json_response({"trust_anchors": 
anchors}) + except Exception as e: + raise web.HTTPInternalServerError( + reason=f"Failed to list trust anchors: {e}" + ) from e + + +@docs( + tags=["mso_mdoc"], + summary="Get a trust anchor by ID", +) +@response_schema(TrustAnchorDetailSchema(), 200) +async def get_trust_anchor(request: web.BaseRequest): + """Retrieve a specific trust anchor certificate.""" + context: AdminRequestContext = request["context"] + anchor_id = request.match_info["anchor_id"] + storage_manager = MdocStorageManager(context.profile) + + try: + async with context.profile.session() as session: + anchor = await storage_manager.get_trust_anchor(session, anchor_id) + + if not anchor: + raise web.HTTPNotFound(reason=f"Trust anchor not found: {anchor_id}") + + return web.json_response(anchor) + except web.HTTPError: + raise + except Exception as e: + raise web.HTTPInternalServerError( + reason=f"Failed to get trust anchor: {e}" + ) from e + + +@docs( + tags=["mso_mdoc"], + summary="Delete a trust anchor", +) +@response_schema(TrustAnchorResponseSchema(), 200) +async def delete_trust_anchor(request: web.BaseRequest): + """Delete a trust anchor certificate.""" + context: AdminRequestContext = request["context"] + anchor_id = request.match_info["anchor_id"] + storage_manager = MdocStorageManager(context.profile) + + try: + async with context.profile.session() as session: + deleted = await storage_manager.delete_trust_anchor(session, anchor_id) + + if not deleted: + raise web.HTTPNotFound(reason=f"Trust anchor not found: {anchor_id}") + + return web.json_response( + { + "anchor_id": anchor_id, + "message": "Trust anchor deleted successfully", + } + ) + except web.HTTPError: + raise + except Exception as e: + raise web.HTTPInternalServerError( + reason=f"Failed to delete trust anchor: {e}" + ) from e + + +# ============================================================================= +# Route registration +# ============================================================================= + + +def 
register_trust_anchor_routes(app: web.Application): + """Register trust anchor management routes.""" + app.router.add_post("/mso_mdoc/trust-anchors", create_trust_anchor) + app.router.add_get("/mso_mdoc/trust-anchors", list_trust_anchors) + app.router.add_get("/mso_mdoc/trust-anchors/{anchor_id}", get_trust_anchor) + app.router.add_delete("/mso_mdoc/trust-anchors/{anchor_id}", delete_trust_anchor) diff --git a/oid4vc/mso_mdoc/x509.py b/oid4vc/mso_mdoc/x509.py deleted file mode 100644 index 271c81416..000000000 --- a/oid4vc/mso_mdoc/x509.py +++ /dev/null @@ -1,32 +0,0 @@ -"""X.509 certificate utilities.""" - -from datetime import datetime, timezone, timedelta -from cryptography import x509 -from cryptography.x509.oid import NameOID -from cryptography.hazmat.primitives import hashes, serialization -from cwt import COSEKey -from pycose.keys import CoseKey -from pycose.keys.keytype import KtyOKP - - -def selfsigned_x509cert(private_key: CoseKey): - """Generate a self-signed X.509 certificate from a COSE key.""" - ckey = COSEKey.from_bytes(private_key.encode()) - subject = issuer = x509.Name( - [ - x509.NameAttribute(NameOID.COUNTRY_NAME, "CN"), - x509.NameAttribute(NameOID.COMMON_NAME, "Local CA"), - ] - ) - utcnow = datetime.now(timezone.utc) - cert = ( - x509.CertificateBuilder() - .subject_name(subject) - .issuer_name(issuer) - .public_key(ckey.key.public_key()) - .serial_number(x509.random_serial_number()) - .not_valid_before(utcnow) - .not_valid_after(utcnow + timedelta(days=10)) - .sign(ckey.key, None if private_key.kty == KtyOKP else hashes.SHA256()) - ) - return cert.public_bytes(getattr(serialization.Encoding, "DER")) diff --git a/oid4vc/oid4vc/jwt.py b/oid4vc/oid4vc/jwt.py index cd279b10a..8455d84db 100644 --- a/oid4vc/oid4vc/jwt.py +++ b/oid4vc/oid4vc/jwt.py @@ -25,6 +25,14 @@ from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat +# Algorithms supported by jwt_sign / jwt_verify. 
+# Entries map directly to the wallet key types handled by jwt_sign: +# ED25519 → EdDSA (RFC 8037) +# P256 → ES256 (RFC 7518 §3.4) +# Update this tuple whenever a new key type is added to jwt_sign. +SUPPORTED_ALGS: tuple[str, ...] = ("EdDSA", "ES256") + + @dataclass class JWTVerifyResult: """JWT Verification Result.""" @@ -182,24 +190,63 @@ async def jwt_verify( encoded_headers, encoded_payload, encoded_signature = jwt.split(".", 3) headers = b64_to_dict(encoded_headers) payload = b64_to_dict(encoded_payload) + + # RFC 7515 §4.1.1: alg is a REQUIRED JWS header parameter. + alg = headers.get("alg") + if not alg: + raise BadJWSHeaderError( + "JWT header is missing the required 'alg' parameter (RFC 7515 §4.1.1)" + ) + + if alg not in SUPPORTED_ALGS: + raise BadJWSHeaderError( + f"JWT header 'alg' value '{alg}' is not supported; " + f"expected one of: {', '.join(SUPPORTED_ALGS)}" + ) + + # kid, jwk, and x5c are mutually exclusive key-identification header parameters. + # Exactly one must be present; having multiple is ambiguous (RFC 7515 §4.1). + key_id_params = [p for p in ("kid", "jwk", "x5c") if p in headers] + if len(key_id_params) > 1: + raise BadJWSHeaderError( + f"JWT header contains multiple mutually exclusive key-identification " + f"parameters: {', '.join(key_id_params)}. Exactly one of 'kid', 'jwk', " + f"or 'x5c' is permitted (RFC 7515 §4.1)." 
+ ) + if cnf: if "jwk" in cnf: key = Key.from_jwk(cnf["jwk"]) elif "kid" in cnf: - verification_method = headers["kid"] - key = await key_material_for_kid(profile, verification_method) + if "kid" not in headers: + raise BadJWSHeaderError( + "JWT header is missing the required 'kid' parameter " + "when cnf contains a kid binding (RFC 7515 §4.1.4)" + ) + key = await key_material_for_kid(profile, headers["kid"]) else: raise ValueError("Unsupported cnf") + elif "jwk" in headers: + key = Key.from_jwk(headers["jwk"]) + elif "kid" in headers: + key = await key_material_for_kid(profile, headers["kid"]) + elif "x5c" in headers: + key = key_from_x5c(headers["x5c"]) else: - verification_method = headers["kid"] - key = await key_material_for_kid(profile, verification_method) + raise BadJWSHeaderError( + "JWT header is missing a key-identification parameter. " + "Exactly one of 'kid', 'jwk', or 'x5c' is required (RFC 7515 §4.1)." + ) decoded_signature = b64_to_bytes(encoded_signature, urlsafe=True) - alg = headers.get("alg") if alg == "EdDSA" and key.algorithm != KeyAlg.ED25519: - raise BadJWSHeaderError("Expected ed25519 key") + raise BadJWSHeaderError( + "JWT header 'alg' is 'EdDSA' but the resolved key is not an Ed25519 key" + ) elif alg == "ES256" and key.algorithm != KeyAlg.P256: - raise BadJWSHeaderError("Expected p256 key") + raise BadJWSHeaderError( + "JWT header 'alg' is 'ES256' but the resolved key is not a P-256 key" + ) valid = key.verify_signature( f"{encoded_headers}.{encoded_payload}".encode(), diff --git a/oid4vc/oid4vc/models/nonce.py b/oid4vc/oid4vc/models/nonce.py index 056505512..c5b0c0727 100644 --- a/oid4vc/oid4vc/models/nonce.py +++ b/oid4vc/oid4vc/models/nonce.py @@ -46,6 +46,14 @@ def id(self) -> str | None: """Accessor for the ID associated with this record.""" return self._id + @property + def tags(self) -> dict: + """Return tags dict with bool values stringified for Askar compatibility.""" + result = super().tags + if "used" in result: + 
result["used"] = str(result["used"]) + return result + @property def record_value(self) -> dict: """Return dict representation of the nonce record for storage.""" diff --git a/oid4vc/oid4vc/public_routes/token.py b/oid4vc/oid4vc/public_routes/token.py index d73adaedd..29b657761 100644 --- a/oid4vc/oid4vc/public_routes/token.py +++ b/oid4vc/oid4vc/public_routes/token.py @@ -6,6 +6,7 @@ import time from datetime import UTC from typing import Any, Dict +from urllib.parse import urlparse from acapy_agent.admin.request_context import AdminRequestContext from acapy_agent.core.profile import Profile @@ -327,15 +328,40 @@ async def handle_proof_of_posession( content_type="application/json", ) from exc else: - raise web.HTTPBadRequest( - text=json.dumps( - { - "error": "invalid_proof", - "error_description": "no key material in proof header", - } - ), - content_type="application/json", - ) + # No key material in the header. Some draft-era wallets (e.g. walt.id) + # omit jwk/kid/x5c from the proof header and instead put the DID in the + # payload `iss` claim. Decode the payload first and attempt resolution. + payload_for_iss = b64_to_dict(encoded_payload) + iss = payload_for_iss.get("iss") + if iss: + # key_material_for_kid expects a DID URL (with fragment), not a bare + # DID. For did:jwk and did:key the first verification method is #0. 
+ kid_url = iss if "#" in iss else f"{iss}#0" + try: + key = await key_material_for_kid(profile, kid_url) + LOGGER.debug("Resolved proof key from payload iss: %s", iss) + except (ValueError, Exception) as exc: + LOGGER.debug("Could not resolve key from iss '%s': %s", iss, exc) + raise web.HTTPBadRequest( + text=json.dumps( + { + "error": "invalid_proof", + "error_description": "no key material in proof header and" + " iss could not be resolved", + } + ), + content_type="application/json", + ) from exc + else: + raise web.HTTPBadRequest( + text=json.dumps( + { + "error": "invalid_proof", + "error_description": "no key material in proof header", + } + ), + content_type="application/json", + ) payload = b64_to_dict(encoded_payload) @@ -347,8 +373,24 @@ async def handle_proof_of_posession( issuer_endpoint = Config.from_settings(profile.settings).endpoint # aud may be a string or a list of strings (per RFC 7519 § 4.1.3) aud_values = [aud] if isinstance(aud, str) else list(aud) + + def _strip_default_port(url: str) -> str: + """Remove explicit default ports (https:443, http:80) for comparison.""" + try: + p = urlparse(url) + if (p.scheme == "https" and p.port == 443) or ( + p.scheme == "http" and p.port == 80 + ): + netloc = p.hostname or "" + return p._replace(netloc=netloc).geturl() + except Exception: + pass + return url + + norm_endpoint = _strip_default_port(issuer_endpoint) if issuer_endpoint else "" if issuer_endpoint and not any( - av == issuer_endpoint or av.startswith(issuer_endpoint + "/tenant/") + _strip_default_port(av) == norm_endpoint + or _strip_default_port(av).startswith(norm_endpoint + "/tenant/") for av in aud_values ): raise web.HTTPBadRequest( @@ -404,11 +446,13 @@ async def handle_proof_of_posession( # JWK from the resolved key so credential processors that need the raw JWK # (e.g. mso_mdoc for holder key binding in DeviceKey) can access it. 
holder_jwk = headers.get("jwk") - if holder_jwk is None and "kid" in headers: + if holder_jwk is None and ("kid" in headers or not any( + k in headers for k in ("jwk", "kid", "x5c") + )): try: holder_jwk = json.loads(key.get_jwk_public()) except Exception: - LOGGER.debug("Could not derive holder JWK from kid-resolved key") + LOGGER.debug("Could not derive holder JWK from resolved key") return PopResult( headers, diff --git a/oid4vc/oid4vc/tests/test_token.py b/oid4vc/oid4vc/tests/test_token.py index 1cae2a059..079eaa1de 100644 --- a/oid4vc/oid4vc/tests/test_token.py +++ b/oid4vc/oid4vc/tests/test_token.py @@ -474,6 +474,26 @@ async def test_proof_with_correct_aud_accepted(profile): assert result.verified is True +@pytest.mark.asyncio +async def test_proof_aud_with_explicit_default_port_accepted(profile): + """Wallets may send aud with explicit :443 — must equal endpoint without it.""" + nonce = "nonce-port" + jwt_str = _build_proof_jwt( + nonce, aud="https://myissuerapi.zrok.dev.indicioctech.io:443" + ) + proof = {"proof_type": "jwt", "jwt": jwt_str} + + with patch( + "oid4vc.public_routes.token.Config.from_settings", + return_value=MagicMock( + endpoint="https://myissuerapi.zrok.dev.indicioctech.io" + ), + ): + result = await handle_proof_of_posession(profile, proof, nonce) + + assert result.verified is True + + @pytest.mark.asyncio async def test_proof_with_tenant_scoped_aud_accepted(profile): """Diff-3: proof JWT aud set to a tenant-scoped URL must be accepted. 
@@ -520,6 +540,43 @@ async def test_proof_with_cross_issuer_tenant_path_rejected(profile): assert "aud" in body["error_description"] +@pytest.mark.asyncio +async def test_proof_iss_fallback_when_no_key_in_header(profile): + """Wallets that omit jwk/kid/x5c but put their DID in iss must be resolved.""" + nonce = "nonce-iss-fallback" + key = Key.generate(KeyAlg.P256) + public_jwk = json.loads(key.get_jwk_public()) + # Header has NO jwk, kid, or x5c — only alg+typ + header = {"typ": "openid4vci-proof+jwt", "alg": "ES256"} + payload = { + "iss": "did:key:zDnaemDNiAWCCLFKP2ppPJuq52E2Gh9trydNgTqrWDkb5oiaQ", + "aud": "http://localhost:8020", + "iat": int(time.time()), + "exp": int(time.time()) + 600, + "nonce": nonce, + } + h_enc = _make_b64url(json.dumps(header).encode()) + p_enc = _make_b64url(json.dumps(payload).encode()) + sig = key.sign_message(f"{h_enc}.{p_enc}".encode(), sig_type="ES256") + jwt_str = f"{h_enc}.{p_enc}.{_make_b64url(sig)}" + proof = {"proof_type": "jwt", "jwt": jwt_str} + + with ( + patch( + "oid4vc.public_routes.token.Config.from_settings", + return_value=MagicMock(endpoint="http://localhost:8020"), + ), + patch( + "oid4vc.public_routes.token.key_material_for_kid", + new=AsyncMock(return_value=key), + ), + ): + result = await handle_proof_of_posession(profile, proof, nonce) + + assert result.verified is True + assert result.holder_jwk is not None # derived from iss-resolved key + + @pytest.mark.asyncio async def test_proof_without_aud_not_rejected_when_endpoint_unconfigured(profile): """C-4: When endpoint is not configured, a proof without aud is still accepted.""" diff --git a/oid4vc/pyproject.toml b/oid4vc/pyproject.toml index 15cf1ad5b..0176dbb05 100644 --- a/oid4vc/pyproject.toml +++ b/oid4vc/pyproject.toml @@ -30,9 +30,9 @@ aiohttp-cors = "^0.7.0" marshmallow = "^3.20.1" jsonschema = "^4.23.0" jsonpath = "^0.82.2" -cbor2 = { version = "~5", optional = true } cbor-diag = { version = "*", optional = true } cwt = { version = "~2", optional = true 
} +cryptography = ">=42" oscrypto = { git = "https://github.com/wbond/oscrypto.git", rev = "1547f53" } # Resolves https://github.com/wbond/oscrypto/issues/78 pycose = { version = "~1", optional = true } jsonpointer = { version = "^3.0.0", optional = true } @@ -41,10 +41,11 @@ jsonpointer = { version = "^3.0.0", optional = true } [tool.poetry.extras] aca-py = ["acapy-agent"] -mso_mdoc = ["cbor2", "cbor-diag", "cwt", "pycose"] +mso_mdoc = ["cbor-diag", "cwt", "pycose"] sd_jwt_vc = ["jsonpointer"] [tool.poetry.group.dev.dependencies] +cbor2 = "~5" ruff = "^0.14.7" pytest = "^8.3.5" pytest-asyncio = "^1.2.0"