diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 90ae2f16..716ee02b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -167,9 +167,162 @@ jobs: - name: Check docs build cleanly run: mix docs --warnings-as-errors + optional_dep_oban_absent: + name: Optional dep off - oban + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 # v1.24.0 + with: + version-file: .tool-versions + version-type: strict + - name: Cache library deps + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: | + deps + _build + key: ${{ runner.os }}-library-oban-off-${{ hashFiles('mix.lock') }} + - name: Install Hex + Rebar + run: | + mix local.hex --force + mix local.rebar --force + - name: Remove oban from this lane + run: | + perl -0pi -e 's/\n\s*\{:oban, "~> 2\.17", optional: true\},//g' mix.exs + - name: Fetch reduced dependency set + run: mix deps.get + - name: Verify oban-off failure surfaces + env: + MIX_ENV: test + run: | + mix test test/sigra/delivery_test.exs:103 test/sigra/workers/optional_deps_test.exs:32 + if mix sigra.doctor --delivery-mode=async; then + echo "expected mix sigra.doctor --delivery-mode=async to fail without oban" + exit 1 + fi + mix run -e ' + try do + Sigra.Workers.AccountDeletion.new(%{"user_id" => 1}, []) + IO.puts("expected MissingDependencyError for oban-off lane") + System.halt(1) + rescue + error in Sigra.OptionalDeps.MissingDependencyError -> + if error.feature == :lifecycle_jobs do + IO.puts("account deletion worker raised tagged lifecycle dependency error") + else + IO.puts("unexpected feature: #{inspect(error.feature)}") + System.halt(1) + end + end + ' + + optional_dep_bcrypt_absent: + name: Optional dep off - bcrypt + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 # v1.24.0 + with: + version-file: .tool-versions + version-type: strict + - name: Cache library deps + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: | + deps + _build + key: ${{ runner.os }}-library-bcrypt-off-${{ hashFiles('mix.lock') }} + - name: Install Hex + Rebar + run: | + mix local.hex --force + mix local.rebar --force + - name: Remove bcrypt_elixir from this lane + run: | + perl -0pi -e 's/\n\s*\{:bcrypt_elixir, "~> 3\.3", optional: true\},//g' mix.exs + - name: Fetch reduced dependency set + run: mix deps.get + - name: Verify bcrypt-off failure surfaces + env: + MIX_ENV: test + run: | + mix test test/sigra/crypto_test.exs:126 + mix run -e ' + hash = "$2b$12$WApznUPhDubN0oeveSXHp.Raz0RCbZCjJjVEqMlKsXXYb.1VZFBi2" + + try do + Sigra.Crypto.verify_with_upgrade("password123", hash) + IO.puts("expected MissingDependencyError for bcrypt-off lane") + System.halt(1) + rescue + error in Sigra.OptionalDeps.MissingDependencyError -> + if error.feature == :bcrypt_migration do + IO.puts("bcrypt verification raised tagged dependency error") + else + IO.puts("unexpected feature: #{inspect(error.feature)}") + System.halt(1) + end + end + ' + + optional_dep_eqrcode_absent: + name: Optional dep off - eqrcode + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 # v1.24.0 + with: + version-file: .tool-versions + version-type: strict + - name: Cache library deps + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: | + deps + _build + key: ${{ runner.os }}-library-eqrcode-off-${{ hashFiles('mix.lock') }} + - name: Install Hex + Rebar + run: | + mix local.hex --force + mix local.rebar --force + - name: Remove eqrcode from this lane + run: | + perl -0pi -e 's/\n\s*\{:eqrcode, "~> 0\.2\.1", optional: true\},//g' mix.exs + - name: Fetch reduced dependency set + run: mix deps.get + - name: Verify eqrcode-off failure surfaces + env: + MIX_ENV: test + run: | + mix test test/sigra/mfa_test.exs:18 + mix run -e ' + config = Sigra.Config.new!( + repo: Sigra.MockRepo, + user_schema: Sigra.TestUser, + secret_key_base: String.duplicate("a", 64), + mfa: [enabled: true, totp_issuer: nil] + ) + + try do + Sigra.MFA.enroll(config) + IO.puts("expected MissingDependencyError for eqrcode-off lane") + System.halt(1) + rescue + error in Sigra.OptionalDeps.MissingDependencyError -> + if error.feature == :totp_qr do + IO.puts("mfa enrollment raised tagged dependency error") + else + IO.puts("unexpected feature: #{inspect(error.feature)}") + System.halt(1) + end + end + ' + example_unit_smoke: name: Example unit smoke (ExUnit + ConnTest) runs-on: ubuntu-latest + env: + CLOAK_KEY: MDEyMzQ1Njc4OWFiY2RlZjAxMjM0NTY3ODlhYmNkZWY= services: postgres: image: postgres:15 @@ -220,6 +373,8 @@ jobs: # checks (see scripts/ci/install-smoke.sh and docs/uat-ci-coverage.md). name: Install smoke (fresh phx.new + sigra.install) runs-on: ubuntu-latest + permissions: + contents: write services: postgres: image: postgres:15 @@ -253,7 +408,7 @@ jobs: mix local.hex --force mix local.rebar --force - name: Install phx_new archive - run: mix archive.install --force hex phx_new + run: mix archive.install --force hex phx_new 1.8.5 - name: Fetch Sigra library deps run: mix deps.get - name: Run install smoke harness @@ -264,7 +419,73 @@ jobs: GITHUB_WORKSPACE: ${{ github.workspace }} # Fresh tmp_app includes Cloak.Vault; compile/boot needs a dummy key (same as admin-acceptance-smoke.sh). CLOAK_KEY: MDEyMzQ1Njc4OWFiY2RlZjAxMjM0NTY3ODlhYmNkZWY= - run: scripts/ci/install-smoke.sh + run: | + mkdir -p .planning/uat-evidence/v1.20/oauth-gen + scripts/ci/install-smoke.sh 2>&1 | tee .planning/uat-evidence/v1.20/oauth-gen/transcript.log + - name: Generate install-smoke evidence reports + env: + MIX_ENV: test + SIGRA_CI_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + SIGRA_CI_WORKFLOW: .github/workflows/ci.yml / install_smoke + SIGRA_GIT_TAG: ${{ github.ref_name }} + run: | + MIX_ENV=test mix sigra.uat.report --phase=oauth-gen + MIX_ENV=test mix sigra.uat.report --phase=getting-started + - name: Upload oauth-gen bundle (main, 14d retention) + if: always() && github.ref == 'refs/heads/main' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-gen-bundle + path: .planning/uat-evidence/v1.20/oauth-gen/ + retention-days: 14 + - name: Upload oauth-gen bundle (PR/push, 7d retention) + if: always() && github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-gen-bundle + path: .planning/uat-evidence/v1.20/oauth-gen/ + retention-days: 7 + - name: Create oauth-gen release asset archive (v* tag only) + if: startsWith(github.ref, 'refs/tags/v') + run: | + cd /tmp && tar -czf "sigra-oauth-gen-${{ github.ref_name }}.tar.gz" \ + -C "${{ github.workspace }}" .planning/uat-evidence/v1.20/oauth-gen/ + - name: Promote oauth-gen bundle to ${{ github.ref_name }} release asset + if: startsWith(github.ref, 'refs/tags/v') + env: + GH_TOKEN: ${{ github.token }} + run: | + gh release upload "${{ github.ref_name }}" "/tmp/sigra-oauth-gen-${{ github.ref_name }}.tar.gz" \ + --clobber \ + --repo "${{ github.repository }}" + - name: Upload oauth-gen bundle (tag, 90d retention) + if: startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-gen-bundle + path: .planning/uat-evidence/v1.20/oauth-gen/ + retention-days: 90 + - name: Upload getting-started bundle (main, 14d retention) + if: always() && github.ref == 'refs/heads/main' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: getting-started-generated-host-bundle + path: .planning/uat-evidence/v1.20/getting-started-clean-machine/ + retention-days: 14 + - name: Upload getting-started bundle (PR/push, 7d retention) + if: always() && github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: getting-started-generated-host-bundle + path: .planning/uat-evidence/v1.20/getting-started-clean-machine/ + retention-days: 7 + - name: Upload getting-started bundle (tag, 90d retention) + if: startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: getting-started-generated-host-bundle + path: .planning/uat-evidence/v1.20/getting-started-clean-machine/ + retention-days: 90 passkeys_manual_fallback_smoke: name: Passkeys manual fallback smoke @@ -487,6 +708,8 @@ jobs: example_http_smoke: name: Example HTTP smoke (boot + curl critical routes) runs-on: ubuntu-latest + env: + CLOAK_KEY: MDEyMzQ1Njc4OWFiY2RlZjAxMjM0NTY3ODlhYmNkZWY= services: postgres: image: postgres:15 @@ -551,6 +774,8 @@ jobs: example_playwright_smoke: name: Example Playwright smoke (full lifecycle) runs-on: ubuntu-latest + env: + CLOAK_KEY: MDEyMzQ1Njc4OWFiY2RlZjAxMjM0NTY3ODlhYmNkZWY= services: postgres: image: postgres:15 @@ -788,6 +1013,306 @@ jobs: path: test/example/priv/playwright/test-results/ retention-days: 7 + oauth_e2e_playwright: + name: OAuth E2E Playwright (mock issuer) + runs-on: ubuntu-latest + permissions: + contents: write + env: + EXAMPLE_DB_PROBE_ENABLED: "1" + EXAMPLE_OAUTH_ISSUER_CTL_ENABLED: "1" + CLOAK_KEY: MDEyMzQ1Njc4OWFiY2RlZjAxMjM0NTY3ODlhYmNkZWY= + services: + postgres: + image: postgres:15 + env: + POSTGRES_PASSWORD: postgres + ports: ['5432:5432'] + options: >- + --health-cmd pg_isready --health-interval 10s + --health-timeout 5s --health-retries 5 + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 # v1.24.0 + with: + version-file: .tool-versions + version-type: strict + - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version: '20' + cache: 'npm' + cache-dependency-path: 'test/example/priv/playwright/package-lock.json' + - name: Cache example deps + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: | + test/example/deps + test/example/_build + key: ${{ runner.os }}-example-dev-${{ hashFiles('test/example/mix.lock', 'test/example/config/**', 'test/example/lib/**/*.ex', 'lib/**/*.ex', 'mix.exs') }} + - name: Fetch example deps + working-directory: test/example + env: + MIX_ENV: dev + run: mix deps.get + - name: Compile example + working-directory: test/example + env: + MIX_ENV: dev + run: mix compile --warnings-as-errors + - name: Setup example dev DB + working-directory: test/example + env: + MIX_ENV: dev + PGUSER: postgres + PGPASSWORD: postgres + PGHOST: localhost + run: mix ecto.create && mix ecto.migrate + - name: Install Playwright deps + working-directory: test/example/priv/playwright + run: npm ci + - name: Install Playwright browsers + working-directory: test/example/priv/playwright + run: npx playwright install --with-deps chromium + - name: Boot example app in background + working-directory: test/example + env: + MIX_ENV: dev + PGUSER: postgres + PGPASSWORD: postgres + PGHOST: localhost + PHX_SERVER: "true" + run: mix phx.server > /tmp/example-oauth-playwright-server.log 2>&1 & + - name: Wait for app and warm OAuth routes + run: | + for i in $(seq 1 30); do + if curl -sf http://localhost:4000/ > /dev/null; then + echo "App responding after ${i}s" + break + fi + sleep 1 + done + for path in /users/log_in /users/settings /test/db_probe /test/oauth_issuer/reset; do + curl -s -o /dev/null -w "%{http_code} ${path}\n" "http://localhost:4000${path}" || true + done + - name: Run OAuth Playwright specs + working-directory: test/example/priv/playwright + env: + CI: "true" + SIGRA_EXAMPLE_URL: "http://localhost:4000" + run: | + npx playwright test \ + tests/oauth-register.spec.ts \ + tests/oauth-link.spec.ts \ + tests/oauth-email-match.spec.ts \ + --project=chromium \ + --reporter=line + - name: Generate OAuth evidence reports + # `mix sigra.uat.report` is a sigra-provided mix task. Run from + # test/example/ (where sigra is a path dep with deps already fetched + # by this job's earlier steps in MIX_ENV=dev) rather than the repo + # root (whose mix.exs has not had deps.get run in this job — that's + # why the previous repo-root invocation failed with "the dependency + # is not available, run mix deps.get"). Mirrors the MFA evidence + # report fix in 2974be6. + working-directory: test/example + env: + MIX_ENV: dev + SIGRA_CI_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + SIGRA_CI_WORKFLOW: .github/workflows/ci.yml / oauth_e2e_playwright + SIGRA_GIT_TAG: ${{ github.ref_name }} + run: | + mix sigra.uat.report --phase=oauth-google + mix sigra.uat.report --phase=oauth-link + mix sigra.uat.report --phase=oauth-email-match + - name: Dump example app log (on failure) + if: failure() + run: | + echo "--- /tmp/example-oauth-playwright-server.log ---" + cat /tmp/example-oauth-playwright-server.log || echo "(no log file)" + - name: Assemble oauth e2e bundle + if: always() + run: | + mkdir -p /tmp/oauth-e2e-playwright-bundle + cp -r .planning/uat-evidence/v1.20/oauth-google /tmp/oauth-e2e-playwright-bundle/oauth-google 2>/dev/null || true + cp -r .planning/uat-evidence/v1.20/oauth-link /tmp/oauth-e2e-playwright-bundle/oauth-link 2>/dev/null || true + cp -r .planning/uat-evidence/v1.20/oauth-email-match /tmp/oauth-e2e-playwright-bundle/oauth-email-match 2>/dev/null || true + cp -r test/example/priv/playwright/playwright-report /tmp/oauth-e2e-playwright-bundle/playwright-report 2>/dev/null || true + cp -r test/example/priv/playwright/__snapshots__/oauth-link.spec.ts /tmp/oauth-e2e-playwright-bundle/oauth-link-hero 2>/dev/null || true + echo "Bundle contents:" + find /tmp/oauth-e2e-playwright-bundle -type f | sort || true + - name: Upload oauth e2e playwright bundle (main, 14d retention) + if: always() && github.ref == 'refs/heads/main' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-e2e-playwright-bundle + path: /tmp/oauth-e2e-playwright-bundle/ + retention-days: 14 + - name: Upload oauth e2e playwright bundle (PR/push, 7d retention) + if: always() && github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-e2e-playwright-bundle + path: /tmp/oauth-e2e-playwright-bundle/ + retention-days: 7 + - name: Upload oauth e2e playwright failure diagnostics (main, 14d retention) + if: failure() && github.ref == 'refs/heads/main' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-e2e-playwright-failure-diagnostics + path: test/example/priv/playwright/test-results/ + retention-days: 14 + - name: Upload oauth e2e playwright failure diagnostics (PR/push, 7d retention) + if: failure() && github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-e2e-playwright-failure-diagnostics + path: test/example/priv/playwright/test-results/ + retention-days: 7 + - name: Create oauth e2e playwright release asset archive (v* tag only) + if: startsWith(github.ref, 'refs/tags/v') + run: | + cd /tmp && tar -czf "sigra-oauth-e2e-playwright-${{ github.ref_name }}.tar.gz" oauth-e2e-playwright-bundle/ + echo "Release asset: sigra-oauth-e2e-playwright-${{ github.ref_name }}.tar.gz" + ls -lh "/tmp/sigra-oauth-e2e-playwright-${{ github.ref_name }}.tar.gz" + - name: Promote oauth e2e playwright bundle to ${{ github.ref_name }} release asset + if: startsWith(github.ref, 'refs/tags/v') + env: + GH_TOKEN: ${{ github.token }} + run: | + gh release upload "${{ github.ref_name }}" "/tmp/sigra-oauth-e2e-playwright-${{ github.ref_name }}.tar.gz" \ + --clobber \ + --repo "${{ github.repository }}" + - name: Upload oauth e2e playwright bundle (tag, 90d retention) + if: startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: oauth-e2e-playwright-bundle + path: /tmp/oauth-e2e-playwright-bundle/ + retention-days: 90 + + mfa_e2e_playwright: + name: MFA backup-code rotation E2E + runs-on: ubuntu-latest + permissions: + contents: write + env: + EXAMPLE_DB_PROBE_ENABLED: "1" + CLOAK_KEY: MDEyMzQ1Njc4OWFiY2RlZjAxMjM0NTY3ODlhYmNkZWY= + services: + postgres: + image: postgres:15 + env: + POSTGRES_PASSWORD: postgres + ports: ['5432:5432'] + options: >- + --health-cmd pg_isready --health-interval 10s + --health-timeout 5s --health-retries 5 + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 # v1.24.0 + with: + version-file: .tool-versions + version-type: strict + - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version: '20' + cache: 'npm' + cache-dependency-path: 'test/example/priv/playwright/package-lock.json' + - name: Cache example deps + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: | + test/example/deps + test/example/_build + key: ${{ runner.os }}-example-dev-${{ hashFiles('test/example/mix.lock', 'test/example/config/**', 'test/example/lib/**/*.ex', 'lib/**/*.ex', 'mix.exs') }} + - name: Fetch example deps + working-directory: test/example + env: + MIX_ENV: dev + run: mix deps.get + - name: Compile example + working-directory: test/example + env: + MIX_ENV: dev + run: mix compile --warnings-as-errors + - name: Setup example dev DB + working-directory: test/example + env: + MIX_ENV: dev + PGUSER: postgres + PGPASSWORD: postgres + PGHOST: localhost + run: mix ecto.create && mix ecto.migrate + - name: Install Playwright deps + working-directory: test/example/priv/playwright + run: npm ci + - name: Install Playwright browsers + working-directory: test/example/priv/playwright + run: npx playwright install --with-deps chromium + - name: Boot example app in background + working-directory: test/example + env: + MIX_ENV: dev + PGUSER: postgres + PGPASSWORD: postgres + PGHOST: localhost + PHX_SERVER: "true" + run: mix phx.server > /tmp/example-mfa-playwright-server.log 2>&1 & + - name: Wait for app + run: | + for i in $(seq 1 30); do + if curl -sf http://localhost:4000/ > /dev/null; then + echo "App responding after ${i}s" + break + fi + sleep 1 + done + - name: Run MFA Playwright spec + working-directory: test/example/priv/playwright + env: + CI: "true" + SIGRA_EXAMPLE_URL: "http://localhost:4000" + run: npx playwright test tests/mfa-backup-rotation.spec.ts --project=chromium --reporter=line + - name: Generate MFA evidence report + # `mix sigra.uat.report` is a sigra-provided mix task. Run from + # test/example/ (where sigra is a path dep with deps already fetched) + # rather than the repo root (whose mix.exs has not had deps.get run + # in this job — that's why the previous "MIX_ENV=test" attempt failed + # with "the dependency is not available, run mix deps.get"). + working-directory: test/example + env: + MIX_ENV: dev + SIGRA_CI_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + SIGRA_CI_WORKFLOW: .github/workflows/ci.yml / mfa_e2e_playwright + SIGRA_GIT_TAG: ${{ github.ref_name }} + run: MIX_ENV=dev mix sigra.uat.report --phase=mfa-backup-rotation + - name: Dump example app log (on failure) + if: failure() + run: | + echo "--- /tmp/example-mfa-playwright-server.log ---" + cat /tmp/example-mfa-playwright-server.log || echo "(no log file)" + - name: Upload MFA bundle (main, 14d retention) + if: always() && github.ref == 'refs/heads/main' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: mfa-backup-rotation-bundle + path: .planning/uat-evidence/v1.20/mfa-backup-rotation/ + retention-days: 14 + - name: Upload MFA bundle (PR/push, 7d retention) + if: always() && github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: mfa-backup-rotation-bundle + path: .planning/uat-evidence/v1.20/mfa-backup-rotation/ + retention-days: 7 + - name: Upload MFA bundle (tag, 90d retention) + if: startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: mfa-backup-rotation-bundle + path: .planning/uat-evidence/v1.20/mfa-backup-rotation/ + retention-days: 90 + generated_admin_playwright_smoke: name: Generated admin Playwright smoke runs-on: ubuntu-latest @@ -924,3 +1449,179 @@ jobs: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Run Phase 34 mechanical UAT contracts run: scripts/ci/phase34-uat-contracts.sh + + # Phase 86: GAUAT-01 / GAUAT-02 — email visual regression harness + # Runs the snapshot prerender, evidence report generation, and narrow + # Playwright email-visual spec. Uploads the full raw bundle on every run. + # On any refs/tags/v* tag promotes that exact bundle to the matching GitHub release + # asset without rebuilding from different inputs (D-86-01, D-86-06, D-86-11). + # + # SEED-1/SEED-2 residual columns in docs/uat-ci-coverage.md point at this job. + # snapshot count = 36, contrast min ratio = 4.5, byte budget max = 100000 + email_visual_regression: + name: Email visual regression (GAUAT-01/02) + runs-on: ubuntu-latest + permissions: + contents: write # required for gh release upload on tag runs + env: + CLOAK_KEY: MDEyMzQ1Njc4OWFiY2RlZjAxMjM0NTY3ODlhYmNkZWY= + services: + postgres: + image: postgres:15 + env: + POSTGRES_PASSWORD: postgres + ports: ['5432:5432'] + options: >- + --health-cmd pg_isready --health-interval 10s + --health-timeout 5s --health-retries 5 + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: erlef/setup-beam@fc68ffb90438ef2936bbb3251622353b3dcb2f93 # v1.24.0 + with: + version-file: .tool-versions + version-type: strict + - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version: '20' + cache: 'npm' + cache-dependency-path: 'test/example/priv/playwright/package-lock.json' + - name: Cache library deps + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: | + deps + _build + key: ${{ runner.os }}-mix-test-${{ hashFiles('mix.lock', 'mix.exs') }} + - name: Cache example deps + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + path: | + test/example/deps + test/example/_build + key: ${{ runner.os }}-example-test-${{ hashFiles('test/example/mix.lock', 'test/example/mix.exs', 'mix.exs') }} + - name: Fetch library deps + run: mix deps.get + - name: Fetch example deps + working-directory: test/example + env: + MIX_ENV: test + run: mix deps.get + - name: Compile library (test env) + env: + MIX_ENV: test + run: mix compile --warnings-as-errors + - name: Compile example (test env) + working-directory: test/example + env: + MIX_ENV: test + run: mix compile --warnings-as-errors + - name: Setup test DB + working-directory: test/example + env: + MIX_ENV: test + PGUSER: postgres + PGPASSWORD: postgres + PGHOST: localhost + run: mix ecto.create && mix ecto.migrate + # L1: snapshot prerender — deterministic email HTML to disk via frozen fixtures + - name: Prerender email HTML snapshots (L1 snapshot lane) + env: + MIX_ENV: test + PGUSER: postgres + PGPASSWORD: postgres + PGHOST: localhost + run: MIX_ENV=test mix sigra.email.snapshot --check + # L2: Playwright email-visual spec — 36-cell matrix (9 templates × 2 engines × 2 themes) + - name: Install Playwright deps + working-directory: test/example/priv/playwright + run: npm ci + - name: Install Playwright browsers (chromium + webkit) + working-directory: test/example/priv/playwright + run: npx playwright install --with-deps chromium webkit + - name: Run email visual regression spec (L2 Playwright lane) + working-directory: test/example/priv/playwright + env: + CI: "true" + # Project names in playwright.config.ts are prefixed with `email-visual-` + # (see Available projects: in CI failure on run 25276147131). The + # short `email-*` names below were correct at one point but drifted + # from the config; the spec exits with "Project(s) ... not found". + run: npx playwright test tests/email-visual.spec.ts --project=email-visual-chromium-light --project=email-visual-chromium-dark --project=email-visual-webkit-light --project=email-visual-webkit-dark + # L3: evidence report generation — manifest + README + contrast/byte-budget reports + - name: Generate Phase 04 evidence report (L3 report lane) + env: + MIX_ENV: test + SIGRA_CI_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + SIGRA_CI_WORKFLOW: .github/workflows/ci.yml / email_visual_regression + SIGRA_GIT_TAG: ${{ github.ref_name }} + run: MIX_ENV=test mix sigra.uat.report --phase=04 + - name: Generate Phase 08 evidence report (L3 report lane) + env: + MIX_ENV: test + SIGRA_CI_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + SIGRA_CI_WORKFLOW: .github/workflows/ci.yml / email_visual_regression + SIGRA_GIT_TAG: ${{ github.ref_name }} + run: MIX_ENV=test mix sigra.uat.report --phase=08 + # Capture playwright test-results on spec failure for snapshot triage. + # The full evidence bundle below is assembled only on success because it + # depends on the L3 report steps that get skipped on L2 failure. Without + # this dedicated failure-path upload, snapshot drift is not triageable + # from the CI logs alone. + - name: Upload email visual test-results on failure + if: failure() + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: email-visual-failure-diagnostics + path: | + test/example/priv/playwright/test-results/ + test/example/priv/playwright/playwright-report/ + retention-days: 7 + if-no-files-found: warn + # Bundle the full evidence payload: committed baselines + generated reports + - name: Assemble email evidence bundle + run: | + mkdir -p /tmp/email-visual-bundle + cp -r .planning/uat-evidence/v1.20/ /tmp/email-visual-bundle/uat-evidence/ + cp -r test/example/priv/playwright/__snapshots__/email-visual.spec.ts/ /tmp/email-visual-bundle/baselines/ + cp -r test/example/priv/email_snapshots/ /tmp/email-visual-bundle/html-snapshots/ 2>/dev/null || true + echo "Bundle contents:" + find /tmp/email-visual-bundle -type f | sort + # Upload the raw bundle artifact on every run (branch/PR and tags) + - name: Upload email visual regression bundle (main, 14d retention) + if: always() && github.ref == 'refs/heads/main' + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: email-visual-regression-bundle + path: /tmp/email-visual-bundle/ + retention-days: 14 + - name: Upload email visual regression bundle (PR/push, 7d retention) + if: always() && github.ref != 'refs/heads/main' && !startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: email-visual-regression-bundle + path: /tmp/email-visual-bundle/ + retention-days: 7 + # On any v* tag (v1.20.0, v1.20.1, v1.21.0, ...) promote the exact same + # bundle to the matching GitHub release asset without rebuilding from + # different inputs (D-86-01, D-86-06). + - name: Create release asset archive (v* tag only) + if: startsWith(github.ref, 'refs/tags/v') + run: | + cd /tmp && tar -czf "sigra-email-visual-regression-${{ github.ref_name }}.tar.gz" email-visual-bundle/ + echo "Release asset: sigra-email-visual-regression-${{ github.ref_name }}.tar.gz" + ls -lh "/tmp/sigra-email-visual-regression-${{ github.ref_name }}.tar.gz" + - name: Promote bundle to ${{ github.ref_name }} release asset + if: startsWith(github.ref, 'refs/tags/v') + env: + GH_TOKEN: ${{ github.token }} + run: | + gh release upload "${{ github.ref_name }}" "/tmp/sigra-email-visual-regression-${{ github.ref_name }}.tar.gz" \ + --clobber \ + --repo "${{ github.repository }}" + - name: Upload email visual bundle (tag, 90d retention) + if: startsWith(github.ref, 'refs/tags/') + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 + with: + name: email-visual-regression-bundle + path: /tmp/email-visual-bundle/ + retention-days: 90 diff --git a/.github/workflows/hex-publish.yml b/.github/workflows/hex-publish.yml index 8c2c4cf0..27bb7109 100644 --- a/.github/workflows/hex-publish.yml +++ b/.github/workflows/hex-publish.yml @@ -10,16 +10,16 @@ on: workflow_dispatch: inputs: tag: - description: 'Git tag or commit SHA to publish from (e.g. v0.2.1).' + description: 'Git tag or commit SHA to publish from (e.g. v1.20.0).' required: true type: string release_version: - description: 'Expected @version string in mix.exs at that ref (e.g. 0.2.1).' + description: 'Expected @version string in mix.exs at that ref (e.g. 1.20.0).' required: true type: string permissions: - contents: read + contents: write jobs: publish: @@ -66,6 +66,16 @@ jobs: - name: Verify release version in mix.exs run: grep -n "@version \"${{ inputs.release_version }}\"" mix.exs + - name: Sync changelog summary into GitHub release body + env: + GH_TOKEN: ${{ github.token }} + GITHUB_REPOSITORY: ${{ github.repository }} + run: | + chmod +x scripts/release/sync_release_summary.sh + scripts/release/sync_release_summary.sh \ + "${{ inputs.release_version }}" \ + "${{ inputs.tag }}" + - name: Fetch library deps run: mix deps.get diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml index 809b9b12..4f56b868 100644 --- a/.github/workflows/release-please.yml +++ b/.github/workflows/release-please.yml @@ -47,9 +47,31 @@ jobs: config-file: release-please-config.json manifest-file: .release-please-manifest.json + sync-release-summary: + name: Sync GitHub release summary + needs: release-please + if: ${{ needs.release-please.outputs.release_created == 'true' }} + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + ref: ${{ needs.release-please.outputs.tag_name }} + + - name: Sync changelog summary into GitHub release body + env: + GH_TOKEN: ${{ secrets.RELEASE_PLEASE_TOKEN || github.token }} + GITHUB_REPOSITORY: ${{ github.repository }} + run: | + chmod +x scripts/release/sync_release_summary.sh + scripts/release/sync_release_summary.sh \ + "${{ needs.release-please.outputs.version }}" \ + "${{ needs.release-please.outputs.tag_name }}" + publish-hex: name: Publish to Hex.pm - needs: release-please + needs: [release-please, sync-release-summary] if: ${{ needs.release-please.outputs.release_created == 'true' }} runs-on: ubuntu-latest permissions: diff --git a/.planning/AUDIT-ATOMICITY-DEFAULTS.md b/.planning/AUDIT-ATOMICITY-DEFAULTS.md index 8503edd9..b2b09bae 100644 --- a/.planning/AUDIT-ATOMICITY-DEFAULTS.md +++ b/.planning/AUDIT-ATOMICITY-DEFAULTS.md @@ -34,7 +34,7 @@ ### D-AUD-06 — Caller contract when audit insert fails (audit-only paths) -- Public functions that today return **`:ok`** and use **`log_safe`/`log_multi_safe`** for **side-channel audit** keep **`:ok`** on audit subsystem failure; emit **`[:sigra, :audit, :log_safe_error]`** (or the same telemetry contract as `emit_log_safe_error`) so operators can alert; **raise** only on programmer-wiring errors. **`@doc`** must state **`:ok` does not guarantee** the audit row exists. +- Public functions that today return **`:ok`** and use **`log_safe`/`log_multi_safe`** for **side-channel audit** keep **`:ok`** on audit subsystem failure **when the audit row is not co-fated with a durable partner write**. This covers three legitimate sub-classes: **detection-only** (the audit row is the forensic record), **pre-domain** (the event fires before a persistence target exists), and **audit-only helpers**. Emit **`[:sigra, :audit, :log_safe_error]`** (or the same telemetry contract as `emit_log_safe_error`) so operators can alert; **raise** only on programmer-wiring errors. **`@doc`** must state **`:ok` does not guarantee** the audit row exists. ### D-AUD-07 — ExUnit layout for audit fault injection diff --git a/.planning/MILESTONE-ARC.md b/.planning/MILESTONE-ARC.md new file mode 100644 index 00000000..236c5d9e --- /dev/null +++ b/.planning/MILESTONE-ARC.md @@ -0,0 +1,170 @@ +--- +last_updated: 2026-05-08 +status: active +current_release_followup: completed-REL-01 +default_post_release_candidate: EMAIL-RAILS +--- + +# Sigra Milestone Arc + +## Strategic Goal + +Make Sigra feel batteries-included for Phoenix teams until additional work becomes diminishing-return polish. + +This arc exists so milestone selection starts from a ranked strategic sequence instead of re-researching priorities every time `/gsd-new-milestone` runs. + +## Ranking Rules + +Prefer milestones that: + +1. Deepen already-shipped substrate instead of inventing new greenfield primitives. +2. Remove production or integration friction that adopters hit immediately after install. +3. Improve user/operator trust through clearer control surfaces, diagnostics, and honest docs. +4. Keep Sigra core provider-agnostic and Phoenix-native. + +Deprioritize work that is mostly: + +- generic back-office admin expansion +- hosted-control-plane imitation +- product-specific authorization policy +- compliance theater without executable seams or evidence + +## Ownership Boundaries + +**Sigra core owns:** +- auth, session, passkey, and audit invariants +- provider-agnostic contracts +- diagnostics and verification hooks + +**Generated host code owns:** +- user-facing and operator-facing UX +- email composition/layout overrides +- branding, copy, and host policy + +**Docs and optional integrations own:** +- ESP-specific setup and bounce/complaint handling +- SPF / DKIM / DMARC and sender reputation posture +- compliance recipes and operator guidance +- cross-device passkey operational guidance + +## GSD Defaults + +When milestone selection or roadmap triage is delegated: + +- default to the highest-ranked `candidate` below unless the user explicitly pivots +- only escalate decisions that affect the public contract, semver, security model, generated-host contract, or milestone order +- prefer decisive recommendations over reopening broad product-choice loops + +## Backlog Corrections + +The carried-forward future-requirement labels are not equally fresh. Treat these as corrected before planning the next milestone: + +- `SESS-01` is effectively already shipped through session/device labeling in the generated host session surface. +- `PK-01` is mostly already shipped through passkey list / rename / remove flows. +- `EMAIL-02` should mean a coherent localization workflow and override seam, not merely that gettext calls exist. + +The remaining meaningful work clusters are: + +- email reliability, override seams, and diagnostics +- passkey recovery and cross-device trust +- compliance-friendly export and data-lifecycle seams + +## Candidates + +### active-followup + +**Name:** `REL-01 Release Truth Reset` +**Priority:** required-before-new-feature-milestone +**Why now:** The repo has a milestone boundary but still needs a single coherent release/version story across package metadata, changelog, and release automation. +**Includes:** +- reconcile `mix.exs`, `.release-please-manifest.json`, `CHANGELOG.md`, and maintainer release docs +- verify the next release cut can be explained without mixing planning milestones with Hex versioning +**Non-goals:** +- new feature work +- reopening webhook trust work +- redesigning semver policy beyond restoring one coherent truth + +### shipped + +**ID:** `SESS-CTRL` +**Name:** `Session Control Plane` +**Priority:** 1 +**Status:** Shipped 2026-05-08 via Phases 108-110. +**Why it mattered:** Best leverage-to-risk ratio; strengthened trust for every adopter using mostly shipped primitives. +**Theme:** Turn existing session primitives into a coherent account-security surface. +**Delivered scope:** +- logout-other-sessions-except-current +- clearer current-session truth on user/admin surfaces +- recent security activity over persisted audit/session truth +- cleaner revoke UX and bounded docs truth +**Bounded non-goals held:** no session-store redesign, no generic account-center expansion, no timeout-history over-claim. + +### active-milestone + +**ID:** `EMAIL-RAILS` +**Name:** `Email Reliability & Override Rails` +**Priority:** 1 +**Why now:** Phoenix still leaves email integration rough edges to the host; Sigra can make the default production path legible without claiming to own deliverability itself. +**Theme:** Make auth email integration production-ready after install. +**Likely scope:** +- generated-host override seam for auth email templates +- preview and snapshot rails +- diagnostics and doctor checks for missing or inconsistent setup +- provider-agnostic telemetry and async delivery posture +- bounce / complaint hooks or stubs plus recipes +**Prerequisites:** +- keep core provider-agnostic +- preserve Swoosh and Oban seams +**Non-goals:** +- owning SPF / DKIM / DMARC +- hard-coding a preferred ESP in core +- becoming a generic inbound email-processing platform + +### candidate + +**ID:** `PK-LIFECYCLE` +**Name:** `Passkey Lifecycle Completion` +**Priority:** 2 +**Why now:** Passkey ceremony and basic management exist; the remaining risk is recovery and lifecycle trust. +**Theme:** Make passkey-primary and multi-device use trustworthy instead of just technically functional. +**Likely scope:** +- recovery-first passkey-primary posture +- last-passkey warnings +- cross-device and bootstrap guidance/UX +- tighter lifecycle integration around passkeys +**Prerequisites:** +- stable fallback and recovery story +- browser/platform validation for real flows +**Non-goals:** +- removing fallback auth by default +- inventing a custom sync layer +- broad MFA rewrite outside passkey-specific paths + +### candidate + +**ID:** `DATA-LIFECYCLE` +**Name:** `Compliance Export & Data Lifecycle` +**Priority:** 3 +**Why now:** Valuable, but lower-frequency adopter pain than session and email rough edges. +**Theme:** Extend existing export and anonymize seams into a coherent auth-data lifecycle story. +**Likely scope:** +- extend `Sigra.DataExport` +- include audit-log export posture +- clarify anonymize/delete semantics and operator recipes +**Prerequisites:** +- keep exports narrowly scoped to auth/account data +- reuse existing audit and admin export substrate +**Non-goals:** +- legal or compliance certification +- generic BI/reporting exports +- claiming host-app regulatory ownership + +## Selection Guidance + +`REL-01 Release Truth Reset` is complete. Until a stronger signal appears from real adopter feedback, use this default sequence: + +1. `EMAIL-RAILS Email Reliability & Override Rails` +2. `PK-LIFECYCLE Passkey Lifecycle Completion` +3. `DATA-LIFECYCLE Compliance Export & Data Lifecycle` + +If a future milestone proposal does not clearly advance production trust, integration clarity, or DX on rough edges, treat it as lower priority than the ranked candidates above. diff --git a/.planning/MILESTONES.md b/.planning/MILESTONES.md index ecf70ea1..dd35cbad 100644 --- a/.planning/MILESTONES.md +++ b/.planning/MILESTONES.md @@ -585,3 +585,193 @@ - [v1.15 Requirements](milestones/v1.15-REQUIREMENTS.md) --- + +## v1.20 GA Launch (SEED closure + public release) (Shipped: 2026-04-28) + +**Scope:** 6 phases (**85–90**), 14 on-disk plans. (Phase 90 waived). + +**What shipped:** **AUD-21** — OAuth audit atomicity closure, converting remaining `log_safe/3` clusters in Phase 45 T2 to atomic `Repo.transaction/1` + `Ecto.Multi`. **GAUAT-01..09** — Fully automated E2E harnesses for email visual QA, OAuth real-credential cycles, MFA backup-code rotation, and getting-started proof, resulting in SEED-001 closure. **LAUNCH-01..07** — Hex v1.20.0 publish, README promotion, and CHANGELOG alignment. + +### Key accomplishments + +1. **AUD-21 closure** — Phase 9 C-1 caveat officially downgraded to PASS. +2. **GAUAT zero-human proof** — Replaced all manual SEED-001 testing requirements with deterministic CI automation (Playwright + Premailex). +3. **v1.20.0 Public Launch** — Reached the "use this in production" inflexion point. + +### Stats + +- **Requirements:** 21/21 requirements satisfied/waived. +- **Milestone audit:** **passed** ([`milestones/v1.20-MILESTONE-AUDIT.md`](milestones/v1.20-MILESTONE-AUDIT.md)). +- **Timeline:** 2026-04-25 → 2026-04-28. + +### Tech debt carried forward + +- Lockspire glue package deferred. +- Week-one launch-feedback follow-ups deferred to patch milestone. + +**Archive:** + +- [v1.20 Roadmap](milestones/v1.20-ROADMAP.md) +- [v1.20 Requirements](milestones/v1.20-REQUIREMENTS.md) +- [v1.20 Milestone Audit](milestones/v1.20-MILESTONE-AUDIT.md) + +--- + +## v1.21 B2B-ready & production-honest (Shipped: 2026-05-06) + +**Scope:** 6 phases (**91–96**), 33 on-disk plan summaries (across 26 PLAN.md files; some phases inline-summarized). + +**What shipped:** First milestone after v1.20 public launch. Three legs converged. **Leg 1 — B2B trust** (Phases **91**, **92**, **93**) — `Sigra.Plug.RequireOrgMfa` + `enforce_mfa_for_members` + admin LiveView toggle + atomic `organization.mfa_policy_change` audit row (**B2B-01**); `Sigra.Authz` `can?/3` behaviour + nullable `role` on `OrganizationMembership` + scope-struct `:role` propagation + role-based-access-control recipe (zero opinionated roles in `lib/sigra/`) (**B2B-02**); org-scoped service-account tokens via `client_credentials` grant on existing JWT path + `current_scope.actor_type: :service_account` discriminator + 5 SA-mutation rollback proofs (**B2B-03**, re-verified 22/22 after gap-closure plans 06–10 + critical fixes in commit `bf5a8a8`). **Leg 2 — Production hardening** (Phases **94**, **95**) — `mix sigra.install` refuses non-Postgres adapter at pre-flight + removed MySQL/SQLite placeholder branches + aligned `mix.exs` description / README / getting-started narrative; environmental Oban-test caveat closed in 2026-05-06 audit (**HARD-01**); `Sigra.OptionalDeps` SOT + raise-on-missing for Oban/Bcrypt/EQRCode + `mix sigra.doctor` per-feature dep matrix + 3 dep-off CI lanes (**HARD-02**, only v1.21 phase with `nyquist_compliant: true`). **Leg 3 — OAuth + API polish** (Phase **96**) — per-provider OAuth refresh dispatch for GitHub/Apple/Facebook/Generic via Assent + atomic `oauth.token_refreshed` audit (**HARD-03**); single-pass `Sigra.Plug.RateLimit` emitting `X-RateLimit-Limit/Remaining/Reset` + `Retry-After` from Hammer state, wired into generated host's `:auth_rate_limit` pipeline (**API-01**) — 122 passing tests across 4 evidence sections. + +### Key accomplishments + +1. **Org-level MFA enforcement** — Atomic policy-change audit + plug + LiveView gate; full library suite green (33 doctests, 3 properties, 2214 tests, 0 failures). +2. **RBAC seams without opinions** — `Sigra.Authz` ships as behaviour-only; library has zero `:owner / :admin / :member` constants; recipe is the only place those names appear, illustratively. +3. **M2M service-account tokens** — `client_credentials` grant on existing JWT path; scope-struct `actor_type` discriminator; SA short-circuits user-membership and org-MFA checks; 5/5 mutations co-fated with audit (D-AUD-08). +4. **Honest Postgres-only narrative** — Aligned the documented adapter support to what CI actually exercises and what migrations actually implement. +5. **Optional-dep boot validation** — `mix sigra.doctor` reports per-feature status; missing optional deps raise tagged errors at first use instead of compiling to silent `nil`; CI matrix toggles each off. +6. **OAuth refresh dispatch + rate-limit headers** — Closed the `lib/sigra/oauth.ex:174` "not yet implemented" warning across 4 providers with atomic audit; clients on rate-limited paths get standards-compliant headers for backoff. + +### Stats + +- **Requirements:** 7/7 requirements satisfied (B2B-01, B2B-02, B2B-03, HARD-01, HARD-02, HARD-03, API-01). +- **Milestone audit:** **tech_debt → reconciled** ([`milestones/v1.21-MILESTONE-AUDIT.md`](milestones/v1.21-MILESTONE-AUDIT.md)). Substantive 7/7 with passing test evidence; bookkeeping reconciled 2026-05-06. +- **Timeline:** 2026-04-28 → 2026-05-06 (8 days). +- **Cross-phase wires verified:** B2B-02 `:actor_type` reservation → B2B-03 `:service_account` population; B2B-02 host-supplied `:roles` → B2B-03 SA short-circuit; HARD-01 Postgres-only → HARD-02 `mix sigra.doctor`; HARD-03 OAuth refresh → API-01 rate-limit headers. + +### Known deferred items at close (non-blocking) + +- 2 install-smoke pending todos from 2026-04-30: JOSE.JWT.peek_payload/1 undefined warning + transient Postgres `too_many_connections` during install smoke (both surfaced during Phase 94 work). +- `DEF-92-02-01` — InvitationAcceptLive audit-Multi-step name collision (pre-existing bug from commit `5e6c026`, predates Phase 92; recommended landing point not yet assigned). +- Nyquist VALIDATION.md gaps — only Phase 95 has `nyquist_compliant: true`; 91/92/93 have draft VALIDATION.md (`nyquist_compliant: false`); 94/96 missing entirely. Optional retroactive fill via `/gsd-validate-phase`. + +### Tech debt carried forward + +- Webhooks (`WH-01..03`) — deferred to v1.22 as its own design-first milestone (event schema, signed delivery, retry/dead-letter, host UX). +- Tier-3 polish carried in Future Requirements: Session UX (`SESS-01..03`), Email overrides + i18n + bounce (`EMAIL-01..03`), Passkey multi-authenticator + recovery (`PK-01..03`), DataExport depth (`DATA-01..03`). +- `sigra_lockspire` glue package per **ADR 001** — still awaiting companion-app trigger. + +**Archive:** + +- [v1.21 Roadmap](milestones/v1.21-ROADMAP.md) +- [v1.21 Requirements](milestones/v1.21-REQUIREMENTS.md) +- [v1.21 Milestone Audit](milestones/v1.21-MILESTONE-AUDIT.md) + +--- + +## v1.22 Webhooks / outbound event pipeline (Shipped: 2026-05-06) + +**Scope:** 6 phases (**97–102**), 20 on-disk plans. + +**What shipped:** Sigra now emits real outbound auth and identity webhooks as a first-party product surface. **Phase 97** established the event contract, durable subscription registry, stable payload envelope, and HMAC signing contract. **Phase 98** added persisted attempts, bounded retries, and dead-letter state so delivery reliability no longer depends on raw Oban semantics. **Phase 99** exposed the capability through generated admin LiveViews, routes, and adopter-facing guidance. After the first milestone audit found end-to-end gaps, **Phase 100** restored the production enqueue handoff from persisted delivery rows into the async worker path, **Phase 101** corrected operator-state query truth for retrying and dead-lettered views, and **Phase 102** proved the generated-host flow end to end while reconciling roadmap, requirements, state, and verification artifacts. + +### Key accomplishments + +1. **Stable webhook contract** — durable subscription registry, canonical event catalog, public payload serializers, and documented HMAC verification contract for Sigra-owned auth and identity events. +2. **Reliable delivery pipeline** — persisted summary rows, append-only attempt history, bounded retries, and durable dead-letter state. +3. **Generated-host operator UX** — admin LiveViews for subscription management, delivery history, failure inspection, and secret rotation. +4. **Production handoff repaired** — persisted delivery rows now enqueue the first worker job automatically from the mutation path instead of stalling before async dispatch. +5. **Operator truth restored** — retrying and dead-lettered views now match persisted delivery state before pagination. +6. **Adopter proof closed** — generated-host evidence correlates receiver-side verification with admin-visible delivery history and reconciled planning artifacts. + +### Stats + +- **Requirements:** 3/3 requirements satisfied (`WH-01..03`). +- **Milestone audit:** historical `gaps_found` audit preserved and superseded by [`102-VERIFICATION.md`](phases/102-generated-host-proof-and-planning-reconciliation/102-VERIFICATION.md) after Phases 100–102 closed the listed gaps. +- **Pre-close `audit-open`:** all artifact types clear on 2026-05-07 after resolving quick-task metadata drift and the two install-smoke todos. +- **Git (milestone range):** first milestone commit `6b8ef36` on 2026-05-06; current diff vs that start point is `43` files changed, `5833` insertions, `32` deletions. + +### Tech debt carried forward + +- Webhook follow-ons remain future work only: replay support, safer secret-rotation windows, and tighter outbound egress controls (`WH-04..06`). +- Tier-3 polish stays deferred: session UX completeness, email overrides + i18n + bounce handling, passkey multi-authenticator + recovery, and DataExport depth. +- `sigra_lockspire` glue package per **ADR 001** remains trigger-based. +- Nyquist VALIDATION.md coverage remains thin for earlier B2B phases; not part of the webhook milestone contract. + +**Archive:** + +- [v1.22 Roadmap](milestones/v1.22-ROADMAP.md) +- [v1.22 Requirements](milestones/v1.22-REQUIREMENTS.md) +- [v1.22 Milestone Audit](milestones/v1.22-MILESTONE-AUDIT.md) + +--- + +## v1.23 Webhook operator trust & controls (Shipped: 2026-05-08) + +**Scope:** 5 phases (**103–107**), 16 on-disk plans. + +**What shipped:** v1.23 closes the three operational trust gaps left after the outbound webhook pipeline launch. **Phase 103** replaced one-shot signing-secret rotation with a dual-slot lifecycle, overlap-window signatures, truthful admin controls, and generated-host proof (**WH-04**). **Phase 104** implemented replay as a new delivery lineage with durable parent/root pointers, admin recovery actions, LiveView lineage truth, and generated-host proof; **Phase 106** then turned that evidence into authoritative milestone verification via `104-VERIFICATION.md` (**WH-05**). **Phase 105** implemented enforceable endpoint policy, generated-host policy seams, and deployment guidance; **Phase 107** finished the blocked-policy admin truth, denied-path browser proof, and repaired-form `105-VERIFICATION.md` / `105-VALIDATION.md` closeout (**WH-06**). + +### Key accomplishments + +1. **Overlap-safe secret rotation** — webhook subscriptions can carry current and next secrets through a bounded overlap window without delivery loss or replay-contract drift. +2. **Replay as truthful recovery** — operators can replay dead-lettered deliveries as fresh child rows with new `delivery_id` values while preserving the original failed history and attempt ledger. +3. **Enforceable outbound policy** — Sigra can deny disallowed webhook destinations locally before egress and preserve canonical `policy_reason` / `policy_detail` truth across worker, admin, and proof surfaces. +4. **Generated-host evidence is now adopter-grade** — rotation lifecycle, replay recovery, and blocked-policy operator inspection all have durable `.planning/uat-evidence/v1.23/*` bundles. +5. **Milestone audit closed cleanly** — `WH-04..06` are all satisfied, `104-VERIFICATION.md` and `105-VERIFICATION.md` exist, and the live v1.23 audit now passes. + +### Stats + +- **Requirements:** 3/3 requirements satisfied (`WH-04`, `WH-05`, `WH-06`). +- **Milestone audit:** passed at close ([`milestones/v1.23-MILESTONE-AUDIT.md`](milestones/v1.23-MILESTONE-AUDIT.md)). +- **Pre-close `audit-open`:** all artifact types clear (2026-05-08). +- **Timeline:** 2026-05-07 → 2026-05-08. +- **Worktree delta from milestone start commit `200e131`:** 63 tracked files changed, 6131 insertions, 557 deletions. + +### Known deferred items at close + +- `REL-01` release-cut work is intentionally deferred to the next milestone now that webhook operator trust is closed honestly. +- Tier-3 follow-ons remain future work only: session UX, email overrides and i18n, passkey polish, and data-export depth. +- `sigra_lockspire` glue package per **ADR 001** remains trigger-based and out of scope for this milestone. + +### Technical debt carried forward + +- The repository was still on a dirty worktree at milestone close, so the planning archive is complete but the git closeout commit and release tag must be cut only after the shipped code and docs land in clean commits. + +**Archive:** + +- [v1.23 Roadmap](milestones/v1.23-ROADMAP.md) +- [v1.23 Requirements](milestones/v1.23-REQUIREMENTS.md) +- [v1.23 Milestone Audit](milestones/v1.23-MILESTONE-AUDIT.md) + +--- + +## v1.24 Session Control Plane (Shipped: 2026-05-08) + +**Scope:** 3 phases (**108–110**), 9 on-disk plans. + +**What shipped:** v1.24 turned Sigra's session and audit substrate into a coherent account-security control plane. **Phase 108** shipped preserve-current revoke semantics, truthful current-session labeling, and aligned user/admin/docs behavior for session truth (**SESS-02**, first `SESS-04/05` slice). **Phase 109** shipped the library-owned recent-security-activity seam plus explicit logout/MFA activity truth across generated-host, admin, and docs surfaces (**SESS-03**, remaining `SESS-04/05`). **Phase 110** converted the implementation summary chain into authoritative `108-VERIFICATION.md` and `109-VERIFICATION.md` artifacts, then reconciled the active milestone truth across planning files and the live audit. + +### Key accomplishments + +1. **Preserve-current revoke is now first-class** — users can revoke sibling sessions without losing the current device, and the operation fails closed if the preserved session cannot be proven. +2. **Current-session truth is authoritative** — user and admin surfaces derive the current session from persisted/session-token truth rather than LiveView heuristics or raw-token comparisons. +3. **Recent security activity is now Sigra-owned** — sign-in, suspicious-login, logout, revoke, and MFA verification render through a canonical library seam over persisted audit rows. +4. **Thin-host boundaries held** — generated hosts delegate session-control and activity logic to Sigra-owned seams instead of reimplementing business rules. +5. **Milestone proof is repaired and archive-ready** — `108-VERIFICATION.md`, `109-VERIFICATION.md`, and `v1.24-MILESTONE-AUDIT.md` now provide a coherent authoritative closeout surface. + +### Stats + +- **Requirements:** 4/4 requirements satisfied (`SESS-02`, `SESS-03`, `SESS-04`, `SESS-05`). +- **Milestone audit:** passed at close ([`milestones/v1.24-MILESTONE-AUDIT.md`](milestones/v1.24-MILESTONE-AUDIT.md)). +- **Pre-close `audit-open`:** all artifact types clear (2026-05-08). +- **Timeline:** 2026-05-08. +- **Scoped worktree delta at close:** 17 files changed, 1355 insertions, 243 deletions across the tracked session-control implementation and planning surfaces. + +### Known deferred items at close + +- `EMAIL-RAILS` is now the default next milestone candidate; it was intentionally not pulled into the v1.24 scope. +- `PK-LIFECYCLE` and `DATA-LIFECYCLE` remain ranked follow-ons, not hidden v1.24 gaps. +- Historical Nyquist coverage thin spots from older milestones remain non-blocking carried debt, not session-control misses. + +### Technical debt carried forward + +- The repository is still on a dirty worktree at milestone close. The planning archive can be committed selectively, but a release-accurate `v1.24` git tag must wait until the shipped implementation and proof changes are committed cleanly. + +**Archive:** + +- [v1.24 Roadmap](milestones/v1.24-ROADMAP.md) +- [v1.24 Requirements](milestones/v1.24-REQUIREMENTS.md) +- [v1.24 Milestone Audit](milestones/v1.24-MILESTONE-AUDIT.md) + +--- diff --git a/.planning/PROJECT.md b/.planning/PROJECT.md index 4711de87..d302e4f4 100644 --- a/.planning/PROJECT.md +++ b/.planning/PROJECT.md @@ -18,9 +18,54 @@ Milestone scoping for GSD (`/gsd-new-milestone`, `/gsd-plan-phase`) should prefe **GSD use:** When a phase or milestone proposal does not clearly move one of the bullets above, treat it as lower priority unless it closes a documented adoption gap or security/audit risk. -## Current milestone +**GSD preference:** When the user delegates architecture or product tradeoffs, default to researched decisive recommendations and only escalate choices that materially alter the security model, the public/semver contract, or the generated-host contract. Implementation-level forks should usually be resolved by the agent without reopening broad decision loops. -**v1.19 — JWT refresh persistence + audit co-fate & MFA enrollment failure (SEED-002)** — **Phases 82–83** (opened **2026-04-24**). Closes the **v1.18** footnote deferral: **JWT `user_tokens` rotation** (`Sigra.JWT.RefreshToken` / **`Sigra.JWT.refresh/3`**) must share a **single transactional boundary** with **`api.jwt_refresh`** / **`api.jwt_refresh_reuse`** audit rows when `:audit_schema` is set (no successful persistence with a missing audit row, and no audit row for a rolled-back rotation). Second tranche: **`AUD-04-022`** / **`EX-44-02`** — invalid pre-DB TOTP on **`Sigra.MFA.confirm_enrollment/5`** promoted to the same **`Multi` + `log_multi_safe`** discipline where semantics allow. Live **`.planning/REQUIREMENTS.md`** + **`.planning/ROADMAP.md`**. +## Latest Shipped Milestone: v1.24 Session Control Plane + +**Shipped:** 2026-05-08 + +Sigra now ships a coherent account-security control plane on top of its existing session and audit substrate: preserve-current revoke semantics, truthful current-session labeling, recent security activity over persisted audit rows, and repaired-form milestone proof that keeps the generated host, admin surfaces, docs, and planning artifacts aligned. + +Archives: +- [`.planning/milestones/v1.24-ROADMAP.md`](milestones/v1.24-ROADMAP.md) +- [`.planning/milestones/v1.24-REQUIREMENTS.md`](milestones/v1.24-REQUIREMENTS.md) +- [`.planning/milestones/v1.24-MILESTONE-AUDIT.md`](milestones/v1.24-MILESTONE-AUDIT.md) + +## Current State + +v1.24 closes the highest-leverage post-webhook trust gap for everyday adopters. Users can now revoke all other sessions without losing the current device, both user and admin surfaces derive current-session truth from authoritative persisted state, and recent security activity reflects the real audit/session lifecycle Sigra already owns. + +The active requirement set for v1.24 has been archived. The next milestone should start from a fresh `REQUIREMENTS.md`, not by extending the shipped session-control scope in place. + +## Next Milestone Goals + +- `SESS-CTRL` is now shipped and should not be replanned as open feature work. +- Follow the ranked milestone arc in [`.planning/MILESTONE-ARC.md`](MILESTONE-ARC.md) instead of re-researching candidate themes from scratch. +- Default next milestone order: email reliability/override rails first, then passkey lifecycle completion, then compliance export/data lifecycle. +- Treat stale carried-forward labels carefully: `SESS-01` and most of `PK-01` are already substantially shipped, so future milestones should focus on remaining trust and lifecycle gaps rather than replanning completed surfaces. + +## Current Milestone Status + +No active milestone requirements are defined right now. + +The next milestone should begin with a fresh `REQUIREMENTS.md`. Unless the user explicitly pivots, the ranked default is `EMAIL-RAILS` from [`.planning/MILESTONE-ARC.md`](MILESTONE-ARC.md). + +### Just shipped: v1.24 Session Control Plane + +- preserve-current revoke semantics via a library-owned `revoke_other_sessions` seam +- truthful current-session labeling across user and admin surfaces +- recent security activity over persisted audit rows +- repaired-form verification artifacts for Phases 108-109 and a passing live milestone audit + +### Previously closed milestones + +**v1.22 — Webhooks / outbound event pipeline** — **Phases 97–102** (shipped **2026-05-06**). Phase **97** established the public event catalog, durable subscription registry, stable payload envelope, and signing contract. Phase **98** made delivery reliable with persisted attempts, bounded retries, and dead-letter state. Phase **99** turned that capability into a usable adopter feature through generated admin LiveViews, routing, and host guidance. Gap-closure Phase **100** restored the production enqueue handoff from persisted delivery rows into the async worker path, Phase **101** made retrying/dead-lettered operator views truthful, and Phase **102** proved the generated-host flow end to end while reconciling `ROADMAP.md`, `REQUIREMENTS.md`, `STATE.md`, and verification artifacts. Archives: [`.planning/milestones/v1.22-ROADMAP.md`](milestones/v1.22-ROADMAP.md), [`v1.22-REQUIREMENTS.md`](milestones/v1.22-REQUIREMENTS.md), [`v1.22-MILESTONE-AUDIT.md`](milestones/v1.22-MILESTONE-AUDIT.md). + +**v1.21 — B2B-ready & production-honest** — **Phases 91–96** (shipped **2026-05-06**). Three legs converged: **B2B trust** — Phase **91** org-level MFA enforcement (**B2B-01**) with `Sigra.Plug.RequireOrgMfa` + atomic `organization.mfa_policy_change` audit row, Phase **92** RBAC seams (**B2B-02**) shipping `Sigra.Authz` behaviour + nullable `role` on memberships + scope-struct `:role` propagation + role-based-access-control recipe (zero opinionated roles), Phase **93** M2M / service-account tokens (**B2B-03**) with `client_credentials` grant on existing JWT path + `current_scope.actor_type: :service_account` discriminator + 5 SA-mutation rollback proofs (re-verified 22/22 after gap-closure plans 06–10 + critical fixes in commit `bf5a8a8`). **Production hardening** — Phase **94** Postgres-only declaration (**HARD-01**) refusing non-Postgres at `mix sigra.install` pre-flight + removed MySQL/SQLite placeholder branches + aligned `mix.exs` description / README / getting-started (env Oban-test caveat closed 2026-05-06), Phase **95** optional-dep boot validation (**HARD-02**) via `Sigra.OptionalDeps` SOT + raise-on-missing for Oban/Bcrypt/EQRCode + `mix sigra.doctor` per-feature dep matrix + 3 dep-off CI lanes (only v1.21 phase with `nyquist_compliant: true`). **OAuth + API polish** — Phase **96** OAuth refresh dispatch (**HARD-03**) for GitHub/Apple/Facebook/Generic via Assent + atomic `oauth.token_refreshed` audit + rate-limit headers (**API-01**) emitting `X-RateLimit-Limit/Remaining/Reset` + `Retry-After` from Hammer state in single-pass plug (122 passing tests across 4 evidence sections). Audit: tech_debt → reconciled (substantive 7/7; bookkeeping reconciled 2026-05-06). Open at close (non-blocking): 2 install-smoke todos from 2026-04-30, `DEF-92-02-01` pre-existing audit Multi step-name collision (predates Phase 92), Nyquist VALIDATION.md gaps for 91/92/93/94/96. Archives: [`.planning/milestones/v1.21-ROADMAP.md`](milestones/v1.21-ROADMAP.md), [`v1.21-REQUIREMENTS.md`](milestones/v1.21-REQUIREMENTS.md), [`v1.21-MILESTONE-AUDIT.md`](milestones/v1.21-MILESTONE-AUDIT.md). + +**v1.20 — GA Launch (SEED closure + public release)** — **Phases 85–90** (shipped **2026-04-28**). Closed **SEED-002** OAuth audit atomicity remainder (Phase **45 T2** clusters **052–056**, **058**, **063** to atomic **`Multi` + `log_multi_safe`**; Phase 9 **C-1 PASS-WITH-CAVEATS → PASS**). Closed **SEED-001** GA UAT — all 8 rows machine-substituted via Playwright + Premailex (**GAUAT-01..09**) with evidence under **`.planning/uat-evidence/v1.20/`**. Public launch via **`mix hex.publish`** v1.20.0 + README "use this in production" promotion + CHANGELOG alignment (**LAUNCH-01..07**). **Phase 90** publicity / monitoring waived. Archives: [`.planning/milestones/v1.20-ROADMAP.md`](milestones/v1.20-ROADMAP.md), [`v1.20-REQUIREMENTS.md`](milestones/v1.20-REQUIREMENTS.md), [`v1.20-MILESTONE-AUDIT.md`](milestones/v1.20-MILESTONE-AUDIT.md). + +**v1.19 — JWT refresh persistence + audit co-fate & MFA enrollment failure (SEED-002)** — **Phases 82–83** (shipped **2026-04-24**). Closed the **v1.18** footnote deferral: **JWT `user_tokens` rotation** (`Sigra.JWT.RefreshToken` / **`Sigra.JWT.refresh/3`**) shares a **single transactional boundary** with **`api.jwt_refresh`** / **`api.jwt_refresh_reuse`** audit rows when `:audit_schema` is set. Second tranche: **`AUD-04-022`** / **`EX-44-02`** — invalid pre-DB TOTP on **`Sigra.MFA.confirm_enrollment/5`** promoted to the same **`Multi` + `log_multi_safe`** discipline where semantics allow. Plus **Phase 84** routing-honesty reconciliation (**2026-04-25**). **Previously closed:** **v1.18 — JWT refresh / reuse audit atomicity (SEED-002 / AUD-04-048..049 / AUD-18)** (**Phase 81**, **AUD-18-01**..**AUD-18-04**, **2026-04-24**). **`Sigra.APIToken.audit_jwt_refresh/2`** / **`audit_jwt_refresh_reuse/2`** use **`Repo.transaction/1`** + audit-only **`Multi` + `log_multi_safe`** when `:audit_schema` is set; **`api_token_audit_atomic_test.exs`**; **44** / **45** / **09** / **`CHANGELOG` [Unreleased]**; **JWT persistence co-fate** explicitly deferred to **v1.19**. Verification: **`.planning/phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md`**. @@ -48,7 +93,15 @@ Milestone scoping for GSD (`/gsd-new-milestone`, `/gsd-plan-phase`) should prefe ## Current State -**v1.19 (shipped 2026-04-24):** Phases **82–83** — **AUD-19** (JWT **`user_tokens`** persistence + **`api.jwt_refresh*`** co-fate) + **AUD-20** (**`AUD-04-022`** invalid-code → **`commit_ad_hoc_mfa_audit/5`**). **`83-VERIFICATION.md`** / **`82-VERIFICATION.md`** merge gates; live **`REQUIREMENTS.md`** / **`ROADMAP.md`**. +**v1.23 (shipped 2026-05-08):** Phases **103–107** closed the webhook operator-trust follow-ons to v1.22. Phase **103** shipped overlap-safe secret rotation with a dual-slot lifecycle and overlap-window signatures (**WH-04**). Phase **104** implemented replay recovery as new delivery lineage, and Phase **106** authoritatively verified that recovery path through `104-VERIFICATION.md` (**WH-05**). Phase **105** implemented webhook egress policy enforcement, and Phase **107** closed the remaining operator-truth and evidence gap for `WH-06` through `105-VERIFICATION.md`, `105-VALIDATION.md`, and the blocked-policy proof bundle under `.planning/uat-evidence/v1.23/webhook-policy-operator-truth/`. + +**v1.22 (shipped 2026-05-06):** Phases **97–102** delivered the outbound event pipeline: signed event contract, durable subscription registry, bounded retries, dead-letter state, generated admin UX, production enqueue repair, operator-truth queries, and generated-host proof. + +**v1.21 (shipped 2026-05-06):** Phases **91–96** — B2B trust + production hardening + API polish. Org-level MFA enforcement (**B2B-01**, Phase 91), RBAC seams (**B2B-02**, Phase 92), M2M service-account tokens (**B2B-03**, Phase 93, re-verified 22/22), Postgres-only declaration (**HARD-01**, Phase 94), optional-dep boot validation + `mix sigra.doctor` (**HARD-02**, Phase 95), OAuth refresh + rate-limit headers (**HARD-03 + API-01**, Phase 96). Audit: tech_debt → reconciled. Phase numbering continues from **Phase 96**. Archives: [`.planning/milestones/v1.21-ROADMAP.md`](milestones/v1.21-ROADMAP.md), [`v1.21-REQUIREMENTS.md`](milestones/v1.21-REQUIREMENTS.md), [`v1.21-MILESTONE-AUDIT.md`](milestones/v1.21-MILESTONE-AUDIT.md). + +**v1.20 (shipped 2026-04-28):** Phases **85–90** — **AUD-21** (OAuth audit atomicity closure: Phase 45 T2 clusters 052–056 / 058 / 063 → atomic `Multi`; Phase 9 **C-1 PASS-WITH-CAVEATS → PASS**), **GAUAT-01..09** (machine substitutes for all 8 SEED-001 rows via Playwright + Premailex; evidence at `.planning/uat-evidence/v1.20/`), **LAUNCH-01..07** (Hex v1.20.0 push + README promotion + CHANGELOG alignment). Phase 90 publicity / monitoring waived. Verification: `.planning/phases/89-pre-launch-hex-publish/`, milestone audit `.planning/milestones/v1.20-MILESTONE-AUDIT.md`. + +**v1.19 (shipped 2026-04-24):** Phases **82–83** — **AUD-19** (JWT **`user_tokens`** persistence + **`api.jwt_refresh*`** co-fate) + **AUD-20** (**`AUD-04-022`** invalid-code → **`commit_ad_hoc_mfa_audit/5`**). **`83-VERIFICATION.md`** / **`82-VERIFICATION.md`** merge gates. **Phase 84** routing-honesty reconciliation closed **2026-04-25** (`84-VERIFICATION.md`). **v1.18 (shipped 2026-04-24):** Phase **81** — **AUD-18-01**..**AUD-18-04** — **`audit_jwt_refresh/2`** / **`audit_jwt_refresh_reuse/2`** transactional **`log_multi_safe`** (audit-only txn); **`api_token_audit_atomic_test.exs`**; **44** / **45** / **09** / **`CHANGELOG` [Unreleased]**; **persistence co-fate** → **v1.19**. Verification: **`.planning/phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md`**. @@ -88,11 +141,23 @@ Sigra is a Phoenix 1.8+ authentication platform spanning the v1.0 auth stack, v1 ## Next milestone goals -**v1.19** is **shipped** (**Phases 82–83**, **2026-04-24**). Prefer **CHANGELOG + Hex** for small fixes; **`/gsd-new-milestone`** when **`MAINTAINING.md`** *Resume `/gsd-new-milestone`* criteria match (e.g. loud launch + **SEED-001**, documented adoption gap, **ADR 001** glue). +**Current ranking source:** [`.planning/MILESTONE-ARC.md`](MILESTONE-ARC.md) + +**Immediate next action:** **Define the next milestone after shipped `SESS-CTRL`, starting with a fresh `REQUIREMENTS.md`** -**Backlog / hygiene:** **Phase 84** owns routing-honesty cleanup after **v1.19**. **`999.1`** and **999.x** are archaeology only; see **`.planning/ROADMAP.md`** and the **`999.1-*`** tombstone files. **`STATE.md`** is session handoff only. **Planning precedence:** **`ROADMAP.md`** + phase **`*-VERIFICATION.md`** / **`*-VALIDATION.md`** over conflicting **`STATE.md`** notes. +**Recent between-milestones closeout:** **`REL-01 Release Truth Reset`** -**Later candidates (post–v1.19):** **SEED-001** human matrix before megaphone launch; Phase **45** **T2** clusters (**052–056**, **058**, **063**) only if promoted with owner + trigger; new validation / assurance work uses newly numbered phases rather than **999.x** reuse; **`sigra_lockspire`** per ADR **001** triggers. +**Ranked follow-ons:** +- `EMAIL-RAILS` — email reliability, override seams, diagnostics, and provider-agnostic delivery posture +- `PK-LIFECYCLE` — passkey recovery, last-passkey safety, and cross-device trust +- `DATA-LIFECYCLE` — auth-data export, audit inclusion, and anonymize/delete lifecycle guidance + +**Deferred after `EMAIL-RAILS`:** +- `sigra_lockspire` glue package per **ADR 001** once a real companion-app trigger fires. +- Any theme that primarily expands generic admin CRUD, hosted-control-plane behavior, or authz policy rather than the auth control plane itself. +- Any newly identified validation or assurance work should use newly numbered phases; do not reuse **999.x**. + +**Backlog / hygiene:** **`999.1`** / **999.x** remain archaeology only; see **`.planning/ROADMAP.md`** and **`999.1-*`** tombstone files. **`STATE.md`** is session handoff only. **Planning precedence:** **`ROADMAP.md`** + phase **`*-VERIFICATION.md`** / **`*-VALIDATION.md`** over conflicting **`STATE.md`** notes.
Archived v1.2 milestone framing (Admin Dashboard) @@ -122,7 +187,26 @@ Sigra is a Phoenix 1.8+ authentication platform spanning the v1.0 auth stack, v1 ## Requirements -### Active — _(none — **v1.19** Phases **82–83** shipped **2026-04-24**; next planning follow-up: **Phase 84** / routing honesty only)_ +### Active — Next milestone not yet defined + +The active `REQUIREMENTS.md` has been intentionally cleared at the v1.24 boundary. Start the next milestone by selecting a fresh requirement contract rather than carrying v1.24 forward in place. + +### Validated — v1.22 Webhooks / outbound event pipeline (shipped 2026-05-06) + +_See [`.planning/milestones/v1.22-REQUIREMENTS.md`](milestones/v1.22-REQUIREMENTS.md) for the archived requirement contract and outcomes._ + +- ✓ **WH-01** — Host app can register outbound webhook subscriptions for Sigra-owned auth and identity events, and Sigra emits signed payloads with stable event IDs, timestamps, and a documented verification contract. +- ✓ **WH-02** — Each subscription can filter event types, failed deliveries retry automatically with bounded policy, and exhausted deliveries land in a dead-letter state with durable attempt history. +- ✓ **WH-03** — Generated admin LiveView lets adopters create, enable/disable, rotate, and inspect webhook subscriptions and delivery history without hand-editing Sigra internals. + +### Validated — v1.20 GA Launch (shipped 2026-04-28) + +- ✓ **LAUNCH-01, LAUNCH-02, LAUNCH-07** — Pre-launch Hex publish and README promotion — **Phase 89** +- ✓ **AUD-21** — OAuth audit atomicity closure (Phase 45 T2 cluster: 052–056, 058, 063 → atomic) — **Phase 85** (2026-04-25) +- ✓ **GAUAT-01** — Phase 04 lockout + suspicious-login email visual regression: 8 baselines, evidence under `.planning/uat-evidence/v1.20/email-phase-04/`, 0-human-MUA — **Phase 86** (2026-04-26) +- ✓ **GAUAT-02** — Phase 08 lifecycle email visual regression: 28 baselines, evidence under `.planning/uat-evidence/v1.20/email-phase-08/`, same residual policy as GAUAT-01 — **Phase 86** (2026-04-26) +- ✓ **GAUAT-03..09** — OAuth real-credential cycles + MFA backup-code rotation E2E + clean-machine getting-started — **Phases 87–88** (2026-04-26..28) +- ✓ **LAUNCH-03..06** — CHANGELOG alignment + maintainer monitoring lane (Phase 90 publicity / HN / community soft-launch waived per user direction) ### Validated — v1.19 JWT persistence + audit co-fate & MFA invalid-code audit (shipped in-repo 2026-04-24) @@ -397,7 +481,7 @@ _SEED-001 and SEED-002 were promoted and **closed in v1.4** (see `.planning/mile ## Constraints - **Framework:** Phoenix 1.8+ / Ecto 3.x as blessed path. Plug compatibility where DX is not compromised. -- **Database:** PostgreSQL as primary (citext, JSONB). MySQL/SQLite support via conditional migrations. +- **Database:** PostgreSQL only (citext, JSONB). - **Security:** OWASP standards throughout. Argon2id default. All tokens HMAC-protected. Enumeration prevention by default. - **Dependencies:** Minimal transitive deps. Copy-paste over deps when code is small and stable. - **LiveView:** Supported but optional. Core works with standard controllers. Login/logout via HTTP POST (not LiveView events). @@ -471,4 +555,4 @@ This document evolves at phase transitions and milestone boundaries.
-*Last updated: 2026-04-24 — **`v1.19`** Phases **82–83** shipped (**AUD-19** + **AUD-20**); **`REQUIREMENTS.md`**, **`ROADMAP.md`**, **`PROJECT.md`**, **`STATE.md`** aligned.* +*Last updated: 2026-05-08 — archived `v1.24` session control and reset the active milestone surface for `EMAIL-RAILS` selection.* diff --git a/.planning/REQUIREMENTS.md b/.planning/REQUIREMENTS.md deleted file mode 100644 index bd820f9e..00000000 --- a/.planning/REQUIREMENTS.md +++ /dev/null @@ -1,46 +0,0 @@ -# Requirements: Sigra — v1.19 JWT persistence + audit co-fate & MFA enrollment failure - -**Defined:** 2026-04-24 -**Milestone:** v1.19 — bounded **SEED-002** (**AUD-19** + **AUD-20**) - -## v1.19 Requirements - -### JWT refresh — persistence + audit co-fate (closes v1.18 “AUD-08 persistence” footnote) - -- [x] **AUD-19-01** — On successful JWT refresh, **`Sigra.JWT.RefreshToken.rotate/3`** persistence work (supersede old **`user_tokens`** row + insert new refresh token) and **`api.jwt_refresh`** emission occur in **one** `Repo.transaction` (or equivalent documented single boundary) when `:audit_schema` is set, so the host never observes persisted rotation without a matching audit row, and audit failure rolls back rotation. -- [x] **AUD-19-02** — On **`:reuse_detected`**, family-wide revocation persistence and **`api.jwt_refresh_reuse`** audit share the same transactional discipline when audit is on (aligned to **AUD-19-01** semantics). -- [x] **AUD-19-03** — Automated tests prove co-fate: happy path, audit-off, and fault injection (audit insert failure → no partial persistence / consistent `{:error, _}` or documented contract). Prefer extending **`test/sigra/api_token_audit_atomic_test.exs`** and/or focused JWT integration tests. -- [x] **AUD-19-04** — Planning truth: **`.planning/phases/09-audit-logging/09-VERIFICATION.md`** rows **048–049** footnotes, **`.planning/phases/44-mfa-account-api-atomic-batches/44-AUD-04-INVENTORY.md`**, **`.planning/phases/45-oauth-ops-c1-signoff/45-AUD-04-INVENTORY.md`** (JWT appendix as needed), **`.planning/phases/09-audit-logging/09-03-SUMMARY.md`**, **`CHANGELOG.md` [Unreleased]**; **`.planning/phases/82-jwt-refresh-persistence-audit-cofate/82-VERIFICATION.md`** records merge gate. - -### MFA — AUD-04-022 / EX-44-02 - -- [x] **AUD-20-01** — **`Sigra.MFA.confirm_enrollment/5`** invalid-TOTP (**pre-persistence**) path upgraded from standalone **`log_safe/3`** to **`Repo.transaction/1` + `Multi` + `log_multi_safe`** **or** explicit milestone waiver with updated **EX-44-02** rationale (must be captured in **83** discuss/plan if waived). -- [x] **AUD-20-02** — **`test/sigra/mfa_audit_atomicity_test.exs`** covers the **022** mechanism + rollback / audit-off parity with prior MFA atomicity phases. -- [x] **AUD-20-03** — **44** inventory row **022**, **09-VERIFICATION** C-1 **022**, **09-03-SUMMARY**, **`CHANGELOG` [Unreleased]**; **`.planning/phases/83-mfa-confirm-enrollment-022/83-VERIFICATION.md`** merge gate. - -## Future requirements - -- **ROUTE-84-01** / **02** / **03** — completed in **Phase 84** (**2026-04-25**); see `.planning/phases/84-routing-honesty-reconciliation/84-VERIFICATION.md`. -- **Phase 45 T2** promotions (**052–056**, **058**, **063**) — only if a later milestone selects them with owner + reopen trigger (**EX-45-***). -- **SEED-001** human GA matrix — launch lane milestone, not **v1.19**. - -## Out of scope - -- Re-auditing **Phase 45** merge gate **`mix ci.audit_45`** beyond regression needed for **JWT** path edits. -- **`sigra_lockspire`** / ADR **001** glue package. -- **999.x** Nyquist archaeology. - -## Traceability - -| REQ-ID | Phase | -|-----------|-------| -| AUD-19-01 | 82 | -| AUD-19-02 | 82 | -| AUD-19-03 | 82 | -| AUD-19-04 | 82 | -| AUD-20-01 | 83 | -| AUD-20-02 | 83 | -| AUD-20-03 | 83 | -| ROUTE-84-01 | 84 | -| ROUTE-84-02 | 84 | -| ROUTE-84-03 | 84 | diff --git a/.planning/RETROSPECTIVE.md b/.planning/RETROSPECTIVE.md index 63d64b04..a397857e 100644 --- a/.planning/RETROSPECTIVE.md +++ b/.planning/RETROSPECTIVE.md @@ -1,6 +1,53 @@ # Project Retrospective -*Living document updated at milestone boundaries. v1.17 section added at milestone close (2026-04-24).* +*Living document updated at milestone boundaries. v1.21 section added at milestone close (2026-05-06).* + +## Milestone: v1.21 — B2B-ready & production-honest + +**Shipped:** 2026-05-06 +**Phases:** 6 (91–96) | **Plans (on-disk):** 33 summaries across 26 PLAN.md files | **Timeline:** 2026-04-28 → 2026-05-06 (8 days) + +### What was built + +- **B2B trust leg** — Phase 91 org-level MFA enforcement (B2B-01) with `Sigra.Plug.RequireOrgMfa` + atomic `organization.mfa_policy_change` audit row; Phase 92 RBAC seams (B2B-02) shipping `Sigra.Authz` behaviour + nullable role on memberships + scope-struct `:role` propagation + role-based-access-control recipe (zero opinionated roles in `lib/sigra/`); Phase 93 M2M service-account tokens (B2B-03) with `client_credentials` grant on existing JWT path + `current_scope.actor_type: :service_account` discriminator + 5 SA-mutation rollback proofs. +- **Production hardening leg** — Phase 94 Postgres-only declaration (HARD-01) refusing non-Postgres at `mix sigra.install` pre-flight; Phase 95 optional-dep boot validation (HARD-02) via `Sigra.OptionalDeps` SOT + `mix sigra.doctor` + 3 dep-off CI lanes. +- **OAuth + API polish leg** — Phase 96 OAuth refresh dispatch (HARD-03) for GitHub/Apple/Facebook/Generic via Assent + atomic `oauth.token_refreshed` audit, plus rate-limit headers (API-01) emitting `X-RateLimit-Limit/Remaining/Reset` + `Retry-After` from Hammer state in single-pass plug — 122 passing tests across 4 evidence sections. + +### What worked + +- **Three legs in parallel** — B2B trust, production hardening, OAuth+API polish each had clean dependency boundaries; only B2B-03 → B2B-02 (`:actor_type` reservation) imposed ordering. Allowed concurrent execution and independent verification. +- **Milestone audit before close** — Surfaced strict-3-source bookkeeping gaps (Phases 94/96 verifications missing YAML frontmatter; SUMMARYs missing `requirements-completed:`) before they became archaeology debt. Substantive code was already 7/7; the audit/reconcile/close path took ~30 min of mechanical edits. +- **Phase 95 discipline** — Only v1.21 phase with `nyquist_compliant: true` and full VALIDATION.md. The dep-off CI lanes pattern is reusable for future optional-integration work. +- **Re-verification cycle on Phase 93** — Initial 5 plans → 5 gaps → gap-closure plans 06–10 → 3 new blockers → critical fixes in commit `bf5a8a8` → final 22/22. Catching the new blockers (Postgrex struct match leak, `nil and x` BadBooleanError) at re-verification rather than after release saved a hot patch. + +### What was inefficient + +- **`gsd-sdk` binary broken** — Continued from v1.20. Forced manual `milestone.complete` again; SDK CLI returned "Expected gsd-sdk run/auto/init" instead of `query` subcommand. Pre-close audit, milestone archival, and progress queries all done by hand. Same workaround pattern as v1.12–v1.20. +- **STATE.md drift** — Last STATE.md update was 2026-05-02; it still pointed at "Phase 93 verification" four days after Phase 93 was re-verified clean and Phases 94/95/96 had completed. The 2026-05-06 milestone audit had to reconcile the staleness explicitly. Lesson: STATE.md should be touched at every phase verification, not just at session pause. +- **Phase 94 "environmental caveat"** — Original 94-VERIFICATION recorded Oban.Worker compile failures as an Elixir 1.19.5 environmental issue and shipped without confirming whether they reproduced after Phase 94's own work landed. The 2026-05-06 audit verified the caveat no longer reproduces — but it sat as ambiguous tech debt for a week. Lesson: env caveats need an explicit "verified still applies on phase close SHA" line before they're allowed in VERIFICATION. + +### Patterns established + +- **`requirements-completed:` frontmatter on every SUMMARY.md** — promoted from convention to requirement after the milestone audit surfaced 3 phases worth of missing entries. +- **YAML frontmatter on every VERIFICATION.md** — same. Phase 94/96 had pure-prose VERIFICATIONs that strict 3-source matrix flagged as `unsatisfied` despite having clear passing-test evidence in the body. +- **Single milestone audit before close, not just at gaps** — even when artifacts look clean, a strict 3-source pass catches frontmatter drift cheaply. +- **Cross-phase wiring readout in audit** — Skipping the `gsd-integration-checker` subagent and doing a focused readout from existing VERIFICATIONs saved a subagent run; the wires are already documented with code pointers. + +### Key lessons + +1. **Strict 3-source matrix > prose verification** — A VERIFICATION.md with passing tests but no YAML status is half-finished. Fix this in the discuss-phase template or plan-phase output, not at milestone close. +2. **Refresh STATE.md at every verification, not at every pause** — staleness compounds and creates false signal at audit time. +3. **Env caveats need an expiry stamp** — "exists on pristine main" claims should be re-verified at phase close SHA, with the date/SHA recorded, or they become cargo cult. +4. **Re-verification cycles are good** — finding `Postgrex.Error` struct-match leak and `nil and x` LV bug at re-verification (not after merge) was the correct disposition. Worth keeping the pattern. +5. **One-shot bookkeeping reconciliation > insert-a-closure-phase** — for milestone-close clean-up work that's purely mechanical (frontmatter cleanup, traceability sync), inline reconciliation in a single commit is faster and clearer than a closure phase chain. + +### Cost observations + +- Model mix: n/a (not instrumented) +- Sessions: many (parallel work across 3 legs over 8 days) +- Notable: 33 plan summaries across 6 phases is the highest plan-density milestone since v1.0 (which had 60 plans across 12 phases). Re-verification on Phase 93 added 5 plans + 2 cycles of fix work; that should be planned for in advance for any milestone that gates on a re-verifiable observable contract. + +--- ## Milestone: v1.17 — Forced password change audit atomicity diff --git a/.planning/ROADMAP.md b/.planning/ROADMAP.md index dc2529ca..369925f3 100644 --- a/.planning/ROADMAP.md +++ b/.planning/ROADMAP.md @@ -2,10 +2,10 @@ ## Milestones -- ✅ **v1.0 Phoenix Auth Library - Initial Release** - Phases 1-10 + 10.1 + 10.1.1 (shipped 2026-04-11). See [v1.0 archive](milestones/v1.0-ROADMAP.md) and [MILESTONES.md](MILESTONES.md). -- ✅ **v1.1 Foundations** - Phases 11-23 (shipped 2026-04-16). See [v1.1 archive](milestones/v1.1-ROADMAP.md), [v1.1 requirements](milestones/v1.1-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). -- ✅ **Post-v1.1 Closeout** - Phases 24-26 (completed 2026-04-16). -- ✅ **v1.2 Admin Dashboard** - Phases 27-31 + gap closure 32-35 (shipped 2026-04-17). See [v1.2 archive](milestones/v1.2-ROADMAP.md), [v1.2 requirements](milestones/v1.2-REQUIREMENTS.md), [v1.2 milestone audit](milestones/v1.2-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.0 Phoenix Auth Library — Initial Release** — Phases 1-10 + 10.1 + 10.1.1 (shipped 2026-04-11). See [v1.0 archive](milestones/v1.0-ROADMAP.md), [v1.0 requirements](milestones/v1.0-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.1 Foundations** — Phases 11-23 (shipped 2026-04-16). See [v1.1 archive](milestones/v1.1-ROADMAP.md), [v1.1 requirements](milestones/v1.1-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **Post-v1.1 Closeout** — Phases 24-26 (completed 2026-04-16). +- ✅ **v1.2 Admin Dashboard** — Phases 27-31 + gap closure 32-35 (shipped 2026-04-17). See [v1.2 archive](milestones/v1.2-ROADMAP.md), [v1.2 requirements](milestones/v1.2-REQUIREMENTS.md), [v1.2 milestone audit](milestones/v1.2-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). - ✅ **v1.3 Cleanup & Hardening** — Phases 36-40 (shipped 2026-04-19). See [v1.3 archive](milestones/v1.3-ROADMAP.md), [v1.3 requirements](milestones/v1.3-REQUIREMENTS.md), [v1.3 milestone audit](milestones/v1.3-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). - ✅ **v1.4 GA readiness & audit trail completeness** — Phases **41–52** (shipped **2026-04-22**). See [v1.4 archive](milestones/v1.4-ROADMAP.md), [v1.4 requirements](milestones/v1.4-REQUIREMENTS.md), [v1.4 milestone audit](milestones/v1.4-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). - ✅ **v1.5 Public release narrative & community readiness** — Phases **53–56** (shipped **2026-04-22**). See [v1.5 archive](milestones/v1.5-ROADMAP.md), [v1.5 requirements](milestones/v1.5-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). @@ -15,198 +15,36 @@ - ✅ **v1.9 Audit atomicity (bounded SEED-002)** — Phases **66–67** (shipped **2026-04-23**). See [v1.9 archive](milestones/v1.9-ROADMAP.md), [v1.9 requirements](milestones/v1.9-REQUIREMENTS.md), [v1.9 milestone audit](milestones/v1.9-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). - ✅ **v1.10 Adopter confidence for solo production** — Phases **68–70** (shipped **2026-04-23**). See [v1.10 archive](milestones/v1.10-ROADMAP.md), [v1.10 requirements](milestones/v1.10-REQUIREMENTS.md), [v1.10 milestone audit](milestones/v1.10-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). - ✅ **v1.11 Adoption stabilization** — Phases **71–72** (shipped **2026-04-23**). See [v1.11 archive](milestones/v1.11-ROADMAP.md), [v1.11 requirements](milestones/v1.11-REQUIREMENTS.md), and triage [v1.11-TRIAGE.md](v1.11-TRIAGE.md). -- ✅ **v1.12 Trust, evidence, and adoption polish** — Phases **73–75** (shipped **2026-04-24**). See [v1.12 archive](milestones/v1.12-ROADMAP.md), [v1.12 requirements](milestones/v1.12-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). Bounded **SEED-002** batch + **SEED-001** evidence index + triage-driven doc polish. -- ✅ **v1.13 Post–v1.12 operational cadence** — Phase **76** (shipped **2026-04-24**). See [v1.13 archive](milestones/v1.13-ROADMAP.md), [v1.13 requirements](milestones/v1.13-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). Planning-only cadence lock-in (**CAD-01**..**CAD-03**). -- ✅ **v1.14 Bounded audit trust closure** — Phase **77** (shipped **2026-04-24**). See [v1.14 archive](milestones/v1.14-ROADMAP.md), [v1.14 requirements](milestones/v1.14-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). **SEED-002** slice — **AUD-04-033** / **034** (**AUD-13**). -- ✅ **v1.15 Account + API C-1 planning truth** — Phase **78** (shipped **2026-04-24**). See [v1.15 archive](milestones/v1.15-ROADMAP.md), [v1.15 requirements](milestones/v1.15-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). **AUD-14**..**AUD-14-05**. +- ✅ **v1.12 Trust, evidence, and adoption polish** — Phases **73–75** (shipped **2026-04-24**). See [v1.12 archive](milestones/v1.12-ROADMAP.md), [v1.12 requirements](milestones/v1.12-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.13 Post–v1.12 operational cadence** — Phase **76** (shipped **2026-04-24**). See [v1.13 archive](milestones/v1.13-ROADMAP.md), [v1.13 requirements](milestones/v1.13-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.14 Bounded audit trust closure** — Phase **77** (shipped **2026-04-24**). See [v1.14 archive](milestones/v1.14-ROADMAP.md), [v1.14 requirements](milestones/v1.14-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.15 Account + API C-1 planning truth** — Phase **78** (shipped **2026-04-24**). See [v1.15 archive](milestones/v1.15-ROADMAP.md), [v1.15 requirements](milestones/v1.15-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). - ✅ **v1.16 API verify failure audit atomicity** — Phase **79** (shipped **2026-04-24**). See [v1.16 archive](milestones/v1.16-ROADMAP.md), [v1.16 requirements](milestones/v1.16-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). -- ✅ **v1.17 Forced password change audit atomicity (SEED-002 / AUD-04-043)** — Phase **80** (shipped **2026-04-24**). See [v1.17 requirements](milestones/v1.17-REQUIREMENTS.md), [milestone archive](milestones/v1.17-ROADMAP.md), and [MILESTONES.md](MILESTONES.md). -- ✅ **v1.18 JWT refresh / reuse audit atomicity (SEED-002 / AUD-04-048..049)** — Phase **81** (shipped **2026-04-24**). [MILESTONES.md](MILESTONES.md); verification [`.planning/phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md`](phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md). -- **v1.19 JWT refresh persistence + audit co-fate & MFA enrollment failure** — Phases **82–83** (opened **2026-04-24**). Live [REQUIREMENTS.md](REQUIREMENTS.md); phases **82** then **83** below. -- ✅ **Post-v1.19 routing honesty follow-up** — Phase **84** (completed **2026-04-25**). Removed stale executable pointers to superseded **`999.1`** and kept future validation work on newly numbered phases only. - -## Phases - -### v1.19 — shipped **2026-04-24** (Phases **82–83**) - -**Coverage:** 7 requirements → 2 phases (**AUD-19** + **AUD-20**). Numbering continues from **v1.18** (last phase **81**). - -
-✅ v1.19 MFA **`AUD-04-022`** closure (Phase **83**) — SHIPPED **2026-04-24** - -| Phase | Name | Goal | Requirements | Success criteria (observable) | -|-------|------|------|--------------|----------------------------| -| **83** | MFA **`AUD-04-022`** closure | Promote invalid pre-DB TOTP on **`confirm_enrollment/5`** to **`commit_ad_hoc_mfa_audit/5`** when `:audit_schema` is set. | AUD-20-01, AUD-20-02, AUD-20-03 | (1) **`lib/sigra/mfa.ex`** uses transactional **`log_multi_safe`** for **`mfa.enroll.failure`**. (2) **`mfa_audit_atomicity_test.exs`** invalid-code matrix. (3) **44** / **09** / **09-03** / **`CHANGELOG`** + **`83-VERIFICATION.md`**. | - -**At a glance:** **`Sigra.MFA.confirm_enrollment/5`**; **`test/sigra/mfa_audit_atomicity_test.exs`**; **44-AUD-04-INVENTORY** row **022** + **EX-44-02** appendix; **09-VERIFICATION** C-1 **022** → **T1**. Verification: [`.planning/phases/83-mfa-confirm-enrollment-022/83-VERIFICATION.md`](phases/83-mfa-confirm-enrollment-022/83-VERIFICATION.md). - -
- -
-✅ v1.19 JWT refresh persistence + audit co-fate (Phase 82) — SHIPPED 2026-04-24 - -| Phase | Name | Goal | Requirements | Success criteria (observable) | -|-------|------|------|--------------|----------------------------| -| **82** | JWT refresh persistence + audit co-fate | Single transactional boundary for **`user_tokens`** rotation / reuse revocation and **`api.jwt_refresh`** / **`api.jwt_refresh_reuse`** when audit is on. | AUD-19-01, AUD-19-02, AUD-19-03, AUD-19-04 | (1) **`Sigra.JWT.refresh/3`** does not commit refresh-token DB effects unless audit succeeds when `:audit_schema` set. (2) Reuse path matches same discipline. (3) **`jwt_refresh_audit_cofate_test.exs`** proves rollback / audit-off. (4) **09** / **44** / **45** / **09-03** / **`CHANGELOG`** + **`82-VERIFICATION.md`**. | - -**At a glance:** **`Sigra.JWT.refresh/3`** + **`append_api_token_jwt_audit_to_multi`**; **`test/sigra/jwt_refresh_audit_cofate_test.exs`**; **44** / **45** / **09** / **`CHANGELOG` [Unreleased]**; **AUD-08** for guided **`JWT.refresh`**. Verification: [`.planning/phases/82-jwt-refresh-persistence-audit-cofate/82-VERIFICATION.md`](phases/82-jwt-refresh-persistence-audit-cofate/82-VERIFICATION.md) (**merge gate pending** until Postgres test run). - -
- -
-✅ v1.18 JWT refresh / reuse audit atomicity (Phase 81) — SHIPPED 2026-04-24 - -| Phase | Name | Goal | Requirements | Success criteria (observable) | -|-------|------|------|--------------|----------------------------| -| **81** | JWT refresh audit atomicity | Replace hybrid **`log_safe/3`** on **`audit_jwt_refresh/2`** and **`audit_jwt_refresh_reuse/2`** with transactional **`log_multi_safe`** when audit is on; align **44**/**45**/**09**/**CHANGELOG**. | AUD-18-01, AUD-18-02, AUD-18-03, AUD-18-04 | (1) Both helpers use **`Repo.transaction/1`** + audit-only **`Multi` + `log_multi_safe`** when `:audit_schema` set. (2) **`api_token_audit_atomic_test.exs`** covers success, audit-off, and fault injection. (3) **44**/**45** inventories + **09-VERIFICATION** rows **048–049** + **09-03-SUMMARY** + **`CHANGELOG` [Unreleased]** match **`lib/sigra/api_token.ex`**. (4) **`81-VERIFICATION.md`** records merge gate outcome. | - -**At a glance:** **81** — **`commit_api_token_jwt_audit/3`**; **`api_token_audit_atomic_test.exs`** JWT rows; **44** / **45** / **09** / **`CHANGELOG` [Unreleased]**; **JWT persistence + audit co-fate** → **v1.19** / **Phase 82**. Verification: [`.planning/phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md`](phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md). - -**Coverage:** 4 requirements → 1 phase. - -
- -
-✅ v1.17 Forced password change audit atomicity (Phase 80) — SHIPPED 2026-04-24 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.17-ROADMAP.md`](milestones/v1.17-ROADMAP.md). - -**At a glance:** **80** — **`Sigra.Account.clear_password_change_requirement/3`** **`Multi` + `log_multi_safe`** for **AUD-04-043**; **`account_audit_atomicity_test.exs`**; **44** / **09** / **09-03-SUMMARY** / **`CHANGELOG` [Unreleased]**; **EX-44-05** closed (**AUD-17**). Verification: [`.planning/phases/80-forced-password-change-audit/80-VERIFICATION.md`](phases/80-forced-password-change-audit/80-VERIFICATION.md). - -
- -
-✅ v1.16 API verify failure audit atomicity (Phase 79) — SHIPPED 2026-04-24 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.16-ROADMAP.md`](milestones/v1.16-ROADMAP.md). - -**At a glance:** **79** — **`Sigra.APIToken.verify/2`** failure **`api.token_verify.failure`** **`Multi` + `log_multi_safe`** (**AUD-04-044..046**); **`api_token_audit_atomic_test.exs`**; **44** / **09** / **09-03-SUMMARY** / **`CHANGELOG` [Unreleased]**; **D-27** preserved. Verification: [`.planning/phases/79-api-token-verify-failure-audit/79-VERIFICATION.md`](phases/79-api-token-verify-failure-audit/79-VERIFICATION.md). - -
- -
-✅ v1.15 Account + API C-1 planning truth (Phase 78) — SHIPPED 2026-04-24 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.15-ROADMAP.md`](milestones/v1.15-ROADMAP.md). - -**At a glance:** **78** — **44** + **09** C-1 planning truth for **AUD-04-035..042**, **047** vs **`lib/sigra/account.ex`** / **`lib/sigra/api_token.ex`**; **`09-03-SUMMARY`** + **`CHANGELOG` [Unreleased]**; **`account_audit_atomicity_test.exs`** **`change_password`**. Verification: [`.planning/phases/78-account-api-c1-planning-truth/78-VERIFICATION.md`](phases/78-account-api-c1-planning-truth/78-VERIFICATION.md). - -
- -
-✅ v1.14 Bounded audit trust closure (Phase 77) — SHIPPED 2026-04-24 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.14-ROADMAP.md`](milestones/v1.14-ROADMAP.md). - -**At a glance:** **77** — **`audit_backup_codes_regenerate/3`** / **`audit_trust_browser/2`** → **`commit_ad_hoc_mfa_audit/5`** (**`Multi` + `log_multi_safe`**); **`mfa_audit_atomicity_test.exs`**; **09** / **44** / **CHANGELOG** truth (**AUD-13-01**..**AUD-13-04**). Verification: [`.planning/phases/77-mfa-adhoc-audit-multi/77-VERIFICATION.md`](phases/77-mfa-adhoc-audit-multi/77-VERIFICATION.md). - -
- -
-✅ v1.13 Post–v1.12 operational cadence (Phase 76) — SHIPPED 2026-04-24 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.13-ROADMAP.md`](milestones/v1.13-ROADMAP.md). - -**At a glance:** **76** — **PROJECT** / **STATE** / **ROADMAP** / **REQUIREMENTS** + **76-VERIFICATION.md** record default Hex patch cadence and trust-signal event lanes (**CAD-01**..**CAD-03**). - -
- -
-✅ v1.12 Trust, evidence, and adoption polish (Phases 73–75) — SHIPPED 2026-04-24 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.12-ROADMAP.md`](milestones/v1.12-ROADMAP.md). - -**At a glance:** **73** bounded **C-1** **`Multi`** + **`log_multi_safe`** + **`mfa_audit_atomicity_test.exs`** (**AUD-11**); **74** **09-03-SUMMARY** + **v1.12-UAT-EVIDENCE** + **`docs/uat-ci-coverage.md`** (**AUD-12**, **UAT-01**, **UAT-02**); **75** **`upgrading-to-v1.12.md`** + trust-bundle surfacing + **`v1.11-TRIAGE.md`** reconciliation (**TRN-01**..**TRN-03**). - -
- -
-✅ v1.11 Adoption stabilization (Phases 71–72) — SHIPPED 2026-04-23 - -| Phase | Name | Goal | Requirements | Success criteria (observable) | -|-------|------|------|----------------|----------------------------| -| **71** | Triage + maintainer pause guidance | Record adoption signals; document when to pause GSD milestones. | STAB-01, STAB-03 | (1) **`.planning/v1.11-TRIAGE.md`** is complete and linked from **`upgrading-to-v1.11.md`**. (2) **`MAINTAINING.md`** contains **Milestone cadence and pause** with pause/resume criteria. | -| **72** | Upgrade stub + intro cross-links | Planning **v1.11** vs Hex is legible; intro docs surface upgrade pages. | STAB-02, STAB-04 | (1) **`guides/introduction/upgrading-to-v1.11.md`** ships and appears in **`mix.exs`** ExDoc extras after **v1.10** upgrade page. (2) **getting-started** faster path lists **v1.10** and **v1.11** upgrade links; **upgrading-to-v1.10** See also links **v1.11**. | - -**Coverage:** 4 requirements → 2 phases. Phase numbering continues from **v1.10** (last phase **70**). - -
- -
-✅ v1.10 Adopter confidence for solo production (Phases 68–70) — SHIPPED 2026-04-23 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.10-ROADMAP.md`](milestones/v1.10-ROADMAP.md). - -**At a glance:** **68** deployment + mail confidence hub (**`068-VERIFICATION.md`**, **ACF-01** / **ACF-04**); **69** intermediate path + **`generator-options`** index (**`069-VERIFICATION.md`**, **ACF-02** / **ACF-03**); **70** **`upgrading-to-v1.10.md`** + non-goal attestation (**`070-VERIFICATION.md`**, **ACF-05** / **ACF-06**). - -
- -
-✅ v1.9 Audit atomicity (Phases 66–67) — SHIPPED 2026-04-23 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.9-ROADMAP.md`](milestones/v1.9-ROADMAP.md). - -**At a glance:** **66** **`confirm_enrollment/5`** **AUD-04-020..021** **`Multi`** + **`mfa_audit_atomicity_test.exs`** (**AUD-09**); **67** **`09-03-SUMMARY.md`** + **D-06** attestation (**AUD-10**). - -
- -
-✅ v1.8 Adopter polish (Phases 63–65) — SHIPPED 2026-04-23 - -Full phase table, goals, and success criteria are archived in [`milestones/v1.8-ROADMAP.md`](milestones/v1.8-ROADMAP.md). - -**At a glance:** **63** **`upgrading-to-v1.8.md`** + ExDoc extras + SemVer framing (**ADOPT-04**); **64** cross-links among getting-started, first-hour, troubleshooting, and upgrade paths (**ADOPT-05**); **65** companion recipe prerequisite / when-not-to-use / See also polish (**INTG-02**). - -
- -
-✅ v1.7 Adoption readiness & audit durability (Phases 60–62) — SHIPPED 2026-04-23 - -Full phase table, success criteria, and Phase **60** directory note are archived in [`milestones/v1.7-ROADMAP.md`](milestones/v1.7-ROADMAP.md). - -**At a glance:** **60** adoption + companion recipe (**ADOPT-01..03**, **INTG-01**); **61** bounded **SEED-002** batch — `verify_backup/4` failure **`Multi`** + **`mfa_audit_atomicity_test.exs`** + **AUD-04-067** (**AUD-01**); **62** **`09-03-SUMMARY.md`** + **AUD-02** closure. - -
- -
-✅ v1.4 GA readiness & audit trail completeness (Phases 41–52) — SHIPPED 2026-04-22 - -The live phase table, success criteria, and the **44/45 vs 47–49** reader note are archived in [`milestones/v1.4-ROADMAP.md`](milestones/v1.4-ROADMAP.md). - -At a glance: **41** backup-code rotation (**GA-01**); **42** GA matrix scaffold; **43–45** audit inventory + Auth / MFA–Account–API / OAuth–ops batches (**AUD-04..AUD-08** implementation); **46** GA matrix gap closure (**GA-02..GA-05**); **47–49** formal `*-VERIFICATION.md` gates + requirements reconciliation; **50** Nyquist policy + **`mix ci.install_golden`** / **`install_golden_contract`**; **51** CI path coupling for installer golden; **52** roadmap and milestone-honesty contract tests. - -
- -
-✅ v1.5 Public release narrative & community readiness (Phases 53–56) — SHIPPED 2026-04-22 - -Full phase table, goals, and canonical refs are archived in [`milestones/v1.5-ROADMAP.md`](milestones/v1.5-ROADMAP.md). - -At a glance: **53** Hex / `mix.exs` metadata (**PUB-01**); **54** `CHANGELOG.md` milestone anchors (**PUB-02**); **55** README + ExDoc GA entry paths (**DOC-01**, **DOC-02**); **56** maintainer **First public launch** checklist in `MAINTAINING.md` (**MAINT-01**). - -
- -
-✅ v1.6 Nyquist closure + OAuth audit depth (Phases 57–59) — SHIPPED 2026-04-23 - -Full phase table, goals, success criteria, and reader note are archived in [`milestones/v1.6-ROADMAP.md`](milestones/v1.6-ROADMAP.md). - -**At a glance:** **57** canonical **41–44** posture matrix + contract test (**NYQ-01**, **NYQ-02**); **58** **`Sigra.OAuthCeremonyAuditTest`** + CI coupling contract (**OA-01**); **59** **OA-02** alignment across **`docs/uat-ci-coverage.md`**, **GA-03** / waiver / evidence **INDEX**, and **`docs/ga-evidence.md`**. - -**Reader note:** Phases **41–44** shipped under v1.4; v1.6 makes **Nyquist posture** and **OAuth↔audit machine proof** legible — honest disposition is mandatory. - -
- -### Post-v1.19 follow-up — completed (Phase **84**) - -| Phase | Name | Goal | Requirements | Success criteria (observable) | -|-------|------|------|--------------|----------------------------| -| **84** | Routing honesty reconciliation | Align **`STATE.md`**, **`ROADMAP.md`**, and related planning surfaces so no active workflow points at superseded **`999.1`**; preserve **`999.1`** as archaeology-only and route any future Nyquist work to newly numbered phases. | ROUTE-84-01, ROUTE-84-02, ROUTE-84-03 | Complete — **`STATE.md`** no longer marks **`999.1`** as next/current/planned, live planning hubs describe **`999.1`** / **`999.2`** as archaeology-only, and **Phase 84** verification artifacts document the routing rule. | - -**At a glance:** planning-surface honesty only — no Sigra runtime/library code changes; canonical supersession remains [`.planning/phases/999.1-nyquist-retroactive-validation-pass/999.1-CONTEXT.md`](phases/999.1-nyquist-retroactive-validation-pass/999.1-CONTEXT.md) and Phase **36** evidence. Verification: [`.planning/phases/84-routing-honesty-reconciliation/84-VERIFICATION.md`](phases/84-routing-honesty-reconciliation/84-VERIFICATION.md). +- ✅ **v1.17 Forced password change audit atomicity** — Phase **80** (shipped **2026-04-24**). See [v1.17 archive](milestones/v1.17-ROADMAP.md), [v1.17 requirements](milestones/v1.17-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.18 JWT refresh / reuse audit atomicity** — Phase **81** (shipped **2026-04-24**). See [MILESTONES.md](MILESTONES.md); verification [`.planning/phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md`](phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md). +- ✅ **v1.19 JWT refresh persistence + audit co-fate & MFA enrollment failure** — Phases **82–83** (shipped **2026-04-24**). See [MILESTONES.md](MILESTONES.md). +- ✅ **Post-v1.19 routing honesty follow-up** — Phase **84** (completed **2026-04-25**). +- ✅ **v1.20 GA Launch — SEED closure + public release** — Phases **85–90** (shipped **2026-04-28**). See [v1.20 archive](milestones/v1.20-ROADMAP.md), [v1.20 requirements](milestones/v1.20-REQUIREMENTS.md), [v1.20 milestone audit](milestones/v1.20-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.21 B2B-ready & production-honest** — Phases **91–96** (shipped **2026-05-06**). See [v1.21 archive](milestones/v1.21-ROADMAP.md), [v1.21 requirements](milestones/v1.21-REQUIREMENTS.md), [v1.21 milestone audit](milestones/v1.21-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.22 Webhooks / outbound event pipeline** — Phases **97–102** (shipped **2026-05-06**). See [v1.22 archive](milestones/v1.22-ROADMAP.md), [v1.22 requirements](milestones/v1.22-REQUIREMENTS.md), [v1.22 milestone audit](milestones/v1.22-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.23 Webhook operator trust & controls** — Phases **103–107** (shipped **2026-05-08**). See [v1.23 archive](milestones/v1.23-ROADMAP.md), [v1.23 requirements](milestones/v1.23-REQUIREMENTS.md), [v1.23 milestone audit](milestones/v1.23-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.24 Session Control Plane** — Phases **108–110** (shipped **2026-05-08**). See [v1.24 archive](milestones/v1.24-ROADMAP.md), [v1.24 requirements](milestones/v1.24-REQUIREMENTS.md), [v1.24 milestone audit](milestones/v1.24-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). ## Backlog (parking lot — not in the active roadmap until promoted) - **999.1** / **999.2** — historical parking-lot labels; shipped in v1.3 — keep directories under `.planning/phases/` as archaeology only. Do not plan new work under **999.x**; use newly numbered phases. -- **SEED-002** — broad `log_safe/3` → `Ecto.Multi` conversion; trigger when audit-aware refactors are scheduled or compliance demands it. -- Items not mapped in archived requirements stay here until a future milestone selects them. +- **`sigra_lockspire` glue package per ADR 001** — still awaiting companion-app trigger; explicitly out of scope for v1.23. +- **`REL-01` release truth reset** — completed between milestones; reconciled version/release truth across package metadata, changelog framing, and maintainer-facing release docs. +- **`EMAIL-RAILS` email reliability + override rails** — ranked feature candidate #1; focus on override seams, previews, diagnostics, and provider-agnostic delivery posture. +- **`PK-LIFECYCLE` passkey lifecycle completion** — ranked feature candidate #2; recovery, last-passkey safety, and cross-device trust matter more than already-shipped passkey CRUD. +- **`DATA-LIFECYCLE` compliance export + data lifecycle** — ranked feature candidate #3; extend existing export and anonymize seams after more universal adoption blockers are closed. +- **Built-in opinionated roles** — RBAC stays seams-only per Phase **92**. +- **MySQL / SQLite adapters** — explicitly removed via Phase **94**; re-evaluate only if an adopter signals concrete demand and is willing to own the adapter. +- **Phase 999.x archaeology** — pure planning hygiene; tombstone-only. +- **Items not mapped in archived requirements** — stay here until a future milestone selects them into a new `REQUIREMENTS.md`. + +## Arc Notes + +- Treat [`.planning/MILESTONE-ARC.md`](MILESTONE-ARC.md) as the ranking source for the next several milestones. +- Do not treat `SESS-01` or `PK-01` as fresh greenfield gaps: session/device labeling and passkey list/rename/remove are already substantially shipped. +- Prefer milestones that improve production trust, integration clarity, or DX on rough edges over generic admin expansion or hosted-control-plane imitation. diff --git a/.planning/STATE.md b/.planning/STATE.md index 8c820eba..7a656935 100644 --- a/.planning/STATE.md +++ b/.planning/STATE.md @@ -1,16 +1,16 @@ --- gsd_state_version: 1.0 -milestone: v1.19 -milestone_name: — shipped **2026-04-24** -status: ready_to_plan -last_updated: "2026-04-25T17:09:42.837Z" -last_activity: 2026-04-25 -- Phase 84 routing honesty reconciliation complete +milestone: v1.24 +milestone_name: Session Control Plane (shipped) +status: "v1.24 is archived in planning truth: Phases 108-109 implemented the milestone, Phase 110 authoritatively verified/reconciled it, and the next milestone selection is EMAIL-RAILS unless the user pivots" +last_updated: "2026-05-08T16:05:00Z" +last_activity: 2026-05-08 progress: - total_phases: 72 - completed_phases: 65 - total_plans: 194 - completed_plans: 200 - percent: 90 + total_phases: 3 + completed_phases: 3 + total_plans: 9 + completed_plans: 9 + percent: 100 --- # Project State @@ -23,44 +23,56 @@ See: `.planning/PROJECT.md` **North star (milestones):** Prefer work that moves **North Star (milestones)** in `.planning/PROJECT.md` — production trust, integration path, DX. -**Current focus:** **v1.19** shipped; **Phase 84** routing honesty reconciliation is complete. No active **999.x** work is planned. +**Current focus:** v1.24 milestone archived in planning truth; next milestone selection and fresh requirements are pending. +**Arc source:** [`.planning/MILESTONE-ARC.md`](MILESTONE-ARC.md) now promotes `EMAIL-RAILS` as the default next milestone candidate. ## Current Position -Milestone: **v1.19** — **shipped** (**Phases 82–83**, **2026-04-24**) +Milestone: **v1.24 — Session Control Plane (shipped)** -Phase: 84 (routing-honesty-reconciliation) — COMPLETE +Phase: **110 — Session control plane verification closeout** +Plan: **Archived** +Status: `SESS-CTRL` is shipped and archived in the active planning surface: Phase 108 shipped preserve-current revoke plus the first session-truth slice, Phase 109 shipped recent security activity plus the remaining truth alignment, and Phase 110 reconciled the active proof surface before archival. -Plan: 1 of 1 +Last activity: 2026-05-08 — archived the verified v1.24 milestone, copied the audit into `.planning/milestones/`, removed the active `REQUIREMENTS.md`, and promoted `EMAIL-RAILS` to the default next milestone candidate. -Status: Ready to plan a later newly numbered phase or milestone +Carried-forward context (non-blocking): +- DEF-92-02-01 pre-existing audit Multi step-name collision (predates Phase 92) +- Nyquist coverage thin: 91/92/93 draft VALIDATION.md; 94/96 missing VALIDATION.md (Phase 95 only one with `nyquist_compliant: true`) -Last activity: 2026-04-25 -- `84-01-SUMMARY.md` and `84-VERIFICATION.md` recorded +### Quick Tasks Completed -## Performance Metrics +| # | Description | Date | Commit | Directory | +|---|-------------|------|--------|-----------| +| 260502-lzl | fix PR #37 CI red — 6 mechanical drift fixes (4 commits + 2 documented no-ops; 2357/2358 local pass) | 2026-05-02 | 80ecae7 | [260502-lzl-fix-pr-37-ci-red-6-mechanical-drift-fixe](./quick/260502-lzl-fix-pr-37-ci-red-6-mechanical-drift-fixe/) | +| 260502-oc7 | fix PR #37 CI groups B + D — Oban-off worker contract + OAuth Assent leak + admin policy arity (4 commits incl. fixture rebless; 2357/2358 local pass) | 2026-05-02 | 022b35b | [260502-oc7-fix-pr-37-ci-groups-b-d-oban-off-worker-](./quick/260502-oc7-fix-pr-37-ci-groups-b-d-oban-off-worker-/) | +| (manual) | PR #37 CI Group C partial — CLOAK_KEY env added to example_unit_smoke + example_http_smoke + example_playwright_smoke (3 of 6 jobs in Group C; the other 3 already had CLOAK_KEY and need separate diagnosis). Manual fix because gsd-sdk binary is broken (asdf shim → missing dist/cli.js). | 2026-05-02 | 5f32cfc | (no quick dir — manual) | -_Velocity metrics populate during phase work._ +## Decisions -## Accumulated Context - -**v1.19** — **Phase 83** shipped **AUD-20** — **`Sigra.MFA.confirm_enrollment/5`** invalid TOTP records **`mfa.enroll.failure`** via **`commit_ad_hoc_mfa_audit/5`** (**`Repo.transaction/1`** + **`Multi` + `log_multi_safe`**) when **`:audit_schema`** is set; caller always **`{:error, :invalid_code}`** on crypto failure (**D-83-02**). **Phase 82** shipped **AUD-19** — JWT refresh persistence + audit co-fate (**`Sigra.JWT.refresh/3`**). **Phase 81** standalone **`audit_jwt_refresh*`** helpers unchanged for backward compatibility. - -### Pending Todos - -- Flip **`82-VERIFICATION.md`** checklist after **`mix test test/sigra/jwt_refresh_audit_cofate_test.exs`** passes locally/CI (merge gate hygiene for **AUD-19** evidence). - -### Blockers/Concerns - -_None._ +- Completed v1.21 milestone (2026-05-06) with all seven requirements substantively satisfied and reconciled. +- Closed v1.22 (2026-05-06) with Phases 97-102, including production enqueue repair, operator-state query truth, and generated-host proof evidence under `.planning/uat-evidence/v1.22/generated-host-proof/`. +- Opened v1.23 (2026-05-07) as the next best leverage point after v1.22: operator-trust follow-ons for the outbound webhook surface, not a release-admin or polish-only detour. +- Closed the active `SESS-CTRL` milestone truth on 2026-05-08: Phase 108 implemented `SESS-02` and the first `SESS-04/05` slice, Phase 109 implemented `SESS-03` and the remaining `SESS-04/05` alignment, and Phase 110 authoritatively verified/reconciled those outcomes. +- Archived v1.24 on 2026-05-08: roadmap, requirements, and audit now live under `.planning/milestones/`, and the active planning surface no longer treats session control as an open milestone. +- Phase 106 is the authoritative replay closeout: Phase 104 implemented `WH-05` and Phase 106 verified and reconciled it. +- Phase 107 is the authoritative `WH-06` closeout: Phase 105 implemented the egress-policy contract and Phase 107 verified the operator-truth and evidence chain. +- Phase numbering continues from the shipped webhook milestone; `--reset-phase-numbers` not used. +- v1.22 remains intentionally outbound-only: Sigra emits auth and identity events to downstream systems; inbound provider webhooks remain out of scope. +- Phase 102 supersedes the initial `gaps_found` webhook milestone audit through `.planning/phases/102-generated-host-proof-and-planning-reconciliation/102-VERIFICATION.md`. +- Install-smoke follow-ups from 2026-04-30 were closed on 2026-05-07 as harness maintenance, not milestone requirements. +- The strategic backlog has been corrected: session/device labeling and passkey CRUD are not fresh missing features; `REL-01` and `SESS-CTRL` are complete, and the next ranking is `EMAIL-RAILS` -> `PK-LIFECYCLE` -> `DATA-LIFECYCLE`. ## Session Continuity -**Next:** **`/gsd-new-milestone`** or a later newly numbered phase — do not reopen **999.x** +**Next:** Start the next milestone with a fresh `REQUIREMENTS.md`, defaulting to `EMAIL-RAILS` from [`.planning/MILESTONE-ARC.md`](MILESTONE-ARC.md) unless the user explicitly pivots. Do not reopen `WH-04..06` or `SESS-CTRL` as active implementation work without a new milestone decision. -**Resume file:** None +**Artifacts (active):** `.planning/PROJECT.md`, `.planning/ROADMAP.md`, `.planning/STATE.md`, `.planning/MILESTONE-ARC.md`. -**Artifacts:** `.planning/REQUIREMENTS.md`, `.planning/ROADMAP.md`, **`.planning/phases/84-routing-honesty-reconciliation/84-VERIFICATION.md`**, **`.planning/phases/84-routing-honesty-reconciliation/84-01-SUMMARY.md`**, **`.planning/phases/999.1-nyquist-retroactive-validation-pass/999.1-CONTEXT.md`**, **`.planning/phases/83-mfa-confirm-enrollment-022/83-VERIFICATION.md`**, **`.planning/phases/82-jwt-refresh-persistence-audit-cofate/82-VERIFICATION.md`** +## Accumulated Context + +### Pending Todos -**Last completed phase:** **84** (routing-honesty-reconciliation) — **2026-04-25** +- 0 pending todos in `.planning/todos/pending` -**Planned Phase:** None — future assurance work must use a newly numbered phase, not **999.x** +**Most recently executed phase:** 110 — Session control plane verification closeout. diff --git a/.planning/config.json b/.planning/config.json index c3dd7378..7110baa9 100644 --- a/.planning/config.json +++ b/.planning/config.json @@ -46,6 +46,11 @@ "phase_naming": "sequential", "agent_skills": {}, "resolve_model_ids": "omit", + "review": { + "models": { + "codex": "gpt-5.4" + } + }, "mode": "yolo", "granularity": "fine" } diff --git a/.planning/debug/resolved/pr37-phantom-sigra-web.md b/.planning/debug/resolved/pr37-phantom-sigra-web.md new file mode 100644 index 00000000..38c9c036 --- /dev/null +++ b/.planning/debug/resolved/pr37-phantom-sigra-web.md @@ -0,0 +1,128 @@ +--- +slug: pr37-phantom-sigra-web +status: resolved +trigger: PR #37 CI red — Library tests job creates phantom lib/sigra_web/ in sigra repo via in-process Install.run from a test that lost its raise guard, polluting downstream install_fixture tests +created: 2026-05-02 +updated: 2026-05-02 +--- + +# Debug Session: pr37-phantom-sigra-web + +## Trigger + + +PR #37 (v1.21 batch including phase 93 ship) ran CI for the first time after the merge of origin/main and turned out 16/23 jobs red. + +14 of 16 fails trace to a single root cause: a phantom `lib/sigra_web/` directory that appears in sigra-as-path-dep at compile time on a fresh CI clone, even though `lib/sigra_web/` is not in git, no source defines `SigraWeb.*`, and no `test/support/` references it. + +Plan 08 SUMMARY (9a1bbdf, 2026-05-02) explicitly flagged it: "untracked lib/sigra_web/ directory exists in the worktree…pre-existing WIP cruft…prevents running the full test suite with mix test." It was knowingly left behind because targeted tests weren't affected. + +CI failure signature: + pre-install mix compile failed: + lib/sigra_web/components/org_switcher.ex:20 cannot expand Kernel.use/2 while compiling sigra-as-path-dep in test fixture + +Goal: find what creates `lib/sigra_web/` in sigra's repo root on a fresh CI clone and remove the source — or if it's a side-effect of running tests, isolate it. + + +## Symptoms + +- **Expected behavior:** `MIX_ENV=test mix test` on a fresh clone (no untracked files) compiles and runs the full suite without phantom modules. +- **Actual behavior:** A `lib/sigra_web/` directory appears in the sigra repo root and is picked up by `sigra-as-path-dep` in test fixtures, breaking compilation of 14/23 CI jobs. +- **Error message:** `lib/sigra_web/components/org_switcher.ex:20 cannot expand Kernel.use/2 while compiling sigra-as-path-dep in test fixture` +- **Timeline:** Surfaced after the v1.21 batch / origin/main merge into chore/phase-88-uat-evidence (commit a93f195). Plan 08 SUMMARY (9a1bbdf, 2026-05-02) flagged the cruft as pre-existing. +- **Reproduction:** Push to `chore/phase-88-uat-evidence` → PR #37 CI; or fresh clone + `MIX_ENV=test mix test test/mix/tasks/sigra.install_test.exs:97` reproduces locally. + +## Critical files to inspect first + +- `test/support/install_fixture.ex` — esp. `setup_tmp_app/1` and `setup_tmp_app_without_install/1` (lines ~42–168) which `phx.new` and `mix compile` the path-dep +- `mix.exs` — `elixirc_paths/1` (currently `["lib"]` for non-test, `["lib", "test/support"]` for test) +- `priv/templates/sigra.install/organizations/components/org_switcher.ex` — the template that gets emitted as `lib/_web/components/org_switcher.ex` after install +- `priv/templates/sigra.install/admin/components/admin_shell.ex` — same shape, second file in the cascade +- `lib/mix/tasks/sigra.install.ex` — to understand whether anything resolves the host web module name as `SigraWeb` instead of `Web` +- `.planning/phases/93-m2m-service-account-tokens-b2b-03/93-08-SUMMARY.md` — has the original "out of scope" admission and may hint at where the cruft came from (look at "Rule 1 - Bug" entries) + +## Current Focus + +- hypothesis (resolved): The 93-08 fix to `validate_supported_adapter!/1` collapsed two distinct cases into one fallback — "Repo not yet compiled" AND "Repo loaded but has no `__adapter__/0`" both returned `:postgres` instead of raising. This let the `:undetectable_adapter` test in `test/mix/tasks/sigra.install_test.exs:97` slip past the raise guard and run the full installer pipeline against the sigra repo as the current Mix project, generating `lib/sigra_web/...` files there. +- test: `MIX_ENV=test mix test test/mix/tasks/sigra.install_test.exs:97` produced the phantom directory deterministically before the fix; passes (and leaves the repo clean) after the fix. +- expecting: After splitting the two cases — fallback only when `Code.ensure_loaded?` is false, raise when the module is loaded but lacks `__adapter__/0` — both the assertion and the directory contamination are gone. +- next_action: (none — resolved) + +## Evidence + +- timestamp: 2026-05-02T~14:35Z + command: `gh run view 25258893126 --log-failed --job 74062762072` (Library tests) + finding: Library tests job error trace shows `module SigraWeb is not loaded ... lib/sigra_web/components/org_switcher.ex:20 ... lib/sigra_web/components/admin_shell.ex:6 ... lib/sigra_web/auth_error_handler.ex:15`. Module name is fully resolved (`SigraWeb.Components.OrgSwitcher`, NOT literal `<%= web_module %>`), so the file was rendered through EEx with `Mix.Phoenix.base() == "Sigra"` — i.e. the installer ran inside the sigra Mix project. + +- timestamp: 2026-05-02T~14:38Z + command: `gh run view 25258893126 --log-failed --job 74062762053` (Install matrix) + finding: install_matrix CI failures are NOT the phantom directory. They are `error: undefined function auth_rate_limit/2 (expected TmpAppWeb.Router to define such a function or for it to be imported, but none are available)` from `mix compile --warnings-as-errors` after `mix sigra.install`. Separate generator drift bug — see "Out of session scope" below. + +- timestamp: 2026-05-02T~14:42Z + command: read `lib/mix/tasks/sigra.install.ex:156-174` + finding: Plan 08 (commit 9a1bbdf, 2026-05-02) changed `validate_supported_adapter!/1` to fall back to `:postgres` "when Repo not loaded". The change collapsed two cases into one: (a) `Code.ensure_loaded?(repo) == false` (Repo not yet compiled), AND (b) Repo loaded but `function_exported?(repo, :__adapter__, 0) == false` (genuinely unknown adapter). The `:undetectable_adapter` test feeds case (b) and now no longer raises. + +- timestamp: 2026-05-02T~14:44Z + command: `PGUSER=postgres PGPASSWORD=postgres PGHOST=localhost MIX_ENV=test mix test test/mix/tasks/sigra.install_test.exs:97` + finding: Test run produces the cascade `* creating lib/sigra/sigra_admin_policy.ex / * creating lib/sigra_web/components/admin_shell.ex / ...` and fails the `assert_raise Mix.Error` because no raise occurs. Confirms reproduction. After test, `git status` shows `?? lib/sigra_web/` and 9 other untracked installer outputs in the sigra repo root. + +- timestamp: 2026-05-02T~14:50Z + command: edit `lib/mix/tasks/sigra.install.ex` — split `validate_supported_adapter!/1` into a `cond` with three arms: not-loaded → `:postgres` (preserves Plan 08 intent), loaded-no-adapter → `Mix.raise("...Detected an unknown adapter...")`, loaded-with-adapter → only allow `Ecto.Adapters.Postgres`. + finding: After re-running `mix test test/mix/tasks/sigra.install_test.exs`: 21 tests, 0 failures. Repo working tree shows only `M lib/mix/tasks/sigra.install.ex` — no phantom `lib/sigra_web/`, no other untracked installer outputs. + +- timestamp: 2026-05-02T~14:55Z + command: `mix test test/sigra/install/ test/mix/tasks/` + finding: 618 tests, 7 failures. Working tree stays clean (no phantom directory). The 7 failures decompose as: + 1. `templates_layout_test:70` — manifest count 50 vs 51 (orchestrator-flagged stale assertion, OUT OF SCOPE) + 2. `core_post_instructions_test:116` — Oban warning copy drift (orchestrator-flagged stale assertion, OUT OF SCOPE) + 3-5. `golden_diff_test:53/66` and `vault_promotion_test:9` — install_fixture tests that fail at `mix compile --warnings-as-errors` inside the generated tmp_app due to the `auth_rate_limit/2` generator-drift bug (NOT in orchestrator's stale-assertion list) + 6-7. `generator_passkeys_opt_out_test:33` (×2) — same `auth_rate_limit/2` failure path + The phantom-directory bug is fully resolved. The remaining 5 (3-7) failures share a single distinct root cause documented under "Out of session scope" below. + +## Eliminated + +- The install fixtures (`setup_tmp_app/1`, `setup_tmp_app_without_install/1`) themselves do not contaminate the sigra repo — every `System.cmd` is correctly scoped to a tmp dir under `System.tmp_dir!()`. +- The `purely_additive_test.exs` walker test uses absolute tmp paths for both files and migrations and changes cwd into tmp before invoking `Runner.run` — also clean. +- The various `*_test.exs` files that call `Features.X.files(otp_app: :my_app)` etc. are read-only inspections and never write anywhere. +- The `test/example/` subproject has its own `mix.exs` and never escapes its directory tree. +- The error message about literal `<%= web_module %>` in earlier writeups was a misread — the CI logs show the module name is fully expanded (`SigraWeb.Components.OrgSwitcher`), so the template DID get EEx-evaluated. The bug is not unrendered-template-leakage; it is the installer running with the wrong host (sigra itself). + +## Resolution + +**Root cause:** Phase 93 Plan 08 (commit 9a1bbdf, 2026-05-02) changed `validate_supported_adapter!/1` in `lib/mix/tasks/sigra.install.ex` to fall back to `:postgres` whenever the repo module's `__adapter__/0` could not be invoked. This collapsed two structurally distinct cases — "Repo not yet compiled" and "Repo loaded but malformed" — into a single permissive branch. The `:undetectable_adapter` unit test in `test/mix/tasks/sigra.install_test.exs:97` exercises the second case via a stub module with no `__adapter__/0`. With the old raise gone, the test's `Install.run(["Accounts", "User", "users"])` invocation proceeded past validation, computed `Mix.Phoenix.otp_app() == :sigra` and `Mix.Phoenix.base() == "Sigra"`, and ran the full feature walker, generating `lib/sigra_web/components/org_switcher.ex`, `lib/sigra_web/components/admin_shell.ex`, `lib/sigra_web/auth_error_handler.ex`, etc. into the sigra repo root. + +Once those files existed, every downstream `setup_tmp_app/1` test failed at the post-`deps.get` `mix compile` step because the tmp_app's path-dep view of sigra now contained Phoenix-shape modules referencing an undefined `SigraWeb` namespace. + +**Fix:** `lib/mix/tasks/sigra.install.ex` — split `validate_supported_adapter!/1` into three explicit cases: + +```elixir +defp validate_supported_adapter!(repo_module) do + cond do + not Code.ensure_loaded?(repo_module) -> + :postgres # Plan 08 case: Repo not yet compiled + not function_exported?(repo_module, :__adapter__, 0) -> + Mix.raise("Sigra supports PostgreSQL only. Detected an unknown adapter. ...") + true -> + case repo_module.__adapter__() do + Ecto.Adapters.Postgres -> :postgres + adapter -> Mix.raise("Sigra supports PostgreSQL only. Detected #{inspect(adapter)}. ...") + end + end +end +``` + +This preserves Plan 08's intent (don't reject a host whose Repo simply hasn't been compiled yet) while restoring the raise guard for the case the test was written to enforce — and which is the actual safety net against running a generator pass with the sigra project as the cwd target. + +**Verification:** + +- `mix test test/mix/tasks/sigra.install_test.exs` → 21 tests, 0 failures (was 1 failure) +- `git status` after the run → only `M lib/mix/tasks/sigra.install.ex`, no phantom `lib/sigra_web/`, no other untracked installer artifacts +- `mix test test/sigra/install/ test/mix/tasks/` → working tree stays clean throughout; only the 7 unrelated failures (4 stale assertions + 3 from a separate generator drift bug, see below) remain + +**Out of session scope (reported up to orchestrator):** + +5 install-related test failures (`golden_diff_test:53/66`, `vault_promotion_test:9`, `generator_passkeys_opt_out_test:33` ×2) and the 4 install_matrix CI jobs share a separate generator-drift root cause that the orchestrator's briefing did not flag: + +`lib/sigra/install/features/core.ex:525` injects `pipe_through [:browser, :redirect_if_user_is_authenticated, :auth_rate_limit]` into the generated host router, but the corresponding `pipeline :auth_rate_limit do plug Sigra.Plug.RateLimit, ... end` block was never added to the same template's `pipelines` section. Commit 3accda8 (2026-05-01, "feat(api): 96-04 wire rate limit and oauth refresh into active seams") added the `pipe_through` reference and the matching pipeline block to `test/example/lib/example_web/router.ex` but missed updating the generator template at `core.ex` line ~494-516. Every fresh `mix sigra.install` therefore emits a router that fails compile with `undefined function auth_rate_limit/2`. + +This is not the phantom-directory bug and is structurally distinct: it ships an invalid generator output rather than polluting the library repo. Recommend a separate `/gsd-quick` (or atomic commit alongside the existing 5 mechanical fixes the orchestrator already enumerated) that adds the missing `pipeline :auth_rate_limit do plug Sigra.Plug.RateLimit, error_handler: #{web_module}.AuthErrorHandler end` block to the `# Sigra authentication` injection content in `core.ex`. diff --git a/.planning/milestones/v1.20-MILESTONE-AUDIT.md b/.planning/milestones/v1.20-MILESTONE-AUDIT.md new file mode 100644 index 00000000..21f1ea08 --- /dev/null +++ b/.planning/milestones/v1.20-MILESTONE-AUDIT.md @@ -0,0 +1,42 @@ +--- +milestone: v1.20 +milestone_name: GA Launch — SEED closure + public release +audit_status: passed +audited_at: 2026-04-28 +auditor: milestone close (/gsd-complete-milestone, Gemini CLI) +retroactive: false +--- + +# Milestone audit — v1.20 GA Launch (SEED closure + public release) + +## Verdict + +**Passed — suitable for archive.** Live `.planning/REQUIREMENTS.md` showed 21/21 requirement checkboxes satisfied/waived with a complete traceability table. Phases 85-89 completed. Phase 90 waived per user instruction. + +## Evidence index + +| Area | Pointer | +|------|---------| +| Requirements (archived) | `milestones/v1.20-REQUIREMENTS.md` | +| Roadmap / phase intent (archived) | `milestones/v1.20-ROADMAP.md` | +| AUD-21 | `phases/85-oauth-audit-atomicity-closure-aud-21/85-VERIFICATION.md` | +| GAUAT Email | `uat-evidence/v1.20/email-phase-04/`, `email-phase-08/` | +| GAUAT OAuth | `uat-evidence/v1.20/oauth-gen/`, `oauth-google/`, `oauth-link/`, `oauth-email-match/` | +| GAUAT MFA | `uat-evidence/v1.20/mfa-backup-rotation/` | +| GAUAT Install | `uat-evidence/v1.20/getting-started-clean-machine/` | +| Launch | Hex.pm tag `v1.20.0`, `CHANGELOG.md` | + +## Requirement cross-check + +| ID | Phase | Closure signal | +|----|-------|----------------| +| AUD-21 | 85 | `log_safe` boundaries atomic; C-1 PASS; `85-VERIFICATION.md` | +| GAUAT 01-02 | 86 | `email_visual_regression` CI green; 36 snapshot baselines | +| GAUAT 03-06 | 87 | Playwright specs against `Testing.OAuthIssuer`; `87-VERIFICATION.md` (CI provenance pending but local verified) | +| GAUAT 07-09 | 88 | Backup code rotation E2E; Getting started E2E; SEED-001 closed | +| LAUNCH 01-07 | 89, 90 | Hex published, docs updated, publicity + monitoring waived | + +## Explicit non-goals (accepted) + +- Publicity launch steps (LAUNCH 03-06) waived to focus on library quality. +- Lockspire integration deferred. diff --git a/.planning/milestones/v1.20-REQUIREMENTS.md b/.planning/milestones/v1.20-REQUIREMENTS.md new file mode 100644 index 00000000..5d2300f9 --- /dev/null +++ b/.planning/milestones/v1.20-REQUIREMENTS.md @@ -0,0 +1,98 @@ +# Requirements: Sigra — v1.20 GA Launch (SEED closure + public release) + +**Defined:** 2026-04-25 +**Milestone:** v1.20 — GA Launch — SEED closure + public release +**Selected seeds:** SEED-001 (human GA UAT), SEED-002 (OAuth audit atomicity remainder) + +## v1.20 Requirements + +### Leg 1 — SEED-002 OAuth audit atomicity closure (AUD-21) + +Closes the C-1 caveat that has hung over Phase 9 since v1.0. After this leg, every `log_safe/3` integration site in `lib/sigra/oauth/*` and the OAuth/ops Phase 45 T2 inventory uses atomic `Repo.transaction/1` + `Ecto.Multi` + `Sigra.Audit.log_multi_safe/3` when `:audit_schema` is set, matching the discipline already shipped in `Sigra.MFA`, `Sigra.Account`, and `Sigra.APIToken`. + +- [x] **AUD-21-01 +** — Convert OAuth/ops `log_safe/3` clusters at **AUD-04 rows 052–056, 058, 063** (per `.planning/phases/45-oauth-ops-c1-signoff/45-AUD-04-INVENTORY.md`) to atomic `Repo.transaction/1` + `Ecto.Multi` + `log_multi_safe`. On audit-insert failure: callers see `{:error, _}` and business-op rolls back; on `:audit_schema` unset: behavior preserved (telemetry-on-commit only). +- [x] **AUD-21-02 +** — Audit-aware test coverage at `test/sigra/oauth_audit_atomic_test.exs` (or extension of existing OAuth ceremony tests) proves: happy-path co-fate, audit-off parity, fault-injection rollback (CHECK guard) for each new atomic site. +- [x] **AUD-21-03 +** — Planning truth refresh: `45-AUD-04-INVENTORY.md` rows 052–056/058/063 marked T1 with phase reference; `09-VERIFICATION.md` C-1 matrix updated; `09-03-SUMMARY.md` post-batch narrative added; `CHANGELOG.md` `[Unreleased]` trace bullet. +- [x] **AUD-21-04 +** — Phase 9 **C-1 caveat downgraded from PASS-WITH-CAVEATS to PASS** in `09-VERIFICATION.md` frontmatter (`caveats: []` or removal) and `09-03-SUMMARY.md` summary block, with explicit reference to AUD-21 closure. SEED-002 status flipped to `validated` in `.planning/seeds/SEED-002-phase-9-log-safe-atomicity-followup.md` frontmatter. +- [x] **AUD-21-05 +** — Per-phase merge gate (`*-VERIFICATION.md`) in the implementing phase directory; `mix ci.audit_45` still green; library test suite + 5 CI gates remain green on `main`. + +### Leg 2 — SEED-001 GA UAT closure (GAUAT) + +Closes the 8 GA-risk UAT items listed in `.planning/seeds/SEED-001-v1.0-ga-human-uat-gate.md`. Each requirement maps to machine-authoritative evidence with recorded outcome and supporting artifacts. No GAUAT row requires a human witness to ship. + +- [x] **GAUAT-01** — **Phase 04 lockout + suspicious-login email visual regression (automated)** — Phase 86 ships the `email_visual_regression` CI job rendering both templates (`lockout_notification_email`, `suspicious_login_email`) across {Chromium, WebKit} × {light, dark} via Premailex-inlined HTML + Playwright `toHaveScreenshot`, plus extended ExUnit asserts (computed WCAG contrast, byte budget vs Gmail 102 KB clip, multipart parity, recipient correctness, XSS fuzz, Outlook-Word-engine deny-list, image tripwire) and caniemail.com CSS-feature lint. Eight baselines committed under `test/example/priv/playwright/__snapshots__/email-visual.spec.ts/`. Evidence (README, manifest.json, hero PNGs, contrast-summary.json, byte-budget.csv) under `.planning/uat-evidence/v1.20/email-phase-04/`. Phase-86 CONTEXT.md D-86-09 records the documented residual (legacy Outlook desktop Word engine — EOL Oct 2026; subjective copy tone — handled in PR review; spam-folder placement — adopter deliverability surface). 0 human MUA passes required. +- [x] **GAUAT-02** — **Phase 08 lifecycle email visual regression (automated)** — Same harness covers the 7 lifecycle templates (`email_change_confirmation_email`, `email_change_notification_email`, `email_changed_email`, `password_changed_email`, `deletion_scheduled_email`, `deletion_cancelled_email`, `deletion_finalized_email`); 28 baselines committed; evidence under `.planning/uat-evidence/v1.20/email-phase-08/`. Same residual policy as GAUAT-01. 0 human MUA passes required. +- [ ] **GAUAT-03** — **`mix sigra.gen.oauth` fresh-host smoke (automated)** — Extended `scripts/ci/install-smoke.sh` runs on every PR: `mix phx.new` + `sigra.install` + `sigra.gen.oauth --providers google,github` + `mix compile --warnings-as-errors` + `MIX_ENV=test mix test`, emitting `oauth-gen: 12/12 expected artifacts present, mix test green`. Transcript tee'd to `.planning/uat-evidence/v1.20/oauth-gen/transcript.log` (CI artifact + GitHub release asset on `v*` tags). Reshaped from human terminal-transcript capture per Phase 87 D-87-04 (precedent: Phase 86 D-86-08). +- [ ] **GAUAT-04** — **End-to-end OAuth register/login cycle (automated)** — Playwright spec `oauth-register.spec.ts` drives Sigra's example app against the in-process `Sigra.Testing.OAuthIssuer` (TestServer-backed, RS256 ID tokens, real PKCE — mirrors Assent's own `OIDCTestCase` precedent). Cells: provider button → 302 to /authorize with state nonce → mock auto-consent → callback → user + identity row + session cookie → logout → re-login (same user, no new identity row). Evidence: pass/fail manifest + Playwright trace under `.planning/uat-evidence/v1.20/oauth-google/`. Adopter-side real-credential check ships separately as `mix sigra.oauth.smoketest --provider=google` per Phase 87 D-87-03. Reshaped from human screen-recording capture per Phase 87 D-87-01 (0 human UAT — matches Auth.js / Spring Security / Assent / pow_assent / Devise+omniauth ecosystem convention). +- [ ] **GAUAT-05** — **Provider linking + last-method unlink prevention (automated)** — Playwright spec `oauth-link.spec.ts` covers four visual states: (1) linked-with-password (unlink enabled), (2) only-oauth-no-password (unlink disabled, tooltip matches verbatim source from `oauth_settings_live.ex:92`), (3) after-set-password (button re-enabled), (4) post-unlink (`user_identities` row absent, password login still works). Evidence: 4-row manifest + one hero PNG of the disabled-tooltip state under `.planning/uat-evidence/v1.20/oauth-link/`. Reshaped from human four-state screenshot capture per Phase 87 D-87-05. +- [ ] **GAUAT-06** — **Email-match confirmation flash + redirect (automated)** — Playwright spec `oauth-email-match.spec.ts` covers: pre-seeded user with password → mock issuer returns matching email + novel sub → flash text matches verbatim source from `oauth_controller.ex:96` → redirect to login → password login → identity row created → `provider_linked_email` arrival in `/dev/mailbox/json`. Evidence: 4-row manifest + flash-text + DB-probe + mailbox JSON under `.planning/uat-evidence/v1.20/oauth-email-match/`. Reshaped from human screenshot capture per Phase 87 D-87-05. +- [x] **GAUAT-07 +** — **Backup-code regeneration E2E proof (automated)** — `mfa-backup-rotation.spec.ts` drives the real MFA settings flow in the example app (register/confirm/login → sudo → enroll MFA → capture pre-rotation backup code → regenerate via fresh TOTP) and proves both user-visible and persisted outcomes: new backup codes shown once, old plaintext no longer matches any current code, and `mfa.backup_codes_regenerate` audit persistence. Evidence under `.planning/uat-evidence/v1.20/mfa-backup-rotation/`; CI gate is `.github/workflows/ci.yml / mfa_e2e_playwright`. 0 human UAT required. +- [x] **GAUAT-08 +** — **Generated-host getting-started proof (automated)** — `scripts/ci/install-smoke.sh` runs the real getting-started path on a disposable Phoenix 1.8 host (`mix phx.new` → Sigra install → compile/migrate → generated-host auth lifecycle test → boot the app and hit the documented routes) and emits machine-readable environment, transcript, and lifecycle evidence under `.planning/uat-evidence/v1.20/getting-started-clean-machine/`. Subjective first-read timing/friction is explicitly non-gating. 0 human UAT required. +- [x] **GAUAT-09** — **Results filing + seed closure** — `.planning/v1.20-GA-UAT-RESULTS.md` is written with one explicit row per GAUAT-01..08, links to the machine evidence directories under `.planning/uat-evidence/v1.20/`, and a final go/no-go disposition for the launch leg. SEED-001 moves to `validated` when those rows have release-authoritative evidence on the launch SHA/tag; no human-only exception path remains for GAUAT-07 + or GAUAT-08. + +### Leg 3 — Public launch execution (LAUNCH) + +Executes the v1.5 `MAINT-01` First Public Launch checklist for the first time. Sequenced *after* legs 1 and 2 close so the launch is defensible. Failures here roll back narrowly (delete announcement, mark Hex release as broken) without invalidating legs 1 and 2. + +- [x] **LAUNCH-01 +** — **Hex.pm publish v1.20** — Bump `mix.exs` version to `1.20.0`; tag `v1.20` annotated; `mix hex.publish` (with reviewable diff against the prior published version, if any); verify package shows on hex.pm with correct description, links, optional-deps, and ExDoc. Record release URL. (If this is Sigra's first-ever Hex publish, also covers `mix hex.user auth` setup if not already configured.) +- [x] **LAUNCH-02 +** — **README "use this in production" promotion** — Update README from "production readiness available" framing to an explicit "Use this in production" section with: link to v1.20 GA evidence, link to Phase 9 C-1 PASS attestation (post-AUD-21), getting-started link, version-pin guidance. ExDoc landing path mirrors the change. +- [x] **LAUNCH-03** — **Announcement post drafted + published** — *(Waived: User focus is purely on library quality, not publicity. Discussed in Phase 90.)* +- [x] **LAUNCH-04** — **Hacker News submission** — *(Waived: User focus is purely on library quality. Discussed in Phase 90.)* +- [x] **LAUNCH-05** — **Elixir community soft-launch** — *(Waived: User focus is purely on library quality. Discussed in Phase 90.)* +- [x] **LAUNCH-06** — **MAINTAINING.md post-launch monitoring lane** — *(Waived: User focus is purely on library quality, skipped in Phase 90.)* +- [x] **LAUNCH-07 +** — **CHANGELOG + ExDoc final alignment** — `CHANGELOG.md` v1.20.0 section finalized: covers AUD-21 (audit completeness PASS), GAUAT closure pointer, launch metadata, upgrade notes (none expected — pure additive). ExDoc extras include `upgrading-to-v1.20.md` (or "no upgrade required" stub if changeset is purely additive); `mix docs --warnings-as-errors` clean. + +## Future requirements + +- **Week-one launch-feedback follow-ups** — sized as a v1.21 patch milestone if signal warrants. Not pre-scoped. +- **Phase 45 T2 stragglers** beyond 052–056/058/063, if any surface during AUD-21 inventory walk — captured as `EX-45-*` with reopen triggers, deferred to a later milestone. +- **`sigra_lockspire` glue package per ADR 001** — still awaiting companion-app trigger; explicitly out of scope for v1.20. +- **30d post-launch retrospective** — formal retrospective on launch-week outcomes, distinct from the LAUNCH-06 monitoring checkpoints. Triggered automatically at the 30d mark. + +## Out of scope + +- **Reopening 999.x archaeology** — assurance work uses newly numbered phases. +- **Re-auditing Phase 45 merge gate (`mix ci.audit_45`) beyond regression needed for AUD-21 edits.** +- **Responding to launch feedback during the v1.20 milestone window** — captured in LAUNCH-06 monitoring lane and routed to a follow-up milestone. +- **`sigra_lockspire` / ADR 001** — deferred until a real companion-app trigger fires. +- **Marketing site / standalone landing page** — README + announcement post cover positioning. A dedicated marketing site is a later concern. +- **Paid promotion / sponsorships** — organic only for first launch. + +## Traceability + +| REQ-ID | Phase | +|-----------|-------| +| AUD-21-01 | 85 | +| AUD-21-02 | 85 | +| AUD-21-03 | 85 | +| AUD-21-04 | 85 | +| AUD-21-05 | 85 | +| GAUAT-01 | 86 | +| GAUAT-02 | 86 | +| GAUAT-03 | 87 | +| GAUAT-04 | 87 | +| GAUAT-05 | 87 | +| GAUAT-06 | 87 | +| GAUAT-07 | 88 | +| GAUAT-08 | 88 | +| GAUAT-09 | 88 | +| LAUNCH-01 | 89 | +| LAUNCH-02 | 89 | +| LAUNCH-03 | 90 | +| LAUNCH-04 | 90 | +| LAUNCH-05 | 90 | +| LAUNCH-06 | 90 | +| LAUNCH-07 | 89 | + +_(Phase column populated by gsd-roadmapper, 2026-04-25. 21/21 requirements mapped to exactly one phase across Phases 85–90.)_ diff --git a/.planning/milestones/v1.20-ROADMAP.md b/.planning/milestones/v1.20-ROADMAP.md new file mode 100644 index 00000000..33012e59 --- /dev/null +++ b/.planning/milestones/v1.20-ROADMAP.md @@ -0,0 +1,233 @@ +# Roadmap: Sigra + +## Milestones + +- ✅ **v1.0 Phoenix Auth Library - Initial Release** - Phases 1-10 + 10.1 + 10.1.1 (shipped 2026-04-11). See [v1.0 archive](milestones/v1.0-ROADMAP.md) and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.1 Foundations** - Phases 11-23 (shipped 2026-04-16). See [v1.1 archive](milestones/v1.1-ROADMAP.md), [v1.1 requirements](milestones/v1.1-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **Post-v1.1 Closeout** - Phases 24-26 (completed 2026-04-16). +- ✅ **v1.2 Admin Dashboard** - Phases 27-31 + gap closure 32-35 (shipped 2026-04-17). See [v1.2 archive](milestones/v1.2-ROADMAP.md), [v1.2 requirements](milestones/v1.2-REQUIREMENTS.md), [v1.2 milestone audit](milestones/v1.2-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.3 Cleanup & Hardening** — Phases 36-40 (shipped 2026-04-19). See [v1.3 archive](milestones/v1.3-ROADMAP.md), [v1.3 requirements](milestones/v1.3-REQUIREMENTS.md), [v1.3 milestone audit](milestones/v1.3-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.4 GA readiness & audit trail completeness** — Phases **41–52** (shipped **2026-04-22**). See [v1.4 archive](milestones/v1.4-ROADMAP.md), [v1.4 requirements](milestones/v1.4-REQUIREMENTS.md), [v1.4 milestone audit](milestones/v1.4-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.5 Public release narrative & community readiness** — Phases **53–56** (shipped **2026-04-22**). See [v1.5 archive](milestones/v1.5-ROADMAP.md), [v1.5 requirements](milestones/v1.5-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.6 Nyquist closure + OAuth audit depth** — Phases **57–59** (shipped **2026-04-23**). See [v1.6 archive](milestones/v1.6-ROADMAP.md), [v1.6 requirements](milestones/v1.6-REQUIREMENTS.md), [v1.6 milestone audit](milestones/v1.6-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.7 Adoption readiness & audit durability** — Phases **60–62** (shipped **2026-04-23**). See [v1.7 archive](milestones/v1.7-ROADMAP.md), [v1.7 requirements](milestones/v1.7-REQUIREMENTS.md), [v1.7 milestone audit](milestones/v1.7-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.8 Adopter polish (diminishing returns)** — Phases **63–65** (shipped **2026-04-23**). See [v1.8 archive](milestones/v1.8-ROADMAP.md), [v1.8 requirements](milestones/v1.8-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.9 Audit atomicity (bounded SEED-002)** — Phases **66–67** (shipped **2026-04-23**). See [v1.9 archive](milestones/v1.9-ROADMAP.md), [v1.9 requirements](milestones/v1.9-REQUIREMENTS.md), [v1.9 milestone audit](milestones/v1.9-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.10 Adopter confidence for solo production** — Phases **68–70** (shipped **2026-04-23**). See [v1.10 archive](milestones/v1.10-ROADMAP.md), [v1.10 requirements](milestones/v1.10-REQUIREMENTS.md), [v1.10 milestone audit](milestones/v1.10-MILESTONE-AUDIT.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.11 Adoption stabilization** — Phases **71–72** (shipped **2026-04-23**). See [v1.11 archive](milestones/v1.11-ROADMAP.md), [v1.11 requirements](milestones/v1.11-REQUIREMENTS.md), and triage [v1.11-TRIAGE.md](v1.11-TRIAGE.md). +- ✅ **v1.12 Trust, evidence, and adoption polish** — Phases **73–75** (shipped **2026-04-24**). See [v1.12 archive](milestones/v1.12-ROADMAP.md), [v1.12 requirements](milestones/v1.12-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). Bounded **SEED-002** batch + **SEED-001** evidence index + triage-driven doc polish. +- ✅ **v1.13 Post–v1.12 operational cadence** — Phase **76** (shipped **2026-04-24**). See [v1.13 archive](milestones/v1.13-ROADMAP.md), [v1.13 requirements](milestones/v1.13-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). Planning-only cadence lock-in (**CAD-01**..**CAD-03**). +- ✅ **v1.14 Bounded audit trust closure** — Phase **77** (shipped **2026-04-24**). See [v1.14 archive](milestones/v1.14-ROADMAP.md), [v1.14 requirements](milestones/v1.14-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). **SEED-002** slice — **AUD-04-033** / **034** (**AUD-13**). +- ✅ **v1.15 Account + API C-1 planning truth** — Phase **78** (shipped **2026-04-24**). See [v1.15 archive](milestones/v1.15-ROADMAP.md), [v1.15 requirements](milestones/v1.15-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). **AUD-14**..**AUD-14-05**. +- ✅ **v1.16 API verify failure audit atomicity** — Phase **79** (shipped **2026-04-24**). See [v1.16 archive](milestones/v1.16-ROADMAP.md), [v1.16 requirements](milestones/v1.16-REQUIREMENTS.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.17 Forced password change audit atomicity (SEED-002 / AUD-04-043)** — Phase **80** (shipped **2026-04-24**). See [v1.17 requirements](milestones/v1.17-REQUIREMENTS.md), [milestone archive](milestones/v1.17-ROADMAP.md), and [MILESTONES.md](MILESTONES.md). +- ✅ **v1.18 JWT refresh / reuse audit atomicity (SEED-002 / AUD-04-048..049)** — Phase **81** (shipped **2026-04-24**). [MILESTONES.md](MILESTONES.md); verification [`.planning/phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md`](phases/81-jwt-refresh-audit-atomicity/81-VERIFICATION.md). +- ✅ **v1.19 JWT refresh persistence + audit co-fate & MFA enrollment failure** — Phases **82–83** (shipped **2026-04-24**). [MILESTONES.md](MILESTONES.md). +- ✅ **Post-v1.19 routing honesty follow-up** — Phase **84** (completed **2026-04-25**). +- 🟡 **v1.20 GA Launch — SEED closure + public release** — Phases **85–90** (opened **2026-04-25**). Live [REQUIREMENTS.md](REQUIREMENTS.md); phases below. + +## Phases + +### v1.20 — active (Phases **85–90**) + +**Coverage:** 21 requirements → 6 phases. Numbering continues from **v1.19/post-v1.19** (last phase **84**). + +**Dependency shape:** two parallel legs (Leg 1 — AUD-21 OAuth audit closure; Leg 2 — SEED-001 GAUAT closure) feed a sequential launch leg (Leg 3 — pre-launch then launch+monitoring). Legs 1 and 2 are independent and can run in either order. Leg 3 cannot start until **both** legs 1 and 2 close — the launch's defensibility depends on AUD-21 downgrading the Phase 9 C-1 caveat to PASS and GAUAT-09 filing v1.20 GA UAT results with a go-decision. + +**Phase summary:** + +- [x] **Phase 85: OAuth audit atomicity closure (AUD-21)** — Convert remaining `log_safe/3` OAuth/ops clusters in Phase 45 T2 (AUD-04 rows 052–056, 058, 063) to atomic `Repo.transaction/1` + `Ecto.Multi` + `Sigra.Audit.log_multi_safe/3`; refresh planning truth; downgrade Phase 9 C-1 caveat from PASS-WITH-CAVEATS to PASS. +- [x] **Phase 86: GAUAT email visual regression harness (Phase 04 + Phase 08 templates)** — Ship automated visual regression harness (Premailex CSS-inline + Playwright `toHaveScreenshot` Chromium+WebKit × light+dark + caniemail CSS lint + extended WCAG/byte/multipart/XSS ExUnit asserts) producing CI-reproducible evidence per template. 0 human MUA passes required for v1.20 launch. (completed 2026-04-26) +- [ ] **Phase 87: GAUAT OAuth automated end-to-end harness** — Ship `Sigra.Testing.OAuthIssuer` (TestServer-backed in-process OIDC issuer mirroring Assent's `OIDCTestCase`) + 3 Playwright specs (oauth-register / oauth-link / oauth-email-match) covering GAUAT-04/05/06 against Sigra's example app, plus extended `install-smoke.sh` covering GAUAT-03 (`mix phx.new` + `sigra.install` + `sigra.gen.oauth` + `--warnings-as-errors` + `mix test`); ship `mix sigra.oauth.smoketest --provider=google` + `docs/oauth-google-setup.md` for adopter-side real-credential check at install time. 0 human UAT. +- [x] **Phase 88: GAUAT MFA + getting-started + results filing** — Automated backup-code regeneration E2E proof; generated-host getting-started install/runtime proof; file `.planning/v1.20-GA-UAT-RESULTS.md`; flip SEED-001 status to `validated` when release-SHA evidence is complete. (completed 2026-04-28) +- [x] **Phase 89: Pre-launch — Hex publish + README promotion + CHANGELOG/ExDoc alignment** — Bump `mix.exs` to 1.20.0; tag `v1.20`; `mix hex.publish`; promote README from "production readiness available" to "use this in production"; finalize CHANGELOG v1.20.0 section + `upgrading-to-v1.20.md` (or no-upgrade-required stub) so `mix docs --warnings-as-errors` is clean. (completed 2026-04-28) +- [x] ➖ **Phase 90: Launch + monitoring lane** — _Skipped per user request._ (waived 2026-04-28) + +## Phase Details + +### Phase 85: OAuth audit atomicity closure (AUD-21) + +**Goal:** Close the last live SEED-002 audit-atomicity gap so every OAuth/ops integration site that emits a security-relevant audit row co-fates that row with its business-op transaction. After this phase, the Phase 9 C-1 caveat is downgraded from PASS-WITH-CAVEATS to PASS, and SEED-002 is `validated`. + +**Depends on:** Nothing (parallel-ready with Phases 86–88). + +**Requirements:** AUD-21-01, AUD-21-02, AUD-21-03, AUD-21-04, AUD-21-05. + +**Success criteria** (what must be TRUE): + +1. A maintainer running `rg "log_safe\\(" lib/sigra/oauth/ lib/sigra/oauth.ex` finds zero hits at the AUD-04 row 052–056/058/063 boundaries that v1.20 targeted; remaining `log_safe` calls are paired with explicit `EX-*` rows in `45-AUD-04-INVENTORY.md` (or have been retired from that inventory). +2. A maintainer querying `09-VERIFICATION.md` C-1 matrix sees PASS, not PASS-WITH-CAVEATS, with the AUD-21 phase reference embedded; `caveats:` in frontmatter is `[]` (or the field is removed). +3. Running `mix test test/sigra/oauth_audit_atomic_test.exs` (or the equivalent OAuth-ceremony audit test extension) on Postgres exercises happy-path co-fate, audit-off parity, and CHECK-guarded fault-injection rollback for each newly atomic site, and exits green. +4. `.planning/seeds/SEED-002-phase-9-log-safe-atomicity-followup.md` frontmatter `status:` reads `validated`; `45-AUD-04-INVENTORY.md` rows 052–056/058/063 carry T1 verdicts with phase 85 reference; `09-03-SUMMARY.md` has a phase-85 / AUD-21 narrative bullet; `CHANGELOG.md` `[Unreleased]` carries the AUD-21 trace bullet. +5. `mix ci.audit_45` and the library test suite (plus the 5 existing CI gates) remain green on `main` after the phase merges; `85-VERIFICATION.md` records the merge gate outcome. + +**Plans:** 4 plans. + +Plans: +- [x] 85-01-PLAN.md — Atomicize impersonation session/audit orchestration and sharpen AUD-04 truth. +- [x] 85-02-PLAN.md — Close the C-1 narrative, seed status, and verification trail for AUD-21. + +### Phase 86: GAUAT email visual regression harness (Phase 04 + Phase 08 templates) + +**Goal:** Ship an automated email visual regression harness that produces CI-reproducible evidence for the 9 transactional email templates (2 Phase 04 security + 7 Phase 08 lifecycle) without any human MUA pass. The launch claim is downgraded from "real-mail-client tested" to **"render-tested across Chromium + WebKit engines × light + dark mode, with caniemail-validated CSS for Gmail web / new Outlook web / Apple Mail; legacy Outlook Word-engine desktop documented as out-of-scope (Microsoft EOL Oct 2026)"** — accurate, defensible, and reproducible from any SHA. 0 human UAT for v1.20 launch. + +**Depends on:** Nothing (parallel-ready with Phase 85 and with Phases 87–88). + +**Requirements:** GAUAT-01, GAUAT-02. + +**Success criteria** (what must be TRUE): + +1. An external reviewer running `mix test` and `mix ci.email_visual` (or the equivalent CI workflow) on the phase-close SHA produces 36 baseline-matching snapshots (9 templates × 2 engines × 2 themes) byte-equal to the committed baselines under `test/example/priv/playwright/__snapshots__/email-visual.spec.ts/`. Pixel-diff > `maxDiffPixels` fails the build. +2. An external reviewer opening `.planning/uat-evidence/v1.20/email-phase-04/README.md` and `email-phase-08/README.md` finds: YAML frontmatter (`hex_version`, `git_sha`, `git_tag`, `ci_run_url`, `disposition`); `manifest.json` with one row per (template, engine, theme); `reports/contrast-summary.json` and `byte-budget.csv`; hero PNGs under `snapshots/` named `{template}__{engine}__{theme}__sha-{short-sha}.png` (~1-2 MB total per directory). +3. The full snapshot bundle (raw `.eml`, all engine PNGs at full res, axe-core JSONs) is uploaded as a GitHub Actions artifact at every CI run AND promoted to the `v1.20.0` GitHub release asset at tag time (release assets do not expire, vs. Actions artifacts capped at 400d — matters for SOC 2 Type II audit windows). +4. The extended ExUnit suite (`Sigra.A11y.Contrast` module + `Example.EmailAssertions` helper) asserts WCAG 2.2 AA computed contrast (CTA bumped to `#1d4ed8` for 5.17:1 normal-text), byte budget < 100 KB vs Gmail clip, multipart parity (text mirrors HTML URLs), recipient correctness, XSS fuzz on user-controlled fields, Outlook Word-engine deny-list (no ` + + +

Hello

+ + + """ + + describe "lint/1" do + test "returns :ok for HTML using only allow-listed CSS properties" do + assert :ok = CssLint.lint(@safe_html) + end + + test "returns error for display:flex (not supported by Gmail/Outlook/Apple Mail)" do + assert {:error, violations} = CssLint.lint(@unsafe_html_flex) + assert is_list(violations) + assert length(violations) > 0 + assert Enum.any?(violations, &String.contains?(&1, "flex")) + end + + test "returns error for display:grid (not supported by Gmail/Outlook/Apple Mail)" do + assert {:error, violations} = CssLint.lint(@unsafe_html_grid) + assert is_list(violations) + assert Enum.any?(violations, &String.contains?(&1, "grid")) + end + + test "returns error for position: (ignored by Outlook Word-engine)" do + assert {:error, violations} = CssLint.lint(@unsafe_html_position) + assert is_list(violations) + assert Enum.any?(violations, &String.contains?(&1, "position")) + end + + test "returns error for background-image: (stripped by Outlook Word-engine)" do + assert {:error, violations} = CssLint.lint(@unsafe_html_bg_image) + assert is_list(violations) + assert Enum.any?(violations, &String.contains?(&1, "background-image")) + end + + test "returns error for + +
Hello
+ + + """ + + assert {:error, violations} = CssLint.lint(combined) + assert length(violations) >= 2 + end + end + + describe "allowlist/0" do + test "returns a map with clients, allow_css, and deny_css keys" do + policy = CssLint.allowlist() + assert is_map(policy) + assert Map.has_key?(policy, "clients") + assert Map.has_key?(policy, "allow_css") + assert Map.has_key?(policy, "deny_css") + end + + test "clients list includes gmail-web, outlook-web-new, and apple-mail-macos" do + %{"clients" => clients} = CssLint.allowlist() + assert "gmail-web" in clients + assert "outlook-web-new" in clients + assert "apple-mail-macos" in clients + end + + test "allow_css includes common safe properties" do + %{"allow_css" => allowed} = CssLint.allowlist() + assert "background-color" in allowed + assert "color" in allowed + assert "font-size" in allowed + assert "padding" in allowed + end + + test "deny_css includes the four Word-engine landmine constructs" do + %{"deny_css" => denied} = CssLint.allowlist() + assert "display:flex" in denied or Enum.any?(denied, &String.contains?(&1, "flex")) + assert "display:grid" in denied or Enum.any?(denied, &String.contains?(&1, "grid")) + assert "position" in denied or Enum.any?(denied, &String.contains?(&1, "position")) + assert "background-image" in denied or Enum.any?(denied, &String.contains?(&1, "background-image")) + end + end +end diff --git a/test/sigra/guides_dx02_test.exs b/test/sigra/guides_dx02_test.exs index 6e86c4e9..7dbfe4c3 100644 --- a/test/sigra/guides_dx02_test.exs +++ b/test/sigra/guides_dx02_test.exs @@ -220,7 +220,7 @@ defmodule Sigra.GuidesDx02Test do "mix.exs must set docs main: \"getting-started\" (plan 10-05 Task 1 Step F)" end - test "all 17 expected guide files exist" do + test "all 18 expected guide files exist" do expected = [ "introduction/getting-started.md", @@ -239,7 +239,12 @@ defmodule Sigra.GuidesDx02Test do "recipes/multi-tenant.md", "recipes/passkeys.md", "recipes/deployment.md", - "recipes/subdomain-auth.md" + "recipes/subdomain-auth.md", + # Phase 92 / B2B-02 (Plan 92-04): RBAC seam recipe — concrete + # deny-by-default walk-through using a host-owned `owner/admin/member` + # policy. The library ships only the `Sigra.Authz` behaviour; the + # recipe lives here as the host-facing companion. + "recipes/role-based-access-control.md" ] |> Enum.map(&Path.join(@guides_root, &1)) @@ -248,7 +253,7 @@ defmodule Sigra.GuidesDx02Test do assert missing == [], "Missing expected guide files: #{Enum.join(missing, ", ")}" - assert length(expected) == 17 + assert length(expected) == 18 end test "subdomain-auth.md mentions cookie_domain (10-03 -> 10-04 consistency)" do diff --git a/test/sigra/impersonation_audit_atomicity_test.exs b/test/sigra/impersonation_audit_atomicity_test.exs new file mode 100644 index 00000000..8ccb6753 --- /dev/null +++ b/test/sigra/impersonation_audit_atomicity_test.exs @@ -0,0 +1,408 @@ +defmodule Sigra.ImpersonationAuditAtomicityTest do + @moduledoc """ + Postgres integration coverage for impersonation session/audit co-fate. + + For the non-atomic fallback path that still uses `session.create` / `session.delete` + plus `log_safe`, see `Sigra.ImpersonationTest`. + """ + + use ExUnit.Case, async: false + + alias Sigra.Admin.Scope, as: AdminScope + alias Sigra.Config + alias Sigra.Impersonation + alias Sigra.Session + alias Sigra.Test.AuditEvent, as: AuditTestEvent + alias Sigra.Test.PostgresRepo + + defmodule TestUser do + defstruct [:id, :email, :organization_ids] + end + + defmodule TestScope do + defstruct [:user, :active_organization, :membership, :impersonating_from] + end + + defmodule ImpersonationSessionRecord do + @moduledoc false + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "impersonation_audit_sessions" do + field(:user_id, :binary_id) + field(:hashed_token, :binary) + field(:token, :binary, virtual: true) + field(:type, :string, default: "standard") + field(:ip, :string) + field(:user_agent, :string) + field(:geo_city, :string) + field(:geo_country_code, :string) + field(:active_organization_id, :binary_id) + field(:last_active_at, :utc_datetime_usec) + field(:sudo_at, :utc_datetime_usec) + timestamps(type: :utc_datetime_usec) + end + end + + defmodule LegacySessionStore do + @moduledoc false + + def create(user_id, metadata, _opts) do + send(self(), {:legacy_create, user_id, metadata}) + + {:ok, + %Sigra.Session{ + id: 4242, + user_id: user_id, + token: "impersonation-raw", + hashed_token: "impersonation-hash", + type: :standard, + inserted_at: DateTime.utc_now(), + last_active_at: DateTime.utc_now() + }} + end + + def delete(hashed_token, _opts) do + send(self(), {:legacy_delete, hashed_token}) + :ok + end + end + + setup do + start_supervised!({PostgresRepo, PostgresRepo.default_config()}) + repo = PostgresRepo + + Ecto.Adapters.SQL.query!(repo, "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"", []) + + for table <- ["impersonation_audit_sessions", "audit_events"] do + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS #{table} CASCADE", []) + end + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE impersonation_audit_sessions ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + user_id uuid NOT NULL, + hashed_token bytea NOT NULL, + type varchar(32) NOT NULL DEFAULT 'standard', + ip varchar(64), + user_agent varchar(512), + geo_city varchar(255), + geo_country_code varchar(8), + active_organization_id uuid, + last_active_at timestamp, + sudo_at timestamp, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE audit_events ( + id uuid PRIMARY KEY, + occurred_at timestamp NOT NULL DEFAULT now(), + action varchar(255) NOT NULL, + outcome varchar(32) NOT NULL DEFAULT 'success', + actor_id uuid, + actor_type varchar(64) NOT NULL DEFAULT 'user', + target_id uuid, + target_type varchar(64), + ip_address varchar(64), + user_agent varchar(512), + metadata jsonb NOT NULL DEFAULT '{}'::jsonb, + organization_id uuid, + effective_user_id uuid, + inserted_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!(repo, "TRUNCATE TABLE impersonation_audit_sessions CASCADE", []) + Ecto.Adapters.SQL.query!(repo, "TRUNCATE TABLE audit_events CASCADE", []) + + %{repo: repo} + end + + defp admin_scope(mode, admin_user, organization_id \\ nil) do + organization = + case organization_id do + nil -> nil + id -> %{id: id, slug: "org-#{id}", name: "Org #{id}"} + end + + %AdminScope{ + mode: mode, + scope: %TestScope{ + user: admin_user, + active_organization: nil, + membership: nil, + impersonating_from: nil + }, + organization: organization, + organization_id: organization_id, + organization_slug: organization && organization.slug, + platform_admin?: mode == :global, + admin_org_ids: if(organization_id, do: [organization_id], else: []) + } + end + + defp session(user_id, attrs) do + now = DateTime.utc_now() |> DateTime.truncate(:second) + + struct( + %Session{ + id: attrs[:id] || 1, + user_id: user_id, + token: attrs[:token], + hashed_token: attrs[:hashed_token] || "hashed-session-token", + type: attrs[:type] || :standard, + last_active_at: Map.get(attrs, :last_active_at, now), + inserted_at: Map.get(attrs, :inserted_at, now), + active_organization_id: Map.get(attrs, :active_organization_id), + sudo_at: Map.get(attrs, :sudo_at) + }, + Map.drop(attrs, [ + :id, + :token, + :hashed_token, + :type, + :last_active_at, + :inserted_at, + :active_organization_id, + :sudo_at + ]) + ) + end + + defp base_config(repo, store, audit_schema \\ AuditTestEvent) do + Config.new!( + repo: repo, + user_schema: TestUser, + scope_module: TestScope, + otp_app: :impersonation_audit_atomicity_test, + secret_key_base: String.duplicate("k", 64), + audit: [audit_schema: audit_schema], + session: [ + store: store, + session_schema: ImpersonationSessionRecord + ] + ) + end + + defp audit_count(repo, action) do + %{rows: [[count]]} = + Ecto.Adapters.SQL.query!(repo, "SELECT count(*)::bigint FROM audit_events WHERE action = $1", [ + action + ]) + + count + end + + defp session_count(repo) do + %{rows: [[count]]} = + Ecto.Adapters.SQL.query!(repo, "SELECT count(*)::bigint FROM impersonation_audit_sessions", []) + + count + end + + defp get_session_row(repo, hashed_token) do + %{rows: rows} = + Ecto.Adapters.SQL.query!( + repo, + "SELECT user_id, hashed_token, type FROM impersonation_audit_sessions WHERE hashed_token = $1", + [hashed_token] + ) + + rows + end + + test "default Ecto store co-fates impersonation start with its audit row", %{repo: repo} do + cfg = base_config(repo, Sigra.SessionStores.Ecto) + admin = %TestUser{id: Ecto.UUID.generate(), email: "admin@example.com"} + target = %TestUser{id: Ecto.UUID.generate(), email: "user@example.com"} + admin_session = session(admin.id, %{id: 11, hashed_token: "admin-hash"}) + + assert {:ok, %{session: result, restore: {:admin_session, "admin-token"}, mode: :impersonating}} = + Impersonation.start( + cfg, + admin_scope(:global, admin), + admin_session, + target, + admin_token: "admin-token" + ) + + assert result.user_id == target.id + assert is_binary(result.token) + assert session_count(repo) == 1 + assert audit_count(repo, "admin.impersonation.start") == 1 + assert audit_count(repo, "session.create") == 0 + assert length(get_session_row(repo, result.hashed_token)) == 1 + end + + test "audit-off parity still persists sessions without audit rows", %{repo: repo} do + cfg = base_config(repo, Sigra.SessionStores.Ecto, nil) + admin = %TestUser{id: Ecto.UUID.generate(), email: "admin@example.com"} + target = %TestUser{id: Ecto.UUID.generate(), email: "user@example.com"} + admin_session = session(admin.id, %{id: 12, hashed_token: "admin-hash"}) + + assert {:ok, %{session: session, mode: :impersonating}} = + Impersonation.start( + cfg, + admin_scope(:global, admin), + admin_session, + target, + admin_token: "admin-token" + ) + + assert session_count(repo) == 1 + assert audit_count(repo, "admin.impersonation.start") == 0 + + assert {:ok, %{restore: {:admin_session, "admin-token"}, session_deleted?: true}} = + Impersonation.stop( + cfg, + %TestScope{user: target, active_organization: nil, membership: nil, impersonating_from: admin}, + session, + admin_token: "admin-token" + ) + + assert session_count(repo) == 0 + assert audit_count(repo, "admin.impersonation.stop") == 0 + end + + test "default Ecto store co-fates impersonation stop with its audit row", %{repo: repo} do + cfg = base_config(repo, Sigra.SessionStores.Ecto) + admin = %TestUser{id: Ecto.UUID.generate(), email: "admin@example.com"} + target = %TestUser{id: Ecto.UUID.generate(), email: "user@example.com"} + + {:ok, _} = + repo.insert(%ImpersonationSessionRecord{ + user_id: target.id, + hashed_token: "impersonation-hash", + type: "standard" + }) + + impersonation_session = + session(target.id, %{ + id: 22, + hashed_token: "impersonation-hash", + impersonator_user_id: admin.id + }) + + assert {:ok, %{restore: {:admin_session, "admin-token"}, session_deleted?: true}} = + Impersonation.stop( + cfg, + %TestScope{user: target, active_organization: nil, membership: nil, impersonating_from: admin}, + impersonation_session, + admin_token: "admin-token" + ) + + assert session_count(repo) == 0 + assert audit_count(repo, "admin.impersonation.stop") == 1 + assert audit_count(repo, "session.delete") == 0 + end + + test "audit insert failure rolls back both start and stop on the Ecto path", %{repo: repo} do + cfg = base_config(repo, Sigra.SessionStores.Ecto) + admin = %TestUser{id: Ecto.UUID.generate(), email: "admin@example.com"} + target = %TestUser{id: Ecto.UUID.generate(), email: "user@example.com"} + admin_session = session(admin.id, %{id: 31, hashed_token: "admin-hash"}) + + Ecto.Adapters.SQL.query!( + repo, + """ + ALTER TABLE audit_events + ADD CONSTRAINT impersonation_audit_guard CHECK (action NOT IN ('admin.impersonation.start', 'admin.impersonation.stop')) + """, + [] + ) + + try do + assert {:error, :impersonation_aborted} = + Impersonation.start( + cfg, + admin_scope(:global, admin), + admin_session, + target, + admin_token: "admin-token" + ) + + assert session_count(repo) == 0 + assert audit_count(repo, "admin.impersonation.start") == 0 + + {:ok, _} = + repo.insert(%ImpersonationSessionRecord{ + user_id: target.id, + hashed_token: "impersonation-hash", + type: "standard" + }) + + impersonation_session = + session(target.id, %{ + id: 32, + hashed_token: "impersonation-hash", + impersonator_user_id: admin.id + }) + + assert {:error, :impersonation_aborted} = + Impersonation.stop( + cfg, + %TestScope{user: target, active_organization: nil, membership: nil, impersonating_from: admin}, + impersonation_session, + admin_token: "admin-token" + ) + + assert session_count(repo) == 1 + assert audit_count(repo, "admin.impersonation.stop") == 0 + after + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS impersonation_audit_guard", + [] + ) + end + end + + test "fallback store keeps the legacy create/delete plus log_safe path", %{repo: repo} do + cfg = base_config(repo, LegacySessionStore) + admin = %TestUser{id: Ecto.UUID.generate(), email: "admin@example.com"} + target = %TestUser{id: Ecto.UUID.generate(), email: "user@example.com"} + target_id = target.id + admin_session = session(admin.id, %{id: 41, hashed_token: "admin-hash"}) + + assert {:ok, %{session: impersonation_session, mode: :impersonating}} = + Impersonation.start( + cfg, + admin_scope(:global, admin), + admin_session, + target, + admin_token: "admin-token" + ) + + assert_received {:legacy_create, ^target_id, metadata} + assert metadata.impersonator_user_id == admin.id + assert metadata.impersonator_session_id == admin_session.id + assert impersonation_session.user_id == target.id + assert impersonation_session.hashed_token == "impersonation-hash" + + assert {:ok, %{session_deleted?: true}} = + Impersonation.stop( + cfg, + %TestScope{user: target, active_organization: nil, membership: nil, impersonating_from: admin}, + impersonation_session, + admin_token: "admin-token" + ) + + assert_received {:legacy_delete, "impersonation-hash"} + + assert audit_count(repo, "session.create") == 1 + assert audit_count(repo, "admin.impersonation.start") == 1 + assert audit_count(repo, "session.delete") == 1 + assert audit_count(repo, "admin.impersonation.stop") == 1 + end +end diff --git a/test/sigra/impersonation_test.exs b/test/sigra/impersonation_test.exs index b0b6302a..d1b679db 100644 --- a/test/sigra/impersonation_test.exs +++ b/test/sigra/impersonation_test.exs @@ -96,12 +96,17 @@ defmodule Sigra.ImpersonationTest do }) Sigra.MockSessionStore - |> expect(:create, fn user_id, metadata, _opts -> + |> expect(:create_session_multi, fn user_id, metadata, _opts -> assert user_id == target.id assert metadata.type == :standard assert metadata.impersonator_user_id == admin.id assert metadata.impersonator_session_id == admin_session.id - {:ok, impersonation_session} + Ecto.Multi.new() |> Ecto.Multi.put(:session, impersonation_session) + end) + + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + {:ok, %{session: impersonation_session}} end) assert {:ok, result} = @@ -131,10 +136,15 @@ defmodule Sigra.ImpersonationTest do }) Sigra.MockSessionStore - |> expect(:create, fn user_id, metadata, _opts -> + |> expect(:create_session_multi, fn user_id, metadata, _opts -> assert user_id == target.id assert metadata.impersonator_user_id == admin.id - {:ok, impersonation_session} + Ecto.Multi.new() |> Ecto.Multi.put(:session, impersonation_session) + end) + + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + {:ok, %{session: impersonation_session}} end) assert {:ok, %{session: ^impersonation_session}} = @@ -207,7 +217,14 @@ defmodule Sigra.ImpersonationTest do } Sigra.MockSessionStore - |> expect(:delete, fn "impersonation-hash", _opts -> :ok end) + |> expect(:delete_session_multi, fn "impersonation-hash", _session, _opts -> + Ecto.Multi.new() |> Ecto.Multi.put(:session_deleted, true) + end) + + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + {:ok, %{session_deleted: true}} + end) assert {:ok, %{restore: {:admin_session, "admin-raw-token"}, session_deleted?: true}} = Impersonation.stop( diff --git a/test/sigra/install/api_token_generator_test.exs b/test/sigra/install/api_token_generator_test.exs index 92f95256..7655122d 100644 --- a/test/sigra/install/api_token_generator_test.exs +++ b/test/sigra/install/api_token_generator_test.exs @@ -50,7 +50,7 @@ defmodule Sigra.Install.APITokenGeneratorTest do end end - describe "api_token_migration.exs template (Postgres)" do + describe "api_token_migration.exs template" do test "creates user_api_tokens table" do content = render_template("api_token_migration.exs") assert content =~ "create table(:user_api_tokens, primary_key: false)" @@ -103,26 +103,6 @@ defmodule Sigra.Install.APITokenGeneratorTest do end end - describe "api_token_migration.exs template (MySQL)" do - test "uses string type for scopes on MySQL" do - content = render_template("api_token_migration.exs", adapter: :mysql) - assert content =~ "add :scopes, :string" - refute content =~ "{:array, :string}" - end - - test "uses change function for MySQL" do - content = render_template("api_token_migration.exs", adapter: :mysql) - assert content =~ "def change do" - end - end - - describe "api_token_migration.exs template (SQLite)" do - test "uses string type for scopes on SQLite" do - content = render_template("api_token_migration.exs", adapter: :sqlite) - assert content =~ "add :scopes, :string" - refute content =~ "{:array, :string}" - end - end describe "user_api_token.ex template" do test "defines schema for user_api_tokens" do @@ -140,11 +120,6 @@ defmodule Sigra.Install.APITokenGeneratorTest do assert content =~ "field :scopes, {:array, :string}, default: []" end - test "uses StringList type for scopes on MySQL" do - content = render_template("user_api_token.ex", adapter: :mysql) - assert content =~ "field :scopes, Sigra.Ecto.Types.StringList" - end - test "belongs_to user" do content = render_template("user_api_token.ex") assert content =~ "belongs_to :user, MyApp.Auth.User" diff --git a/test/sigra/install/authz_template_test.exs b/test/sigra/install/authz_template_test.exs new file mode 100644 index 00000000..a49f5662 --- /dev/null +++ b/test/sigra/install/authz_template_test.exs @@ -0,0 +1,154 @@ +defmodule Sigra.Install.AuthzTemplateTest do + @moduledoc """ + Phase 92 / B2B-02 (Plan 92-02) regression guard for the generated + host-owned `Sigra.Authz` starter. + + Asserts that the install generator emits a host-owned authz module + alongside the admin-policy stub. The generated starter: + + * declares `@behaviour Sigra.Authz` + * implements `can?/3` returning `true` for every input (current + roadmap contract — Plan 92-04 walks hosts to deny-by-default) + * is registered in `Sigra.Install.Features.Core.files/1` so a fresh + `mix sigra.install` writes it under `lib//sigra_authz.ex` + * mirrors the existing admin-policy stub posture so reviewers see + the seam in the same place every time + """ + use ExUnit.Case, async: true + + @moduletag :install + + @template_path Path.expand( + "../../../priv/templates/sigra.install/core/sigra_authz.ex", + __DIR__ + ) + + describe "generated host authz template" do + test "exists on disk under priv/templates/sigra.install/core/sigra_authz.ex" do + assert File.exists?(@template_path), + """ + Phase 92 / Plan 92-02: the generator must emit a host-owned + Sigra.Authz starter. Expected template at: + + #{@template_path} + + The file should mirror priv/templates/sigra.install/admin/policy.ex + in posture: a tiny @behaviour-implementing stub the host owns. + """ + end + + test "declares @behaviour Sigra.Authz on the host-owned starter module" do + source = File.read!(@template_path) + + assert source =~ "@behaviour Sigra.Authz", + """ + The generated authz module must declare `@behaviour Sigra.Authz` + so static analysis (and the host's own readers) can see the + contract. Without this line the seam is invisible at the host. + """ + end + + test "implements can?/3 returning true for every input (current roadmap contract)" do + source = File.read!(@template_path) + + # The starter is allow-all so installed hosts compile and behave + # identically to today's "no library defaults" world. Plan 92-04 + # walks hosts to deny-by-default semantics; until then, returning + # `true` is the explicit contract documented in the moduledoc. + assert source =~ ~r/def can\?\(\s*[a-z_]+\s*,\s*[a-z_]+\s*,\s*[a-z_]+\s*\)\s*do/, + """ + The generated authz module must implement `can?/3` taking + three positional arguments (action, subject, scope). Got: + + #{Regex.run(~r/def can\?\([^)]*\)/, source) |> inspect()} + """ + + # The body must literally evaluate to `true` — not a guard, not a + # fall-through, not a delegation. The host can edit later but the + # generator-emitted starter is allow-all. + assert source =~ ~r/def can\?\([^)]*\)\s*do\s*[^\n]*\n(?:[^d][^e][^f]?[^\n]*\n)*?\s*true\s*\n\s*end/, + """ + The generated `can?/3` must return `true`. Plan 92-02 ships + the seam as allow-all so installed hosts behave identically + to today's no-defaults world; Plan 92-04 walks hosts to + deny-by-default. If you change the starter to deny-by-default + before Plan 92-04 lands, you break the migration guarantee. + """ + end + + test "documents that this is a host-owned starter the recipe will harden" do + source = File.read!(@template_path) + + # Mirrors the admin-policy stub's posture: the moduledoc tells the + # reader the file is theirs and points at the deny-by-default recipe. + assert source =~ "@moduledoc", + "generated authz module must have a moduledoc" + + assert source =~ "host" or source =~ "Host", + "moduledoc must call out that the file is host-owned" + + assert source =~ ~r/Phase 92|Plan 92-04|deny-by-default/, + """ + The moduledoc must reference the Phase 92 seam contract or the + Plan 92-04 deny-by-default recipe so the reader knows where + to go to harden the starter. + """ + end + end + + describe "feature registration" do + test "Sigra.Install.Features.Core.files/1 registers the authz template under lib//sigra_authz.ex" do + binding = [ + otp_app: :my_app, + web_module: "MyAppWeb", + app_module: "MyApp", + context_module: "MyApp.Accounts", + context_alias: "Accounts", + schema_module: "MyApp.Accounts.User", + schema_alias: "User", + repo_module: "MyApp.Repo", + binary_id: true, + opts: [live: true, api: false, jwt: false, mfa: true, oauth: true] + ] + + tuples = Sigra.Install.Features.Core.files(binding) + + sources = Enum.map(tuples, fn {:eex, src, _} -> src end) + targets = Enum.map(tuples, fn {:eex, _src, t} -> t end) + + assert "core/sigra_authz.ex" in sources, + """ + Sigra.Install.Features.Core.files/1 must register the + core/sigra_authz.ex template so a fresh `mix sigra.install` + emits the host-owned authz starter. Today the generator + emits no host authz file at all, leaving Plan 92-04 with + no anchor to walk hosts forward from. + + Sources returned: #{inspect(sources)} + """ + + assert "lib/my_app/sigra_authz.ex" in targets, + """ + The generated authz module must land at + `lib//sigra_authz.ex` so it sits beside + `lib//sigra_admin_policy.ex` (the matching stub). + Symmetrical placement makes the two host-owned policy + modules discoverable as a pair. + + Targets returned: #{inspect(targets)} + """ + end + + test "Features.Core source code references the sigra_authz template (grep anchor)" do + core_source = File.read!("lib/sigra/install/features/core.ex") + + assert core_source =~ "sigra_authz", + """ + The string "sigra_authz" must appear in + lib/sigra/install/features/core.ex so the plan's verify + grep anchors a stable reference. Without this the install + walker won't pick up the new template. + """ + end + end +end diff --git a/test/sigra/install/features/core_post_instructions_test.exs b/test/sigra/install/features/core_post_instructions_test.exs index b00d8864..98dcdc22 100644 --- a/test/sigra/install/features/core_post_instructions_test.exs +++ b/test/sigra/install/features/core_post_instructions_test.exs @@ -120,7 +120,11 @@ defmodule Sigra.Install.Features.CorePostInstructionsTest do assert out =~ "Oban not detected" assert out =~ "synchronous mode" - assert out =~ "To enable async delivery" + # Remediation copy tells the user how to enable async delivery: add the + # :oban dep and configure the sigra_mailer queue. Anchored on the + # remediation marker emitted by `optional_dependency_remediation(:async_email)`. + assert out =~ "Add {:oban" + assert out =~ "sigra_mailer queue" end test "neither config/config.exs nor config/runtime.exs present still emits Oban-absent warning" do diff --git a/test/sigra/install/features/core_test.exs b/test/sigra/install/features/core_test.exs index 57ac8ee2..bf6bc2d2 100644 --- a/test/sigra/install/features/core_test.exs +++ b/test/sigra/install/features/core_test.exs @@ -67,18 +67,19 @@ defmodule Sigra.Install.Features.CoreTest do end describe "migrations/1" do - test "returns exactly 4 slot entries in canonical order" do + test "returns exactly 5 slot entries in canonical order" do # Phase 24.1: :audit_events_org_columns moved to the Organizations # feature so its hard FK to the organizations table lands AFTER # that table is created, and is omitted entirely under # --no-organizations. slots = Core.migrations(@binding) - assert length(slots) == 4 + assert length(slots) == 5 assert [ {:primary, "core/migration.exs", _primary_basename}, {:active_org_column, "core/add_active_organization_id_to_user_sessions.exs", _active_org_basename}, + {:webhooks, "core/webhook_migration.exs", _webhook_basename}, {:api_token, "core/api_token_migration.exs", _api_basename}, {:audit_events, "core/create_audit_events.exs", _audit_basename} ] = slots @@ -188,17 +189,17 @@ defmodule Sigra.Install.Features.CoreTest do # allocator. assert "core/migration.exs" in sources assert "core/add_active_organization_id_to_user_sessions.exs" in sources + assert "core/webhook_migration.exs" in sources assert "core/create_audit_events.exs" in sources # api_token migration is only included with --api/--jwt refute "core/api_token_migration.exs" in sources end - test "default (live=true, api=false, jwt=false) returns exactly 38 files" do - # 28 base_files + 9 ui_files (live-mode) + 3 inlined migrations - # (primary + active_org_column + audit_events); api_token migration - # is --api-only; audit_events_org_columns moved to the Organizations - # feature in Phase 24.1 (was previously in Core's files/1). - assert length(Core.files(@binding)) == 38 + test "default (live=true, api=false, jwt=false) returns exactly 44 files" do + # Phase 92 / Plan 92-02 added core/sigra_authz.ex. + # Phase 97/98 added 4 webhook core templates plus 1 inlined + # webhook migration to the default installer surface. + assert length(Core.files(@binding)) == 44 end test "--no-live excludes LiveView UI templates and includes controller-mode UI" do @@ -218,9 +219,12 @@ defmodule Sigra.Install.Features.CoreTest do assert "core/mfa_settings_html.ex" in sources end - test "--no-live returns exactly 32 files" do + test "--no-live returns exactly 38 files" do + # Phase 92 / Plan 92-02 added core/sigra_authz.ex. + # Phase 97/98 added 4 webhook core templates plus 1 inlined + # webhook migration to the controller-mode installer surface. binding = Keyword.put(@binding, :opts, live: false, api: false, jwt: false) - assert length(Core.files(binding)) == 32 + assert length(Core.files(binding)) == 38 end test "falls back to the plaintext stub when encryption-requiring features are disabled" do diff --git a/test/sigra/install/features/coverage_test.exs b/test/sigra/install/features/coverage_test.exs index f88cc9ea..ae1252bf 100644 --- a/test/sigra/install/features/coverage_test.exs +++ b/test/sigra/install/features/coverage_test.exs @@ -203,6 +203,76 @@ defmodule Sigra.Install.Features.CoverageTest do end end + describe "Phase 92 Plan 92-02 ownership split (core/authz vs organizations/membership)" do + # Plan 92-02 introduces a deliberate split: `core/sigra_authz.ex` + # is owned by Features.Core (because Sigra.Authz is a core seam + + # scope-template contract) while membership-role storage stays + # owned by Features.Organizations. These tests freeze that split + # so a future refactor cannot silently relocate ownership. + + @core_binding [ + otp_app: :ownership_split_app, + web_module: "OwnershipSplitAppWeb", + app_module: "OwnershipSplitApp", + context_module: "OwnershipSplitApp.Accounts", + context_alias: "Accounts", + schema_module: "OwnershipSplitApp.Accounts.User", + schema_alias: "User", + repo_module: "OwnershipSplitApp.Repo", + binary_id: true, + opts: [live: true, api: false, jwt: false, mfa: true, oauth: true] + ] + + test "Features.Core owns core/sigra_authz.ex (Authz seam is core-scoped)" do + core_sources = + Sigra.Install.Features.Core.files(@core_binding) + |> Enum.map(fn {:eex, src, _} -> src end) + + assert "core/sigra_authz.ex" in core_sources, + """ + Plan 92-02 contract: Features.Core owns the host-owned + Sigra.Authz starter because the Authz behaviour is a core + seam + a scope-template contract (scope.ex carries the + :role / :actor_type fields the starter reads). Moving the + template to another feature would re-couple Authz to that + feature's optional flag. + """ + + org_sources = + Sigra.Install.Features.Organizations.files(@core_binding) + |> Enum.map(fn {:eex, src, _} -> src end) + + refute "core/sigra_authz.ex" in org_sources, + """ + Features.Organizations must NOT register core/sigra_authz.ex. + That file is owned by Features.Core (Plan 92-02 ownership + split): the Authz seam exists even with --no-organizations. + """ + end + + test "Features.Organizations owns organization_membership.ex (membership storage stays org-scoped)" do + org_sources = + Sigra.Install.Features.Organizations.files(@core_binding) + |> Enum.map(fn {:eex, src, _} -> src end) + + assert "organizations/organization_membership.ex" in org_sources, + "Features.Organizations must own the OrganizationMembership schema template" + + assert "organizations/migration.exs" in org_sources, + "Features.Organizations must own the organizations migration template" + + core_sources = + Sigra.Install.Features.Core.files(@core_binding) + |> Enum.map(fn {:eex, src, _} -> src end) + + refute "organizations/organization_membership.ex" in core_sources, + "Features.Core must NOT register organizations/organization_membership.ex" + + refute "organizations/migration.exs" in core_sources, + "Features.Core must NOT register organizations/migration.exs" + end + end + # Migrations return tuples like `{:organizations, "organizations/migration.exs", "create_organizations.exs"}` # where the middle element is already relative to @template_root. Normalize defensively. defp normalize(path), do: Path.relative_to(path, ".") diff --git a/test/sigra/install/features/organizations_test.exs b/test/sigra/install/features/organizations_test.exs index ddc50bff..d1a916f6 100644 --- a/test/sigra/install/features/organizations_test.exs +++ b/test/sigra/install/features/organizations_test.exs @@ -138,6 +138,38 @@ defmodule Sigra.Install.Features.OrganizationsTest do assert organizations_template =~ "use Sigra.Organizations" end + test "organizations.ex template passes explicit host-owned :roles / :owner_role / :invitation_admin_roles to use Sigra.Organizations (Phase 92 Plan 92-02)" do + # Phase 92 / B2B-02 (Plan 92-01) made these three options required + # on Sigra.Organizations.__config_schema__. Plan 92-02 closes the loop: + # the generated host wrapper must supply them explicitly so the + # privilege taxonomy is visible at `use Sigra.Organizations` and the + # NimbleOptions schema is satisfied without library-side defaults. + template = + File.read!("priv/templates/sigra.install/organizations/organizations.ex") + + assert template =~ ~r/roles:\s*\[/, + """ + The generated organizations.ex wrapper must pass an explicit + `roles: [...]` option to `use Sigra.Organizations`. Phase 92 + made :roles a required NimbleOptions key — without this line + the host app fails to compile with NimbleOptions.ValidationError. + """ + + assert template =~ ~r/owner_role:\s*:/, + """ + The generated organizations.ex wrapper must pass an explicit + `owner_role: :` option (Phase 92 required key). + """ + + assert template =~ ~r/invitation_admin_roles:\s*\[/, + """ + The generated organizations.ex wrapper must pass an explicit + `invitation_admin_roles: [...]` option (Phase 92 required key) + so invitation-admin privilege is visible at the host's + `use Sigra.Organizations` call site. + """ + end + test "organizations.ex template compiles against real Sigra.Organizations.__using__/1 (CR-01 regression)" do # This test renders the EEx template against a set of stub schema # modules and compiles the result end-to-end. It catches NimbleOptions @@ -541,6 +573,197 @@ defmodule Sigra.Install.Features.OrganizationsTest do end end + describe "generated membership schema role storage (Phase 92 Plan 92-02)" do + @membership_template_path "priv/templates/sigra.install/organizations/organization_membership.ex" + + test "schema role field is nullable (not Ecto.Enum, no required taxonomy)" do + template = File.read!(@membership_template_path) + + # The library no longer ships canonical role atoms (Phase 92-01). + # The generator must NOT hard-code Ecto.Enum [:owner, :admin, :member] + # because that re-introduces the very taxonomy Phase 92 removed. + refute template =~ ~r/Ecto\.Enum,\s*values:\s*\[:owner,\s*:admin,\s*:member\]/, + """ + organization_membership.ex template must NOT hard-code + `Ecto.Enum, values: [:owner, :admin, :member]` for the role + field. Phase 92 / B2B-02 makes role storage host-owned and + nullable; hosts pick their own taxonomy via + `use Sigra.Organizations` config. + """ + + # Phase 92 CR-02 fix: the field uses Sigra.Ecto.Types.RoleAtom so + # role values round-trip as atoms in Elixir code while the DB + # column stays a plain string the host can edit. + assert template =~ ~r/field :role,\s*Sigra\.Ecto\.Types\.RoleAtom/, + """ + organization_membership.ex must declare + `field :role, Sigra.Ecto.Types.RoleAtom`. Phase 92 CR-02 + fix: a plain `:string` field silently breaks atom + comparisons in `Sigra.Plug.RequireMembership` and the + last-owner guard — the custom type round-trips atoms. + """ + end + + test "changeset does not enforce :role as required (host-owned nullability)" do + template = File.read!(@membership_template_path) + + # The Plan 92-01 contract: the host owns role taxonomy AND + # nullability. The generated changeset must allow nil role values + # so a host can rely on its own validation rules without fighting + # the generator-emitted starter. + refute template =~ ~r/validate_required\(\[:role,/, + """ + organization_membership.ex generated changeset must NOT + list :role in `validate_required` — Plan 92-02 makes role + nullable so hosts that prefer late role assignment (e.g. + accept-invite-then-pick-role) work out of the box. + """ + + refute template =~ ~r/validate_required\(\[[^\]]*:role\b/, + """ + organization_membership.ex generated changeset must NOT + include :role in any validate_required call — Plan 92-02 + keeps :role nullable. + """ + end + end + + describe "generated invitation schema role storage (Phase 92 CR-02 fix)" do + @invitation_template_path "priv/templates/sigra.install/organizations/organization_invitation.ex" + + test "invitation schema role field uses Sigra.Ecto.Types.RoleAtom (no Ecto.Enum literal taxonomy)" do + template = File.read!(@invitation_template_path) + + # CR-02 from Phase 92 code review: the invitation schema previously + # still hardcoded `Ecto.Enum, values: [:owner, :admin, :member]` + # while membership had been migrated. A host with a custom taxonomy + # would see Invitations.create/2 raise Ecto.ChangeError because + # their role atom is not in the literal Enum values list. + refute template =~ ~r/Ecto\.Enum,\s*values:\s*\[:owner,\s*:admin,\s*:member\]/, + """ + organization_invitation.ex template must NOT hard-code + `Ecto.Enum, values: [:owner, :admin, :member]`. CR-02 from + the Phase 92 code review: this taxonomy literal breaks + custom-taxonomy hosts at invitation creation time. Use + Sigra.Ecto.Types.RoleAtom for the symmetric host-owned shape. + """ + + assert template =~ ~r/field :role,\s*Sigra\.Ecto\.Types\.RoleAtom/, + """ + organization_invitation.ex must declare + `field :role, Sigra.Ecto.Types.RoleAtom` so it parallels + the membership schema and round-trips role atoms cleanly. + """ + end + end + + describe "generated migration role column storage (Phase 92 Plan 92-02)" do + @migration_template_path "priv/templates/sigra.install/organizations/migration.exs" + + test "organization_invitations.role column is nullable with no default (CR-03 fix)" do + template = File.read!(@migration_template_path) + + # CR-03 from Phase 92 code review: both adapter branches still + # emitted `add :role, :string, null: false, default: "member"` for + # invitations — the inverse of what Plan 92-02 did for memberships. + # The default "member" string baked the very taxonomy Phase 92 deletes + # into every fresh host install. + invitation_blocks = extract_create_table_blocks(template, "organization_invitations") + + assert length(invitation_blocks) >= 1, + "expected at least one organization_invitations create-table block in migration template" + + Enum.each(invitation_blocks, fn block -> + refute block =~ ~r/add :role, :string, null: false, default: "member"/, + """ + organization_invitations block emits the pre-CR-03 role + column shape: + + #{Regex.run(~r/add :role[^\n]+/, block) |> inspect()} + + CR-03 contract: invitations role column must mirror the + memberships shape — nullable string, no default. Drop both + `null: false` and `default: "member"`. + """ + + refute block =~ ~r/add :role, :string, null: false/, + """ + organization_invitations role column must be nullable + (CR-03). Drop `null: false`. + """ + + refute block =~ ~r/add :role[^\n]*default:\s*"member"/, + """ + organization_invitations role column must drop + `default: "member"` (CR-03) — the canonical taxonomy + default cannot ship in a host-owned-taxonomy world. + """ + + assert block =~ ~r/add :role/, + "organization_invitations block must still declare an `add :role` column" + end) + end + + test "organization_memberships.role column is nullable with no default" do + template = File.read!(@migration_template_path) + + # Plan 92-02 scope is the MEMBERSHIPS table only. Slice the template + # into per-table blocks by searching from each `create table(:X ...)` + # line down to the next `create table(...)` or the closing `end` of + # the `def up do` block. We need to test the memberships block in + # isolation because the invitations table also has a `role` column + # (with its own `default: "member"`) that is out of this plan's scope. + memberships_blocks = extract_create_table_blocks(template, "organization_memberships") + + assert length(memberships_blocks) >= 1, + "expected at least one organization_memberships create-table block in migration template" + + # Across both postgres and mysql/sqlite adapter branches, the + # memberships role column MUST be nullable AND MUST NOT carry a + # `default: "member"` (canonical-taxonomy default removed by 92-01). + Enum.each(memberships_blocks, fn block -> + refute block =~ ~r/add :role, :string, null: false, default: "member"/, + """ + organization_memberships block emits the pre-Plan-92-02 + role column shape: + + #{Regex.run(~r/add :role[^\n]+/, block) |> inspect()} + + Plan 92-02 contract: role is nullable AND host-owned. + Drop both `null: false` and `default: "member"`. + """ + + refute block =~ ~r/add :role, :string, null: false/, + """ + organization_memberships role column must be nullable + (Plan 92-02). Drop `null: false`. + """ + + refute block =~ ~r/add :role[^\n]*default:\s*"member"/, + """ + organization_memberships role column must drop + `default: "member"` — Plan 92-02 removes the canonical + role-taxonomy default. + """ + + # Sanity floor: the role column must still exist in the schema — + # only its constraints change, not the column itself. + assert block =~ ~r/add :role/, + "organization_memberships block must still declare an `add :role` column" + end) + end + end + + # Slice helper for the migration-template tests above. Returns the + # `create table(:NAME ...) do ... end` block(s) for `name`. Matches both + # postgres and mysql/sqlite branches in the template. + defp extract_create_table_blocks(template, name) do + pattern = ~r/create table\(:#{Regex.escape(name)}[^)]*\)\s+do(.*?)\n end/s + + Regex.scan(pattern, template, capture: :all_but_first) + |> List.flatten() + end + describe "injections/1" do # The injection templates contain EEx tags that reference :web_module # and :app_module from the binding (Phase 24 fix — the builders now @@ -549,25 +772,26 @@ defmodule Sigra.Install.Features.OrganizationsTest do @injections_binding [ otp_app: :my_app, web_module: "MyAppWeb", - app_module: "MyApp" + app_module: "MyApp", + context_module: "MyApp.Accounts", + opts: [] ] - test "returns list with router injection only (Phase 24.1: user_auth baked into template)" do + test "returns layouts and router injections for organizations" do injections = Organizations.injections(@injections_binding) assert is_list(injections) - # Phase 24.1: the :assign_user_organizations on_mount clause was - # moved from an injection fragment into core/user_auth.ex directly - # (gated on `<%= if organizations? do %>`). Only the router - # injection remains. - assert length(injections) == 1 Enum.each(injections, fn injection -> assert %Sigra.Install.Injection{} = injection end) targets = Enum.map(injections, & &1.target) - assert Enum.any?(targets, &String.ends_with?(&1, "router.ex")) + assert Enum.frequencies(targets) == %{ + Path.join(["lib", "my_app_web", "components", "layouts.ex"]) => 3, + Path.join(["lib", "my_app_web", "router.ex"]) => 1 + } + refute Enum.any?(targets, &String.ends_with?(&1, "user_auth.ex")) end @@ -582,6 +806,9 @@ defmodule Sigra.Install.Features.OrganizationsTest do assert router_injection.content =~ ~s|post "/organizations/switch"| assert router_injection.content =~ ~s|scope "/organizations/:org"| assert router_injection.content =~ "Sigra.Plug.LoadOrganizationFromSlug" + assert router_injection.content =~ "organizations: MyApp.Organizations" + assert router_injection.content =~ "scope_module: MyApp.Accounts.Scope" + assert router_injection.content =~ "Sigra.LiveView.OrganizationScope" end end @@ -1049,9 +1276,8 @@ defmodule Sigra.Install.Features.OrganizationsTest do hashed_token_matches = Regex.scan(~r/unique_index\(:organization_invitations, \[:hashed_token\]\)/, template) - # One occurrence per adapter branch (postgres + mysql/sqlite). - assert length(hashed_token_matches) == 2, - "Expected 2 `unique_index(:organization_invitations, [:hashed_token])` occurrences (one per adapter branch), got #{length(hashed_token_matches)}" + assert length(hashed_token_matches) == 1, + "Expected 1 `unique_index(:organization_invitations, [:hashed_token])` occurrence, got #{length(hashed_token_matches)}" end test "Phase 16 D-03 pending-invitation partial index (IS NULL predicate) is preserved" do diff --git a/test/sigra/install/generator_email_test.exs b/test/sigra/install/generator_email_test.exs index 22e91514..d9745d80 100644 --- a/test/sigra/install/generator_email_test.exs +++ b/test/sigra/install/generator_email_test.exs @@ -79,7 +79,7 @@ defmodule Sigra.Install.GeneratorEmailTest do test "includes CTA button color from UI-SPEC" do content = render_template("emails.ex") - assert content =~ "#2563eb" + assert content =~ "#1d4ed8" end test "includes background color from UI-SPEC" do diff --git a/test/sigra/install/generator_mfa_test.exs b/test/sigra/install/generator_mfa_test.exs index 1c19935c..82824f2c 100644 --- a/test/sigra/install/generator_mfa_test.exs +++ b/test/sigra/install/generator_mfa_test.exs @@ -23,7 +23,11 @@ defmodule Sigra.Install.GeneratorMFATest do # "organizations?"`. organizations?: true, # Core templates gate passkey-related branches on `passkeys?` (EEx assigns). - passkeys?: true + passkeys?: true, + # Phase 93 Plan 03: auth.ex template gates the OptionalDeps preflight on + # `<%= if api || jwt do %>` (changed from Keyword.get(opts, :api/:jwt)). + api: false, + jwt: false ] describe "MFA template files exist" do diff --git a/test/sigra/install/generator_wiring_test.exs b/test/sigra/install/generator_wiring_test.exs index ef1e12bf..7990d04b 100644 --- a/test/sigra/install/generator_wiring_test.exs +++ b/test/sigra/install/generator_wiring_test.exs @@ -18,7 +18,9 @@ defmodule Sigra.Install.GeneratorWiringTest do binary_id: false, adapter: :postgres, organizations?: true, - passkeys?: true + passkeys?: true, + api: false, + jwt: false ] describe "generator file list includes Phase 3 templates" do @@ -215,10 +217,189 @@ defmodule Sigra.Install.GeneratorWiringTest do end end + describe "webhook wiring" do + test "generated accounts config includes webhook schemas and queue defaults" do + content = render_fixture("lib/sigra_install_golden_tmp/accounts.ex") + + assert content =~ "webhooks: [" + assert content =~ "endpoint_policy: &__MODULE__.webhook_endpoint_policy/1" + assert content =~ "webhook_subscription_schema: WebhookSubscription" + assert content =~ "webhook_event_schema: WebhookEvent" + assert content =~ "webhook_delivery_schema: WebhookDelivery" + assert content =~ "webhook_delivery_attempt_schema: WebhookDeliveryAttempt" + assert content =~ ~s(oban_queue: "sigra_webhooks") + assert content =~ "signature_tolerance: 300" + assert content =~ "def webhook_endpoint_policy(_context), do: :ok" + assert content =~ "def webhook_event_types do" + assert content =~ "Sigra.Webhooks.public_event_types()" + assert content =~ "def list_webhook_subscriptions do" + assert content =~ "def create_webhook_subscription(attrs) do" + assert content =~ "def update_webhook_subscription(subscription, attrs) do" + assert content =~ "def enable_webhook_subscription(subscription) do" + assert content =~ "def disable_webhook_subscription(subscription) do" + assert content =~ "def list_admin_webhook_subscriptions(admin_scope, params \\\\ %{}) do" + assert content =~ "def get_admin_webhook_subscription!(admin_scope, subscription_id) do" + assert content =~ "def list_admin_webhook_failures(admin_scope, params \\\\ %{}) do" + assert content =~ "def get_admin_webhook_delivery!(admin_scope, delivery_id) do" + + assert content =~ + "def replay_admin_webhook_delivery(admin_scope, delivery_id, opts \\\\ []) do" + + assert content =~ "def create_admin_webhook_subscription(admin_scope, attrs) do" + + assert content =~ + "def update_admin_webhook_subscription(admin_scope, subscription_id, attrs) do" + + assert content =~ "def enable_admin_webhook_subscription(admin_scope, subscription_id) do" + assert content =~ "def disable_admin_webhook_subscription(admin_scope, subscription_id) do" + assert content =~ "def reveal_admin_webhook_secret(admin_scope, subscription_id) do" + assert content =~ "def rotate_admin_webhook_secret(admin_scope, subscription_id) do" + assert content =~ "def prepare_admin_webhook_secret(admin_scope, subscription_id) do" + + assert content =~ + "def discard_prepared_admin_webhook_secret(admin_scope, subscription_id) do" + + assert content =~ + "def start_admin_webhook_secret_overlap(admin_scope, subscription_id, opts \\\\ []) do" + + assert content =~ + "def complete_admin_webhook_secret_rotation(admin_scope, subscription_id) do" + + assert content =~ + "Sigra.Admin.Webhooks.Actions.replay_delivery(sigra_config(), admin_scope, delivery_id, opts)" + end + + test "example, template, and golden auth surfaces expose the same replay wrapper" do + example = + File.read!(Path.join([File.cwd!(), "test", "example", "lib", "example", "accounts.ex"])) + + template = File.read!(Path.join(@template_dir, "auth.ex")) + golden = render_fixture("lib/sigra_install_golden_tmp/accounts.ex") + + for content <- [example, template, golden] do + assert content =~ + "def replay_admin_webhook_delivery(admin_scope, delivery_id, opts \\\\ []) do" + + assert content =~ + "Sigra.Admin.Webhooks.Actions.replay_delivery(sigra_config(), admin_scope, delivery_id, opts)" + end + end + + test "generated migration and schemas for webhook tables exist" do + migration = render_fixture("priv/repo/migrations/TIMESTAMP_create_webhook_tables.exs") + + subscription = + render_fixture("lib/sigra_install_golden_tmp/accounts/webhook_subscription.ex") + + event = render_fixture("lib/sigra_install_golden_tmp/accounts/webhook_event.ex") + delivery = render_fixture("lib/sigra_install_golden_tmp/accounts/webhook_delivery.ex") + + attempt = + render_fixture("lib/sigra_install_golden_tmp/accounts/webhook_delivery_attempt.ex") + + assert migration =~ "create table(:webhook_subscriptions" + assert migration =~ "create table(:webhook_events" + assert migration =~ "create table(:webhook_deliveries" + assert migration =~ "create table(:webhook_delivery_attempts" + assert migration =~ ~r/add\s+:signing_secret,\s+:binary,\s+null:\s+false/ + assert migration =~ ~r/add\s+:next_signing_secret,\s+:binary/ + assert migration =~ ~r/add\s+:rotation_state,\s+:string,\s+null:\s+false,\s+default:\s+"stable"/ + assert migration =~ ~r/add\s+:rotation_prepared_at,\s+:utc_datetime_usec/ + assert migration =~ ~r/add\s+:rotation_overlap_started_at,\s+:utc_datetime_usec/ + assert migration =~ ~r/add\s+:rotation_retire_after_at,\s+:utc_datetime_usec/ + assert migration =~ ~r/add\s+:rotation_completed_at,\s+:utc_datetime_usec/ + assert migration =~ ~r/add\s+:rotation_last_changed_by_user_id,\s+:binary_id/ + assert migration =~ ~r/add\s+:signing_secret_fingerprint,\s+:string/ + assert migration =~ ~r/add\s+:next_signing_secret_fingerprint,\s+:string/ + assert migration =~ ~r/add\s+:event_id,\s+:string,\s+null:\s+false/ + assert migration =~ ~r/add\s+:delivery_id,\s+:string,\s+null:\s+false/ + assert migration =~ ~r/add\s+:attempt_count,\s+:integer,\s+null:\s+false,\s+default:\s+0/ + assert migration =~ ~r/add\s+:replayed_from_webhook_delivery_id/ + assert migration =~ ~r/add\s+:replay_root_webhook_delivery_id/ + assert migration =~ ~r/add\s+:replayed_at,\s+:utc_datetime_usec/ + assert migration =~ ~r/add\s+:replayed_by_user_id,\s+:binary_id/ + assert migration =~ ~r/add\s+:replay_source,\s+:string/ + assert migration =~ "unique_index(:webhook_deliveries, [:replayed_from_webhook_delivery_id]" + assert migration =~ "index(:webhook_deliveries, [:replay_root_webhook_delivery_id])" + assert migration =~ ~r/add\s+:retry_after_seconds,\s+:integer/ + + assert subscription =~ "schema \"webhook_subscriptions\"" + assert subscription =~ "field :event_types, {:array, :string}, default: []" + assert subscription =~ "field :signing_secret" + assert subscription =~ "field :next_signing_secret" + assert subscription =~ "field :rotation_state, Ecto.Enum" + assert subscription =~ "values: [:stable, :prepared, :overlap_active, :completed]" + assert subscription =~ "field :rotation_prepared_at, :utc_datetime_usec" + assert subscription =~ "field :rotation_overlap_started_at, :utc_datetime_usec" + assert subscription =~ "field :rotation_retire_after_at, :utc_datetime_usec" + assert subscription =~ "field :rotation_completed_at, :utc_datetime_usec" + assert subscription =~ "field :rotation_last_changed_by_user_id, :binary_id" + assert subscription =~ "field :signing_secret_fingerprint, :string" + assert subscription =~ "field :next_signing_secret_fingerprint, :string" + assert event =~ "schema \"webhook_events\"" + assert event =~ "field :event_id, :string" + assert delivery =~ "schema \"webhook_deliveries\"" + assert delivery =~ "field :delivery_id, :string" + assert delivery =~ "field :replayed_from_webhook_delivery_id, :binary_id" + assert delivery =~ "field :replay_root_webhook_delivery_id, :binary_id" + assert delivery =~ "field :replayed_at, :utc_datetime_usec" + assert delivery =~ "field :replayed_by_user_id, :binary_id" + assert delivery =~ "field :replay_source, :string" + assert delivery =~ "has_many :attempts" + assert delivery =~ "field :terminal_reason, :string" + assert attempt =~ "schema \"webhook_delivery_attempts\"" + assert attempt =~ "field :attempt_number, :integer" + assert attempt =~ "field :retryable, :boolean, default: false" + assert attempt =~ "field :terminal_reason, :string" + end + + test "generated admin router and shell expose webhook routes and navigation" do + router = render_fixture("lib/sigra_install_golden_tmp_web/router.ex") + shell = render_fixture("lib/sigra_install_golden_tmp_web/components/admin_shell.ex") + + assert router =~ + ~s(live "/admin/webhooks", Elixir.Sigra.Admin.Live.WebhookSubscriptionsIndexLive, :index) + + assert router =~ + ~s(live "/admin/webhooks/failures", Elixir.Sigra.Admin.Live.WebhookDeliveryFailuresLive, :index) + + assert router =~ ~s(live "/admin/webhooks/subscriptions/:id") + assert router =~ "Elixir.Sigra.Admin.Live.WebhookSubscriptionShowLive" + assert router =~ ":show" + + assert router =~ ~s(live "/admin/webhooks/deliveries/:id") + assert router =~ "Elixir.Sigra.Admin.Live.WebhookDeliveryShowLive" + + assert shell =~ "Webhooks" + assert shell =~ "Failures" + assert shell =~ ~s(href={~p"/admin/webhooks"}) + assert shell =~ ~s(href={~p"/admin/webhooks/failures"}) + end + + test "admin feature emits a webhook receiver setup document" do + admin_feature = + File.read!(Path.join([File.cwd!(), "lib", "sigra", "install", "features", "admin.ex"])) + + doc = render_fixture("docs/webhook_receiver_setup.md") + + assert admin_feature =~ "Path.join([\"docs\", \"webhook_receiver_setup.md\"])" + assert doc =~ "raw request body" + assert doc =~ "body_reader" + assert doc =~ "delivery_id" + assert doc =~ "SIGRA_WEBHOOK_SECRET_CURRENT" + assert doc =~ "prepare the next secret" + end + end + # -- Helpers -- defp render_template(name) do path = Path.join(@template_dir, name) EEx.eval_file(path, @base_binding) end + + defp render_fixture(relative_path) do + Path.join([File.cwd!(), "test", "fixtures", "install_golden", "tree", relative_path]) + |> File.read!() + end end diff --git a/test/sigra/install/idempotency_test.exs b/test/sigra/install/idempotency_test.exs index 7875da48..9f4f6e50 100644 --- a/test/sigra/install/idempotency_test.exs +++ b/test/sigra/install/idempotency_test.exs @@ -97,6 +97,88 @@ defmodule Sigra.Install.IdempotencyTest do "first sigra.install run did not report any file creation — harness is broken" end + test "Phase 92 Plan 92-02 ownership split: sigra_authz.ex + organizations/* land on first run AND survive second run", %{ + app_dir: app_dir, + first_stdout: first_stdout + } do + # Plan 92-02 introduces a core/organizations ownership split that + # MUST be both visible at install time AND idempotent on re-run. + # First-run sanity: + authz_path = Path.join([app_dir, "lib", Path.basename(app_dir), "sigra_authz.ex"]) + + assert File.exists?(authz_path), + """ + Plan 92-02: a fresh `mix sigra.install` must emit + `lib//sigra_authz.ex`. Got app_dir = #{inspect(app_dir)}. + First-run stdout snippet: + + #{first_stdout |> String.split("\n") |> Enum.take(40) |> Enum.join("\n")} + """ + + # The host-owned authz starter must declare @behaviour Sigra.Authz + # so a second `sigra.install` run treats it as already-installed. + authz_source = File.read!(authz_path) + + assert authz_source =~ "@behaviour Sigra.Authz", + "host authz starter must declare @behaviour Sigra.Authz on first install" + + # Membership schema lands under the host accounts directory and is + # nullable / non-opinionated by Plan 92-02. + membership_path = + Path.join([ + app_dir, + "lib", + Path.basename(app_dir), + "accounts", + "organization_membership.ex" + ]) + + assert File.exists?(membership_path), + "Plan 92-02: organization_membership.ex must land under the host accounts dir" + + membership_source = File.read!(membership_path) + + refute membership_source =~ ~r/Ecto\.Enum,\s*values:\s*\[:owner,\s*:admin,\s*:member\]/, + """ + Generated organization_membership.ex must NOT carry the + pre-Plan-92-02 hard-coded `Ecto.Enum, values: [:owner, :admin, :member]` + shape — Plan 92-02 makes role nullable + host-owned. + """ + + # Run a SECOND install and confirm both files are byte-identical + # (the GEN-04 idempotency contract still holds for the new core/ + # organizations ownership split). The other idempotency test in + # this file already proves global byte-identity; here we narrow + # the assertion to the Plan-92-02 surface so a future ownership + # regression has a precise failure signal. + snapshot_before = sha256(authz_path) <> sha256(membership_path) + + {second_out, status} = + System.cmd( + "mix", + ["sigra.install", "Accounts", "User", "users", "--yes"], + cd: app_dir, + stderr_to_stdout: true, + env: [{"MIX_ENV", "dev"}] + ) + + assert status == 0, "second sigra.install failed:\n#{second_out}" + + snapshot_after = sha256(authz_path) <> sha256(membership_path) + + assert snapshot_before == snapshot_after, + """ + Plan 92-02 ownership-split files changed between install runs. + Either sigra_authz.ex or organization_membership.ex was + rewritten on the second run. GEN-04 idempotency must hold for + the new split. + """ + end + + defp sha256(path) do + :crypto.hash(:sha256, File.read!(path)) |> Base.encode16(case: :lower) + end + # -- helpers -------------------------------------------------------------- defp hash_snapshot(app_dir) do diff --git a/test/sigra/install/isolation_test.exs b/test/sigra/install/isolation_test.exs index f8f28c60..424acb49 100644 --- a/test/sigra/install/isolation_test.exs +++ b/test/sigra/install/isolation_test.exs @@ -83,9 +83,15 @@ defmodule Sigra.Install.IsolationTest do end) end - test "contains exactly 49 templates" do + test "contains exactly 56 templates" do + # Phase 92 / Plan 92-02: +1 (core/sigra_authz.ex), the host-owned + # `Sigra.Authz` starter wired into Features.Core.files/1. + # Phase 93 / Plan 93-03: +1 (core/oauth_token_controller.ex), + # the RFC 6749 §4.4 client_credentials grant controller, gated + # on `--jwt --organizations` (D-93-18). + # Phase 97/98: +5 webhook templates/migration in core. files = File.ls!("priv/templates/sigra.install/core") - assert length(files) == 49 + assert length(files) == 56 end end diff --git a/test/sigra/install/oauth_smoketest_task_test.exs b/test/sigra/install/oauth_smoketest_task_test.exs new file mode 100644 index 00000000..7d697954 --- /dev/null +++ b/test/sigra/install/oauth_smoketest_task_test.exs @@ -0,0 +1,160 @@ +defmodule Sigra.Install.OAuthSmoketestTaskTest do + use ExUnit.Case, async: false + + import ExUnit.CaptureIO + + defmodule TaskImplStub do + def run(opts) do + send(self(), {:task_impl_opts, opts}) + + case Process.get(:task_impl_result, :ok) do + fun when is_function(fun, 1) -> fun.(opts) + other -> other + end + end + end + + describe "mix sigra.oauth.smoketest" do + setup do + original = Application.get_env(:sigra, :oauth_smoketest_impl) + Application.put_env(:sigra, :oauth_smoketest_impl, TaskImplStub) + + on_exit(fn -> + if original do + Application.put_env(:sigra, :oauth_smoketest_impl, original) + else + Application.delete_env(:sigra, :oauth_smoketest_impl) + end + + Process.delete(:task_impl_result) + end) + + :ok + end + + test "passes validated options to the runtime" do + capture_io(fn -> + Mix.Tasks.Sigra.Oauth.Smoketest.run([ + "--provider=google", + "--port=4100", + "--config=Example.Accounts.sigra_config/0" + ]) + end) + + assert_received {:task_impl_opts, + [provider: "google", port: 4100, config: "Example.Accounts.sigra_config/0"]} + end + + test "prints success when the runtime returns :ok" do + output = + capture_io(fn -> + Mix.Tasks.Sigra.Oauth.Smoketest.run(["--provider=google"]) + end) + + assert output =~ "OK — round-trip succeeded." + end + + test "exits with the runtime error code" do + Process.put(:task_impl_result, {:error, 2, "missing config"}) + + assert catch_exit( + capture_io(:stderr, fn -> + Mix.Tasks.Sigra.Oauth.Smoketest.run(["--provider=google"]) + end) + ) == {:shutdown, 2} + end + + test "requires --provider" do + assert_raise NimbleOptions.ValidationError, fn -> + Mix.Tasks.Sigra.Oauth.Smoketest.run([]) + end + end + end + + describe "Sigra.OAuth.Smoketest.run/1" do + test "returns success after a simulated callback and exchanges on loopback" do + parent = self() + + result = + capture_io(fn -> + send( + parent, + {:runtime_result, + Sigra.OAuth.Smoketest.run( + provider: "google", + port: 4101, + load_config_fun: fn _opts -> + {:ok, + %{ + secret_key_base: String.duplicate("a", 64), + oauth: [ + providers: [ + google: [client_id: "cid", client_secret: "secret"] + ] + ] + }} + end, + start_server_fun: fn server_opts -> + send(parent, {:server_opts, server_opts}) + {:ok, self()} + end, + stop_server_fun: fn _server -> :ok end, + authorize_url_fun: fn _provider_config -> + {:ok, + %{ + url: "https://accounts.example.test/auth?state=placeholder", + session_params: %{code_verifier: "verifier"} + }} + end, + receive_callback_fun: fn _timeout_ms -> + receive do + {:authorize_state, state} -> {:ok, %{"state" => state, "code" => "auth-code"}} + end + end, + callback_fun: fn _provider_config, _params, _session -> + {:ok, %{"sub" => "google-sub", "email" => "jon@example.test"}, + %{ + "id_token" => + "eyJhbGciOiJub25lIn0." <> + Base.url_encode64( + Jason.encode!(%{ + "sub" => "google-sub", + "email" => "jon@example.test" + }), + padding: false + ) <> ".sig" + }} + end, + print_fun: fn line -> + if String.contains?(line, "https://accounts.example.test/auth") do + [_, state] = Regex.run(~r/state=([^&]+)/, line) + send(parent, {:authorize_state, state}) + end + + send(parent, {:print, line}) + end + )} + ) + end) + + assert is_binary(result) + assert_received {:runtime_result, :ok} + + assert_received {:server_opts, + [ip: {127, 0, 0, 1}, port: 4101, owner: _, callback_path: "/callback"]} + + assert_received {:print, + "OK — got back valid id_token with sub=google-sub and email=jon@example.test"} + end + + test "returns config error when the provider is missing" do + assert {:error, 2, "provider google is not configured under :sigra oauth.providers"} = + Sigra.OAuth.Smoketest.run( + provider: "google", + load_config_fun: fn _opts -> + {:ok, %{secret_key_base: String.duplicate("a", 64), oauth: []}} + end + ) + end + end +end diff --git a/test/sigra/install/scope_template_fields_test.exs b/test/sigra/install/scope_template_fields_test.exs index b1716466..023a0f5a 100644 --- a/test/sigra/install/scope_template_fields_test.exs +++ b/test/sigra/install/scope_template_fields_test.exs @@ -12,7 +12,7 @@ defmodule Sigra.Install.ScopeTemplateFieldsTest do ) describe "scope.ex template fields" do - test "defstruct includes all 4 fields defaulting to nil" do + test "defstruct includes all 6 fields defaulting to nil" do source = File.read!(@scope_path) assert source =~ ~r/active_organization:\s*nil/, @@ -23,9 +23,19 @@ defmodule Sigra.Install.ScopeTemplateFieldsTest do assert source =~ ~r/impersonating_from:\s*nil/, "scope.ex must contain impersonating_from: nil in defstruct" + + # Phase 92 / B2B-02 (Plan 92-02): scope reserves :role and :actor_type + # as additive RBAC fields. :role is the active membership's host-defined + # role atom. :actor_type is reserved Phase 93 prep ONLY — it MUST + # remain nil-only in Phase 92 with no behavior attached anywhere. + assert source =~ ~r/role:\s*nil/, + "scope.ex must contain role: nil in defstruct (Phase 92 Plan 92-02)" + + assert source =~ ~r/actor_type:\s*nil/, + "scope.ex must contain actor_type: nil in defstruct (Phase 92 Plan 92-02 — Phase 93 prep)" end - test "@type t includes all 4 fields" do + test "@type t includes all 6 fields" do source = File.read!(@scope_path) assert source =~ "active_organization: %<%= context_module %>.Organization{} | nil", @@ -36,6 +46,23 @@ defmodule Sigra.Install.ScopeTemplateFieldsTest do assert source =~ ~r/impersonating_from: %<%= schema_alias %>\{\} \| nil/, "scope.ex @type must include impersonating_from" + + # Phase 92 / B2B-02: role + actor_type @type entries. + assert source =~ ~r/role:\s*atom\(\)\s*\|\s*nil/, + "scope.ex @type must include role: atom() | nil (Phase 92 Plan 92-02)" + + assert source =~ ~r/actor_type:\s*atom\(\)\s*\|\s*nil/, + "scope.ex @type must include actor_type: atom() | nil (Phase 92 Plan 92-02 — Phase 93 prep)" + end + + test "moduledoc documents :actor_type as reserved Phase 93 prep with no Phase 92 behavior" do + source = File.read!(@scope_path) + + assert source =~ "actor_type", + "scope.ex moduledoc must reference :actor_type to document the Phase 93 reservation" + + assert source =~ "Phase 93", + "scope.ex moduledoc must explicitly call out Phase 93 as the reservation target so the field cannot accidentally gain behavior in Phase 92" end test "for_user/1 and new/1 remain arity-1" do @@ -45,7 +72,7 @@ defmodule Sigra.Install.ScopeTemplateFieldsTest do assert length(for_user_matches) == 2, "Expected 2 for_user/1 clauses" new_matches = Regex.scan(~r/def new\(/, source) - assert length(new_matches) == 2, "Expected 2 new/1 clauses" + assert length(new_matches) == 3, "Expected 3 new/1 clauses (Phase 93-04 added a service-account attrs-map clause)" end test "moduledoc mentions reserved fields and UPGRADE-v1.2.md" do diff --git a/test/sigra/install/scope_template_invariants_test.exs b/test/sigra/install/scope_template_invariants_test.exs index da435cbd..03c84a24 100644 --- a/test/sigra/install/scope_template_invariants_test.exs +++ b/test/sigra/install/scope_template_invariants_test.exs @@ -61,4 +61,56 @@ defmodule Sigra.Install.ScopeTemplateInvariantsTest do """ end end + + describe "reserved RBAC fields (Phase 92 Plan 92-02)" do + test "rendered Scope struct exposes :role and :actor_type defaulting to nil" do + Code.compile_string("defmodule TestApp.Accounts.User, do: defstruct([:id])") + Code.compile_string("defmodule TestApp.Accounts.Organization, do: defstruct([:id])") + + Code.compile_string( + "defmodule TestApp.Accounts.OrganizationMembership, do: defstruct([:id])" + ) + + rendered = + EEx.eval_file(@template_path, + context_module: "TestApp.Accounts", + schema_alias: "User", + organizations?: true + ) + + [{mod, _bytecode} | _] = Code.compile_string(rendered) + + empty_struct = mod.__struct__() + struct_keys = empty_struct |> Map.keys() + + assert :role in struct_keys, + """ + The rendered Scope struct must contain :role. + Got keys: #{inspect(struct_keys)}. + + :role is part of the Phase 92 / B2B-02 RBAC seam. Generated + host wiring writes the active membership's host-defined role + atom into this field. Removing it breaks the host-owned + Sigra.Authz contract emitted by Plan 92-02. + """ + + assert :actor_type in struct_keys, + """ + The rendered Scope struct must contain :actor_type. + Got keys: #{inspect(struct_keys)}. + + :actor_type is reserved for Phase 93 (M2M tokens / service + accounts) and MUST remain present (and nil) so the v1.x + upgrade path stays additive. Phase 92 attaches NO behavior + to this field — it exists solely so Phase 93 can populate it + without a breaking scope-struct change. + """ + + # Both fields default to nil — Phase 92 must NOT attach behavior to + # actor_type. The default also matches the explicit `field: nil` + # contract documented in the moduledoc. + assert Map.fetch!(empty_struct, :role) == nil + assert Map.fetch!(empty_struct, :actor_type) == nil + end + end end diff --git a/test/sigra/install/service_accounts_generator_test.exs b/test/sigra/install/service_accounts_generator_test.exs new file mode 100644 index 00000000..5c417de6 --- /dev/null +++ b/test/sigra/install/service_accounts_generator_test.exs @@ -0,0 +1,278 @@ +defmodule Sigra.Install.ServiceAccountsGeneratorTest do + @moduledoc """ + Phase 93 D-93-18 emission gating proof. + + The generator's gating for service-account artifacts is asymmetric but + consistently requires both `--jwt` AND `--organizations`: + + * Organizations-feature artifacts (SA schemas, SA migration, SA LiveView, + SA router-injection) require BOTH `--jwt` AND `--organizations`. + The feature module is `:organizations`-gated; within it, SA files are + `:jwt`-gated. See `lib/sigra/install/features/organizations.ex:147,192`. + + * Core-feature `oauth_token_controller.ex` (and its `POST /oauth/token` + router injection) is also gated on BOTH `:jwt` AND `:organizations`, + despite living in the Core feature module. Within `jwt_files/2` + (core.ex:362-383), the function checks `organizations?` before adding the + controller; the route injection in `injections/1` (core.ex:734-752) also + checks `organizations?`. + + NOTE — deviation from 93-08-PLAN.md: + + 1. The plan stated the OAuth controller was `:jwt`-only gated (Core feature). + The actual generator at `lib/sigra/install/features/core.ex:375,735` + adds an `organizations?` guard in BOTH the file list AND the route + injection. This test reflects the **actual** generator behaviour. + + 2. The plan stated "default install (no extra flags)" emits SA artifacts. + The actual default in `sigra.install.ex:62` is `jwt: false`. JWT must + be explicitly enabled via `--jwt`. This test passes `["--jwt"]` to + exercise the nominal SA-enabled install path. The SUMMARY documents + both divergences. + + Three variants are exercised via real `mix sigra.install` invocations through + `Sigra.Test.InstallFixture` (the same harness used by `test/upgrade_test.exs` + for non-default install flags): + + 1. `--jwt` (organizations default-on) -> SA artifacts emitted; OAuth controller emitted + 2. `--jwt --no-organizations` -> ALL SA artifacts suppressed; + OAuth controller ALSO suppressed + (organizations? guard in both + core.ex:375 file list and core.ex:735 + route injection) + 3. no flags / `--no-jwt` -> ALL SA artifacts AND OAuth controller + suppressed (jwt defaults to false) + + This file is a SIBLING to `test/sigra/install/golden_diff_test.exs` rather + than an extension, because (a) the golden test compares ONE canonical tree + byte-for-byte, and (b) bolting per-flag variants into it would conflate + regression detection with feature gating. + + All three variants are tagged `:integration` and may be skipped locally for + fast feedback loops (`mix test --exclude integration`). + """ + + use ExUnit.Case, async: false + + alias Sigra.Test.InstallFixture + + @moduletag :integration + @moduletag :slow + @moduletag :generator_gating + @moduletag timeout: 600_000 + + # Derive the app module prefix from the app directory name. InstallFixture + # always sets the app name from the app_name: opt; app_dir is + # /tmp/sigra_golden_/, so the basename IS the module prefix. + defp derive_app_module(app_dir) do + app_dir + |> Path.basename() + |> String.replace("-", "_") + end + + defp unique_app_name(prefix), + do: "sa_gen_#{prefix}_#{:erlang.unique_integer([:positive])}" + + describe "service-account emission gating (D-93-18)" do + test "--jwt install (organizations default-on) emits all SA artifacts and routes" do + {:ok, %{app_dir: app_dir}} = + InstallFixture.setup_tmp_app_without_install(app_name: unique_app_name("jwt_on")) + + on_exit(fn -> File.rm_rf!(Path.dirname(app_dir)) end) + + {:ok, _stdout} = InstallFixture.run_sigra_install(app_dir, ["--jwt"]) + + app_module = derive_app_module(app_dir) + + # SA schemas exist: + assert File.regular?(Path.join(app_dir, "lib/#{app_module}/accounts/service_account.ex")), + "Expected lib/#{app_module}/accounts/service_account.ex to exist after --jwt install" + + assert File.regular?( + Path.join(app_dir, "lib/#{app_module}/accounts/service_account_credential.ex") + ), + "Expected lib/#{app_module}/accounts/service_account_credential.ex to exist" + + # SA migration exists (timestamp-prefixed): + migrations = + Path.join(app_dir, "priv/repo/migrations") + |> File.ls!() + |> Enum.filter(&String.ends_with?(&1, "_create_service_accounts.exs")) + + assert length(migrations) == 1, + "Expected exactly one *_create_service_accounts.exs migration, got: #{inspect(migrations)}" + + # SA LiveView exists: + assert File.regular?( + Path.join( + app_dir, + "lib/#{app_module}_web/live/organization_service_accounts_live.ex" + ) + ), + "Expected organization_service_accounts_live.ex to exist" + + # OAuth token controller exists: + assert File.regular?( + Path.join(app_dir, "lib/#{app_module}_web/controllers/oauth_token_controller.ex") + ), + "Expected oauth_token_controller.ex to exist" + + # Router contains the SA route AND /oauth/token route: + router_src = File.read!(Path.join(app_dir, "lib/#{app_module}_web/router.ex")) + + assert router_src =~ "OrganizationServiceAccountsLive", + "Expected router to reference OrganizationServiceAccountsLive" + + assert router_src =~ ~r/live\s+"\/service-accounts"/, + "Expected /service-accounts LiveView route in router" + + assert router_src =~ "OAuthTokenController", + "Expected router to reference OAuthTokenController" + + assert router_src =~ ~r/post\s+"\/oauth\/token"/, + "Expected POST /oauth/token route in router" + end + + test "--jwt --no-organizations install suppresses ALL SA artifacts including OAuth controller (both organizations? guards in core.ex fire)" do + {:ok, %{app_dir: app_dir}} = + InstallFixture.setup_tmp_app_without_install(app_name: unique_app_name("jwt_noorg")) + + on_exit(fn -> File.rm_rf!(Path.dirname(app_dir)) end) + + {:ok, _stdout} = InstallFixture.run_sigra_install(app_dir, ["--jwt", "--no-organizations"]) + + app_module = derive_app_module(app_dir) + + # Organizations-owned SA schema files MUST NOT exist: + refute File.exists?(Path.join(app_dir, "lib/#{app_module}/accounts/service_account.ex")), + "service_account.ex must be absent under --jwt --no-organizations" + + refute File.exists?( + Path.join(app_dir, "lib/#{app_module}/accounts/service_account_credential.ex") + ), + "service_account_credential.ex must be absent under --jwt --no-organizations" + + # SA migration MUST NOT exist: + migrations_dir = Path.join(app_dir, "priv/repo/migrations") + + if File.dir?(migrations_dir) do + sa_migrations = + migrations_dir + |> File.ls!() + |> Enum.filter(&String.ends_with?(&1, "_create_service_accounts.exs")) + + assert sa_migrations == [], + "Expected NO *_create_service_accounts.exs migration under --jwt --no-organizations, got: #{inspect(sa_migrations)}" + end + + # SA LiveView MUST NOT exist: + refute File.exists?( + Path.join( + app_dir, + "lib/#{app_module}_web/live/organization_service_accounts_live.ex" + ) + ), + "organization_service_accounts_live.ex must be absent under --jwt --no-organizations" + + # Core-feature oauth_token_controller.ex MUST also be absent. + # DEVIATION FROM PLAN: The plan stated this file is `:jwt`-only gated + # (Core feature, core.ex:362), but the actual code at core.ex:375 checks + # `organizations?` within `jwt_files/2`, AND core.ex:735 checks + # `organizations?` in the route injection. Under `--no-organizations` with + # `:jwt` still on, both the file AND the route injection are suppressed. + refute File.exists?( + Path.join(app_dir, "lib/#{app_module}_web/controllers/oauth_token_controller.ex") + ), + "oauth_token_controller.ex must be ABSENT under --jwt --no-organizations (core.ex:375 + core.ex:735 organizations? guards)" + + # Router MUST NOT contain Organizations SA route OR OAuth route: + router_path = Path.join(app_dir, "lib/#{app_module}_web/router.ex") + + if File.regular?(router_path) do + router_src = File.read!(router_path) + + refute router_src =~ "OrganizationServiceAccountsLive", + "Router under --jwt --no-organizations must not reference OrganizationServiceAccountsLive" + + refute router_src =~ ~r/live\s+"\/service-accounts"/, + "Router under --jwt --no-organizations must not declare /service-accounts route" + + refute router_src =~ "OAuthTokenController", + "Router under --jwt --no-organizations must not reference OAuthTokenController (core.ex:735 organizations? guard)" + + refute router_src =~ ~r/post\s+"\/oauth\/token"/, + "Router under --jwt --no-organizations must not declare POST /oauth/token route" + end + end + + test "--no-jwt (default) install suppresses ALL SA artifacts AND the /oauth/token route" do + {:ok, %{app_dir: app_dir}} = + InstallFixture.setup_tmp_app_without_install(app_name: unique_app_name("no_jwt")) + + on_exit(fn -> File.rm_rf!(Path.dirname(app_dir)) end) + + # jwt defaults to false in sigra.install.ex:62; passing no jwt flag (or + # --no-jwt explicitly) exercises the same code path. + {:ok, _stdout} = InstallFixture.run_sigra_install(app_dir, ["--no-jwt"]) + + app_module = derive_app_module(app_dir) + + # SA schema files MUST NOT exist: + refute File.exists?(Path.join(app_dir, "lib/#{app_module}/accounts/service_account.ex")), + "service_account.ex must be absent under --no-jwt" + + refute File.exists?( + Path.join(app_dir, "lib/#{app_module}/accounts/service_account_credential.ex") + ), + "service_account_credential.ex must be absent under --no-jwt" + + # SA LiveView MUST NOT exist: + refute File.exists?( + Path.join( + app_dir, + "lib/#{app_module}_web/live/organization_service_accounts_live.ex" + ) + ), + "organization_service_accounts_live.ex must be absent under --no-jwt" + + # SA migration MUST NOT exist: + migrations_dir = Path.join(app_dir, "priv/repo/migrations") + + if File.dir?(migrations_dir) do + sa_migrations = + migrations_dir + |> File.ls!() + |> Enum.filter(&String.ends_with?(&1, "_create_service_accounts.exs")) + + assert sa_migrations == [], + "Expected NO *_create_service_accounts.exs migration under --no-jwt" + end + + # /oauth/token controller AND route MUST be absent under --no-jwt + # (per lib/sigra/install/features/core.ex:362 `defp jwt_files(_, false), do: []`): + refute File.exists?( + Path.join(app_dir, "lib/#{app_module}_web/controllers/oauth_token_controller.ex") + ), + "oauth_token_controller.ex must be absent under --no-jwt (core.ex:362 jwt_files guard)" + + # Router assertions: + router_path = Path.join(app_dir, "lib/#{app_module}_web/router.ex") + + if File.regular?(router_path) do + router_src = File.read!(router_path) + + refute router_src =~ "OrganizationServiceAccountsLive", + "Router under --no-jwt must not reference OrganizationServiceAccountsLive" + + refute router_src =~ ~r/live\s+"\/service-accounts"/, + "Router under --no-jwt must not declare /service-accounts route" + + refute router_src =~ ~r/post\s+"\/oauth\/token"/, + "Router under --no-jwt must not declare /oauth/token POST route" + + refute router_src =~ "OAuthTokenController", + "Router under --no-jwt must not reference OAuthTokenController" + end + end + end +end diff --git a/test/sigra/install/templates_layout_test.exs b/test/sigra/install/templates_layout_test.exs index 4a554abc..47c20e21 100644 --- a/test/sigra/install/templates_layout_test.exs +++ b/test/sigra/install/templates_layout_test.exs @@ -40,6 +40,7 @@ defmodule Sigra.Install.TemplatesLayoutTest do mfa_settings_html.ex mfa_settings_live.ex migration.exs + oauth_token_controller.ex reactivation_live.ex registration_html.ex registration_live.ex @@ -50,6 +51,7 @@ defmodule Sigra.Install.TemplatesLayoutTest do session_controller.ex session_live.ex settings_live.ex + sigra_authz.ex sudo_controller.ex sudo_html.ex token_controller.ex @@ -61,14 +63,23 @@ defmodule Sigra.Install.TemplatesLayoutTest do user_session.ex user_token.ex vault.ex + webhook_delivery.ex + webhook_delivery_attempt.ex + webhook_event.ex + webhook_migration.exs + webhook_subscription.ex ) @core_dir "priv/templates/sigra.install/core" @top_dir "priv/templates/sigra.install" test "templates have been relocated under core/ subdirectory" do + # Phase 92 / Plan 92-02: +1 (core/sigra_authz.ex). Phase 97/98: + # +5 webhook templates/migration in core (subscription/event/delivery/ + # attempt + migration) because webhook scaffolding is now part of the + # core installer surface. core_files = @core_dir |> File.ls!() |> Enum.sort() - assert length(core_files) == 49 + assert length(core_files) == 56 assert core_files == Enum.sort(@manifest_post_move) end diff --git a/test/sigra/jwt/signer_test.exs b/test/sigra/jwt/signer_test.exs index 17fa97d9..2dc1ac8d 100644 --- a/test/sigra/jwt/signer_test.exs +++ b/test/sigra/jwt/signer_test.exs @@ -2,6 +2,7 @@ defmodule Sigra.JWT.SignerTest do use ExUnit.Case, async: true alias Sigra.JWT.Signer + alias Sigra.OptionalDeps defp config(overrides \\ []) do base = [ @@ -21,6 +22,11 @@ defmodule Sigra.JWT.SignerTest do test "returns :ok when Joken is loaded" do assert :ok = Signer.ensure_joken!() end + + test "uses the registry-backed jwt contract" do + assert %{dependency: :joken, enforced?: true, support_tier: :phase_95} = + OptionalDeps.feature_spec!(:jwt) + end end describe "create_signer/1 with HS256" do diff --git a/test/sigra/jwt_test.exs b/test/sigra/jwt_test.exs index c77c63e3..d6d57f31 100644 --- a/test/sigra/jwt_test.exs +++ b/test/sigra/jwt_test.exs @@ -355,4 +355,292 @@ defmodule Sigra.JWTTest do assert :ok = JWT.revoke_refresh(cfg, "some-token", token_opts()) end end + + # --------------------------------------------------------------------------- + # Service-account tokens + # Inline schemas (Pattern B: Mox-mocked repo; FK integrity on actor_id is + # not validated, so synthetic UUIDs are appropriate for the user scope). + # --------------------------------------------------------------------------- + + defmodule SASchema do + @moduledoc false + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_accounts" do + field :name, :string + field :scopes, {:array, :string}, default: [] + field :role, :string + field :token_epoch, :integer, default: 0 + field :revoked_at, :utc_datetime + field :organization_id, :binary_id + end + + def changeset(struct, attrs) do + import Ecto.Changeset + + struct + |> cast(attrs, [:id, :name, :scopes, :role, :token_epoch, :organization_id]) + |> validate_required([:name, :organization_id]) + end + end + + defmodule CredentialSchema do + @moduledoc false + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_account_credentials" do + field :client_id, :string + field :hashed_client_secret, :binary + field :expires_at, :utc_datetime + field :last_used_at, :utc_datetime + field :revoked_at, :utc_datetime + field :service_account_id, :binary_id + end + + def changeset(struct, attrs) do + import Ecto.Changeset + + struct + |> cast(attrs, [:id, :client_id, :hashed_client_secret, :expires_at, :service_account_id]) + |> validate_required([:client_id, :hashed_client_secret, :service_account_id]) + end + end + + defp sa_config(overrides \\ []) do + base = [ + repo: Sigra.MockRepo, + user_schema: Sigra.TestUser, + otp_app: :test_app, + secret_key_base: @secret_key_base, + service_accounts: [ + service_account_schema: Sigra.JWTTest.SASchema, + service_account_credential_schema: Sigra.JWTTest.CredentialSchema, + client_id_prefix: "sigra_sa_", + client_id_byte_size: 24 + ], + jwt: [ + enabled: true, + algorithm: "HS256", + issuer: "test_issuer", + access_ttl: 900, + client_credentials_access_ttl: 3600, + refresh: true, + verify_epoch: true + ] + ] + + Sigra.Config.new!(Keyword.merge(base, overrides)) + end + + defp make_sa(overrides \\ %{}) do + id = Ecto.UUID.generate() + org_id = Ecto.UUID.generate() + + Map.merge( + %Sigra.JWTTest.SASchema{ + id: id, + name: "CI Service Account", + scopes: ["deploy:read", "deploy:write"], + role: "ci", + token_epoch: 0, + revoked_at: nil, + organization_id: org_id + }, + overrides + ) + end + + defp make_credential(sa_id, overrides \\ %{}) do + id = Ecto.UUID.generate() + client_id = "sigra_sa_" <> String.slice(Ecto.UUID.generate(), 0, 24) + + Map.merge( + %Sigra.JWTTest.CredentialSchema{ + id: id, + client_id: client_id, + hashed_client_secret: :crypto.hash(:sha256, "secret"), + expires_at: nil, + last_used_at: nil, + revoked_at: nil, + service_account_id: sa_id + }, + overrides + ) + end + + describe "service-account tokens" do + setup do + sa = make_sa() + cred = make_credential(sa.id) + + # Pattern B synthetic UUID — appropriate because parent uses Mox-mocked + # repo and FK integrity on actor_id is NOT validated. + sa_scope = %{ + user: %{id: Ecto.UUID.generate()}, + active_organization: %{id: sa.organization_id} + } + + {:ok, sa: sa, cred: cred, sa_scope: sa_scope} + end + + test "generate_service_account_tokens/3 returns {:ok, %{access_token, expires_in, scopes}} with no refresh_token", + %{sa: sa, cred: cred} do + cfg = sa_config() + + # Mock transaction for append_token_issued_audit + # (Multi.update :credential_last_used + Audit.log_multi_safe) + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + updated_cred = %{cred | last_used_at: DateTime.utc_now() |> DateTime.truncate(:second)} + {:ok, %{credential_last_used: updated_cred, audit_service_account_token_issued: nil}} + end) + + assert {:ok, %{access_token: access_token, expires_in: 3600} = response} = + JWT.generate_service_account_tokens(cfg, sa, cred) + + assert is_binary(access_token) + # D-93-07: no refresh tokens on client_credentials. The key is present but nil. + assert Map.get(response, :refresh_token) == nil + end + + test "service-account access token contains actor_type, service_account_id, credential_id, org_id, scopes, epoch, sub claims", + %{sa: sa, cred: cred} do + cfg = sa_config() + + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + updated_cred = %{cred | last_used_at: DateTime.utc_now() |> DateTime.truncate(:second)} + {:ok, %{credential_last_used: updated_cred, audit_service_account_token_issued: nil}} + end) + |> expect(:get, fn Sigra.JWTTest.SASchema, _id -> sa end) + |> expect(:get, fn Sigra.JWTTest.CredentialSchema, _id -> cred end) + + {:ok, %{access_token: jwt}} = JWT.generate_service_account_tokens(cfg, sa, cred) + {:ok, claims} = JWT.verify_access(cfg, jwt) + + # D-93-10: all claims present + assert claims["actor_type"] == "service_account" + assert claims["service_account_id"] == sa.id + assert claims["credential_id"] == cred.id + assert claims["org_id"] == sa.organization_id + assert claims["scopes"] == sa.scopes + assert claims["epoch"] == sa.token_epoch + # D-93-09: sub == credential.client_id + assert claims["sub"] == cred.client_id + end + + test "verify_access/2 returns {:ok, claims} for a fresh SA token (parity with user path)", + %{sa: sa, cred: cred} do + cfg = sa_config() + + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + updated_cred = %{cred | last_used_at: DateTime.utc_now() |> DateTime.truncate(:second)} + {:ok, %{credential_last_used: updated_cred, audit_service_account_token_issued: nil}} + end) + |> expect(:get, fn Sigra.JWTTest.SASchema, _id -> sa end) + |> expect(:get, fn Sigra.JWTTest.CredentialSchema, _id -> cred end) + + {:ok, %{access_token: jwt}} = JWT.generate_service_account_tokens(cfg, sa, cred) + assert {:ok, %{"actor_type" => "service_account"}} = JWT.verify_access(cfg, jwt) + end + + test "verify_access/2 returns {:error, :epoch_mismatch} after Sigra.ServiceAccounts.revoke/3 bumps token_epoch", + %{sa: sa, cred: cred, sa_scope: sa_scope} do + cfg = sa_config() + + # Step 1: generate token (mock transaction for issuance audit) + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + updated_cred = %{cred | last_used_at: DateTime.utc_now() |> DateTime.truncate(:second)} + {:ok, %{credential_last_used: updated_cred, audit_service_account_token_issued: nil}} + end) + + {:ok, %{access_token: jwt}} = JWT.generate_service_account_tokens(cfg, sa, cred) + + # Step 2: revoke the SA (bumps token_epoch, sets revoked_at) + # Sigra.ServiceAccounts.revoke/3 runs Multi.update + audit — MUST pass non-nil user scope. + # Pattern B: synthetic UUID in sa_scope (fixture); ensure_user_scope!/2 at + # lib/sigra/service_accounts.ex:347 raises ArgumentError on %{user: nil, ...}. + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + revoked_sa = %{sa | token_epoch: sa.token_epoch + 1, revoked_at: DateTime.utc_now()} + {:ok, %{service_account: revoked_sa, audit_service_account_revoke: nil}} + end) + + assert {:ok, _revoked_sa} = Sigra.ServiceAccounts.revoke(cfg, sa_scope, sa) + + # Step 3: verify_access uses updated SA state. + # Mock returns the bumped-epoch SA to simulate DB state after revoke. + revoked_sa = %{sa | token_epoch: sa.token_epoch + 1, revoked_at: DateTime.utc_now()} + + Sigra.MockRepo + |> expect(:get, fn Sigra.JWTTest.SASchema, _id -> revoked_sa end) + |> expect(:get, fn Sigra.JWTTest.CredentialSchema, _id -> cred end) + + # D-93-12: epoch mismatch after revocation invalidates live token. + assert {:error, :epoch_mismatch} = JWT.verify_access(cfg, jwt) + end + + test "verify_access/2 fails after Sigra.ServiceAccounts.revoke_credential/3 (per-credential revoke)", + %{sa: sa, cred: cred, sa_scope: sa_scope} do + cfg = sa_config() + + # Step 1: generate token + Sigra.MockRepo + |> expect(:transaction, fn _multi -> + updated_cred = %{cred | last_used_at: DateTime.utc_now() |> DateTime.truncate(:second)} + {:ok, %{credential_last_used: updated_cred, audit_service_account_token_issued: nil}} + end) + + {:ok, %{access_token: jwt}} = JWT.generate_service_account_tokens(cfg, sa, cred) + + # Step 2: revoke the credential. + # revoke_credential/3 calls load_service_account (get) + Multi.update + audit. + # Pattern B: synthetic UUID in sa_scope; ensure_user_scope!/2 requires non-nil user. + Sigra.MockRepo + |> expect(:get, fn Sigra.JWTTest.SASchema, _id -> sa end) + |> expect(:transaction, fn _multi -> + revoked_cred = %{cred | revoked_at: DateTime.utc_now()} + {:ok, %{credential: revoked_cred, audit_service_account_credential_revoke: nil}} + end) + + assert {:ok, _revoked_cred} = Sigra.ServiceAccounts.revoke_credential(cfg, sa_scope, cred) + + # Step 3: verify_access returns error — credential has revoked_at set. + # verify_service_account_epoch checks `credential.revoked_at == nil`; when + # revoked it returns {:error, :epoch_mismatch} (single error atom for all + # SA verify failures in lib/sigra/jwt.ex lines 491-499). + revoked_cred = %{cred | revoked_at: DateTime.utc_now()} + + Sigra.MockRepo + |> expect(:get, fn Sigra.JWTTest.SASchema, _id -> sa end) + |> expect(:get, fn Sigra.JWTTest.CredentialSchema, _id -> revoked_cred end) + + # Lock: result MUST be {:error, _atom}, NOT {:ok, _claims}. + assert {:error, _atom} = JWT.verify_access(cfg, jwt) + end + + test "user JWT path is unaffected (parity regression guard)" do + # Reuses the user-token path. Generates a user token, verifies it, and + # asserts that actor_type is NOT 'service_account' and verify returns {:ok, _}. + # This pins the parity invariant: the SA describe block is purely additive. + user = test_user(%{token_epoch: 0}) + cfg = config(jwt: [enabled: true, algorithm: "HS256", issuer: "test_issuer", access_ttl: 900, refresh: false, verify_epoch: true]) + + {:ok, tokens} = JWT.generate_tokens(cfg, user, ["read:users"], token_opts()) + + Sigra.MockRepo + |> expect(:get, fn Sigra.TestUser, "42" -> + %{id: 42, token_epoch: 0} + end) + + assert {:ok, claims} = JWT.verify_access(cfg, tokens.access_token) + # User path: actor_type is nil or absent — NOT "service_account" + refute claims["actor_type"] == "service_account" + assert claims["sub"] == "42" + end + end end diff --git a/test/sigra/live_view/require_org_mfa_test.exs b/test/sigra/live_view/require_org_mfa_test.exs new file mode 100644 index 00000000..f4f0839e --- /dev/null +++ b/test/sigra/live_view/require_org_mfa_test.exs @@ -0,0 +1,69 @@ +defmodule Sigra.LiveView.RequireOrgMfaTest do + use ExUnit.Case, async: true + + alias Sigra.LiveView.RequireOrgMfa + + defmodule TestUser do + defstruct [:id] + end + + defmodule TestOrg do + defstruct [:slug, :enforce_mfa_for_members] + end + + defmodule TestScope do + defstruct [:user, :active_organization] + end + + defp fake_socket(assigns), do: %{assigns: assigns} + + defp scope(attrs \\ %{}) do + Map.merge( + %TestScope{ + user: %TestUser{id: "u1"}, + active_organization: %TestOrg{slug: "acme", enforce_mfa_for_members: false} + }, + attrs + ) + end + + test "nil scope passes through" do + socket = fake_socket(%{current_scope: nil}) + + assert {:cont, _socket} = + RequireOrgMfa.on_mount([mfa_check_fn: fn _ -> false end], %{}, %{}, socket) + end + + test "policy disabled passes through" do + socket = fake_socket(%{current_scope: scope()}) + + assert {:cont, _socket} = + RequireOrgMfa.on_mount([mfa_check_fn: fn _ -> false end], %{}, %{}, socket) + end + + test "enrolled user passes through" do + org = %TestOrg{slug: "acme", enforce_mfa_for_members: true} + socket = fake_socket(%{current_scope: scope(%{active_organization: org})}) + + assert {:cont, _socket} = + RequireOrgMfa.on_mount([mfa_check_fn: fn _ -> true end], %{}, %{}, socket) + end + + test "unenrolled user halts with redirect assign" do + org = %TestOrg{slug: "acme", enforce_mfa_for_members: true} + socket = fake_socket(%{current_scope: scope(%{active_organization: org})}) + + assert {:halt, halted} = + RequireOrgMfa.on_mount([mfa_check_fn: fn _ -> false end], %{}, %{}, socket) + + assert halted.assigns[:sigra_redirect_to] == "/users/settings/mfa" + end + + test "missing :mfa_check_fn raises" do + socket = fake_socket(%{current_scope: scope()}) + + assert_raise KeyError, fn -> + RequireOrgMfa.on_mount([], %{}, %{}, socket) + end + end +end diff --git a/test/sigra/mfa_test.exs b/test/sigra/mfa_test.exs index d63ab860..b543c5b3 100644 --- a/test/sigra/mfa_test.exs +++ b/test/sigra/mfa_test.exs @@ -2,6 +2,7 @@ defmodule Sigra.MFATest do use ExUnit.Case, async: true alias Sigra.MFA + alias Sigra.OptionalDeps.MissingDependencyError describe "enroll/2" do test "returns {:ok, map} with secret, otpauth_uri, svg, and raw_secret" do @@ -10,9 +11,16 @@ defmodule Sigra.MFATest do assert {:ok, enrollment} = MFA.enroll(config) assert is_binary(enrollment.secret) assert is_binary(enrollment.otpauth_uri) + assert is_binary(enrollment.svg) assert is_binary(enrollment.raw_secret) - # svg may be nil if eqrcode not loaded, or a string if available - assert is_nil(enrollment.svg) or is_binary(enrollment.svg) + end + + test "raises a tagged missing dependency error when QR rendering is requested without eqrcode" do + config = build_config() + + assert_raise MissingDependencyError, ~r/optional dependency missing for totp_qr/, fn -> + MFA.enroll(config, dependency_loaded?: fn _spec -> false end) + end end test "otpauth_uri contains issuer and account" do diff --git a/test/sigra/oauth/oauth_audit_atomicity_test.exs b/test/sigra/oauth/oauth_audit_atomicity_test.exs index 2a8b3044..07ec033a 100644 --- a/test/sigra/oauth/oauth_audit_atomicity_test.exs +++ b/test/sigra/oauth/oauth_audit_atomicity_test.exs @@ -191,4 +191,131 @@ defmodule Sigra.OAuthAuditAtomicityTest do ) end end + + defmodule MockStrategy do + @moduledoc false + def refresh(_config, "refresh_me", _config2) do + {:ok, %{"access_token" => "new_acc", "refresh_token" => "new_ref", "expires_in" => 3600}} + end + + def refresh(_config, "bad_refresh", _config2) do + {:error, %Assent.InvalidResponseError{response: %{body: %{"error" => "invalid_grant"}}}} + end + end + + defp refresh_oauth_config(repo, site_url) do + repo + |> oauth_config() + |> Map.put(:secret_key_base, String.duplicate("a", 64)) + |> Map.put(:oauth, [ + enabled: true, + providers: [ + mock: [client_id: "test_id", client_secret: "test_secret", strategy: MockStrategy, base_url: site_url, token_url: "#{site_url}/token"] + ] + ]) + end + + describe "refresh token atomicity" do + setup do + TestServer.start() + %{site_url: TestServer.url()} + end + + test "rolls back token rotation when oauth.token_refreshed audit insert is rejected", %{repo: repo, site_url: site_url} do + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 200, + Jason.encode!(%{ + "access_token" => "new_acc", + "refresh_token" => "new_ref", + "expires_in" => 3600, + "token_type" => "Bearer" + }) + ) + end + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + ALTER TABLE audit_events + ADD CONSTRAINT oauth_audit_ref_guard CHECK (action <> 'oauth.token_refreshed') + """, + [] + ) + + config = refresh_oauth_config(repo, site_url) + user = repo.insert!(%OAuthUser{email: "test@example.com"}) + + identity = + repo.insert!(%OAuthIdentity{ + user_id: user.id, + provider: "mock", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now() |> DateTime.truncate(:second), -3600, :second), + last_used_at: DateTime.utc_now() |> DateTime.truncate(:second) + }) + + try do + # We catch the exception or match the returned {:error, :temporarily_unavailable} + # since persist_refresh rescues repo.transaction failures and returns an error + # rather than raising (except Ecto.ConstraintError which raises if not mapped). + assert_raise Ecto.ConstraintError, fn -> + Sigra.OAuth.refresh_token(config, Sigra.Identity.from_schema(identity)) + end + + reloaded = repo.get!(OAuthIdentity, identity.id) + assert reloaded.encrypted_access_token == "expired" + assert reloaded.encrypted_refresh_token == "refresh_me" + assert [] == repo.all(from(a in AuditTestEvent, where: like(a.action, "oauth.%"))) + after + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS oauth_audit_ref_guard", + [] + ) + end + end + + test "leaves persistence unchanged and returns classified error on invalid_grant", %{repo: repo, site_url: site_url} do + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 400, + Jason.encode!(%{"error" => "invalid_grant"}) + ) + end + ) + + config = refresh_oauth_config(repo, site_url) + user = repo.insert!(%OAuthUser{email: "test@example.com"}) + + identity = + repo.insert!(%OAuthIdentity{ + user_id: user.id, + provider: "mock", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "bad_refresh", + token_expires_at: DateTime.add(DateTime.utc_now() |> DateTime.truncate(:second), -3600, :second), + last_used_at: DateTime.utc_now() |> DateTime.truncate(:second) + }) + + assert {:error, :reauth_required} = Sigra.OAuth.refresh_token(config, Sigra.Identity.from_schema(identity)) + + reloaded = repo.get!(OAuthIdentity, identity.id) + assert reloaded.encrypted_access_token == "expired" + assert reloaded.encrypted_refresh_token == "bad_refresh" + assert [] == repo.all(from(a in AuditTestEvent, where: like(a.action, "oauth.%"))) + end + end end diff --git a/test/sigra/oauth/oauth_ceremony_audit_test.exs b/test/sigra/oauth/oauth_ceremony_audit_test.exs index 62adda84..0194acaf 100644 --- a/test/sigra/oauth/oauth_ceremony_audit_test.exs +++ b/test/sigra/oauth/oauth_ceremony_audit_test.exs @@ -236,4 +236,61 @@ defmodule Sigra.OAuthCeremonyAuditTest do }) end end + + describe "refresh" do + test "persists oauth.token_refreshed after successful refresh", %{repo: repo} do + TestServer.start() + site_url = TestServer.url() + + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 200, + Jason.encode!(%{ + "access_token" => "new_tok", + "refresh_token" => "new_ref", + "expires_in" => 3600, + "token_type" => "Bearer" + }) + ) + end + ) + + config = + repo + |> oauth_config() + |> Map.put(:secret_key_base, String.duplicate("a", 64)) + |> Map.put(:oauth, [ + enabled: true, + providers: [ + mock: [client_id: "test_id", client_secret: "test_secret", strategy: MockStrategy, base_url: site_url, token_url: "#{site_url}/token"] + ], + ]) + + user = repo.insert!(%OAuthUser{email: "test@example.com"}) + + identity = + repo.insert!(%OAuthIdentity{ + user_id: user.id, + provider: "mock", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now() |> DateTime.truncate(:second), -3600, :second), + last_used_at: DateTime.utc_now() |> DateTime.truncate(:second) + }) + + assert {:ok, _} = OAuth.refresh_token(config, Sigra.Identity.from_schema(identity)) + + Assertions.assert_audit_fields(repo, AuditTestEvent, %{ + action: "oauth.token_refreshed", + actor_id: user.id, + target_id: user.id, + metadata: %{"provider" => "mock", "refresh_token_rotated" => true} + }) + end + end end diff --git a/test/sigra/oauth/oauth_test.exs b/test/sigra/oauth/oauth_test.exs index c0e83b52..e4ddc5a6 100644 --- a/test/sigra/oauth/oauth_test.exs +++ b/test/sigra/oauth/oauth_test.exs @@ -37,6 +37,10 @@ defmodule Sigra.OAuthTest do } }} end + + def refresh(_provider_config, _refresh_token, _config) do + {:ok, %{"access_token" => "new_tok", "refresh_token" => "new_ref", "expires_in" => 3600}} + end end defmodule FailingStrategy do @@ -47,6 +51,10 @@ defmodule Sigra.OAuthTest do def callback(_config, _params) do {:error, %{reason: :provider_error}} end + + def refresh(_provider_config, _refresh_token, _config) do + {:error, %Assent.RequestError{response: %{"error" => "invalid_grant"}}} + end end describe "authorize_url/3" do @@ -279,6 +287,181 @@ defmodule Sigra.OAuthTest do assert {:ok, %{access_token: "valid_token"}} = OAuth.get_tokens(config, identity) end + + test "maps typed failures from refresh_token/2 back to :token_expired for compatibility" do + config = + build_config( + providers: [ + failing: [client_id: "x", client_secret: "y", strategy: FailingStrategy] + ] + ) + + identity = %Sigra.Identity{ + id: 1, + user_id: 1, + provider: "failing", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now(), -3600, :second) + } + + assert {:error, :token_expired} = OAuth.get_tokens(config, identity) + end + end + + describe "refresh_token/2" do + test "returns existing tokens when not expired" do + config = build_config() + + identity = %Sigra.Identity{ + id: 1, + user_id: 1, + provider: "google", + provider_uid: "uid_123", + encrypted_access_token: "valid", + encrypted_refresh_token: "refresh", + token_expires_at: DateTime.add(DateTime.utc_now(), 3600, :second) + } + + assert {:ok, %{access_token: "valid", refresh_token: "refresh"}} = + OAuth.refresh_token(config, identity) + end + + test "returns :reauth_required when token expired and no refresh token" do + config = build_config() + + identity = %Sigra.Identity{ + id: 1, + user_id: 1, + provider: "google", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: nil, + token_expires_at: DateTime.add(DateTime.utc_now(), -3600, :second) + } + + assert {:error, :reauth_required} = OAuth.refresh_token(config, identity) + end + + test "calls refresh on strategy and returns typed outcome on success" do + TestServer.start() + site_url = TestServer.url() + + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 200, + Jason.encode!(%{ + "access_token" => "new_tok", + "refresh_token" => "new_ref", + "expires_in" => 3600, + "token_type" => "Bearer" + }) + ) + end + ) + + config = + build_config( + providers: [ + mock: [client_id: "test_id", client_secret: "test_secret", strategy: MockStrategy, base_url: site_url, token_url: "#{site_url}/token"] + ] + ) + + identity = %Sigra.Identity{ + id: 1, + user_id: 1, + provider: "mock", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now(), -3600, :second) + } + + assert {:ok, %{access_token: "new_tok", refresh_token: "new_ref"}} = + OAuth.refresh_token(config, identity) + end + + test "preserves existing refresh_token when provider omits it on refresh" do + TestServer.start() + site_url = TestServer.url() + + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 200, + Jason.encode!(%{ + "access_token" => "new_tok", + "expires_in" => 3600, + "token_type" => "Bearer" + }) + ) + end + ) + + config = + build_config( + providers: [ + mock: [client_id: "test_id", client_secret: "test_secret", strategy: MockStrategy, base_url: site_url, token_url: "#{site_url}/token"] + ] + ) + + identity = %Sigra.Identity{ + id: 1, + user_id: 1, + provider: "mock", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now(), -3600, :second) + } + + assert {:ok, %{access_token: "new_tok", refresh_token: "refresh_me"}} = + OAuth.refresh_token(config, identity) + end + + test "calls refresh on strategy and returns classified error on failure" do + TestServer.start() + site_url = TestServer.url() + + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 400, + Jason.encode!(%{"error" => "invalid_grant"}) + ) + end + ) + + config = + build_config( + providers: [ + failing: [client_id: "x", client_secret: "y", strategy: FailingStrategy, base_url: site_url, token_url: "#{site_url}/token"] + ] + ) + + identity = %Sigra.Identity{ + id: 1, + user_id: 1, + provider: "failing", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now(), -3600, :second) + } + + assert {:error, :reauth_required} = OAuth.refresh_token(config, identity) + end end describe "compute_token_expires_at/1" do diff --git a/test/sigra/oauth/refresh_test.exs b/test/sigra/oauth/refresh_test.exs new file mode 100644 index 00000000..b51e115b --- /dev/null +++ b/test/sigra/oauth/refresh_test.exs @@ -0,0 +1,217 @@ +defmodule Sigra.OAuth.RefreshTest do + @moduledoc """ + Dedicated Postgres suite for OAuth refresh functionality across non-Google providers. + Verifies GitHub, Apple, Facebook, and Generic wrappers correctly dispatch refresh, + handle rotation, and classify failures. + """ + + use ExUnit.Case, async: false + + alias Sigra.OAuth + alias Sigra.Test.PostgresRepo + alias Sigra.Test.AuditEvent, as: AuditTestEvent + + defmodule OAuthUser do + @moduledoc false + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "oauth_refresh_users" do + field(:email, :string) + timestamps() + end + end + + defmodule OAuthIdentity do + @moduledoc false + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "oauth_refresh_identities" do + field(:user_id, :binary_id) + field(:provider, :string) + field(:provider_uid, :string) + field(:encrypted_access_token, :binary) + field(:encrypted_refresh_token, :binary) + field(:token_expires_at, :utc_datetime) + field(:metadata, :map, default: %{}) + field(:last_used_at, :utc_datetime) + timestamps() + end + end + + setup do + start_supervised!({PostgresRepo, PostgresRepo.default_config()}) + repo = PostgresRepo + + Ecto.Adapters.SQL.query!(repo, "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"", []) + + for t <- ["oauth_refresh_identities", "oauth_refresh_users", "audit_events"] do + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS #{t} CASCADE", []) + end + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE oauth_refresh_users ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + email text NOT NULL, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """ + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE oauth_refresh_identities ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + user_id uuid NOT NULL REFERENCES oauth_refresh_users(id) ON DELETE CASCADE, + provider text NOT NULL, + provider_uid text NOT NULL, + encrypted_access_token bytea, + encrypted_refresh_token bytea, + token_expires_at timestamp, + metadata jsonb NOT NULL DEFAULT '{}'::jsonb, + last_used_at timestamp NOT NULL, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """ + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE audit_events ( + id uuid PRIMARY KEY, + occurred_at timestamp NOT NULL DEFAULT now(), + action varchar(255) NOT NULL, + outcome varchar(32) NOT NULL DEFAULT 'success', + actor_id uuid, + actor_type varchar(64) NOT NULL DEFAULT 'user', + target_id uuid, + target_type varchar(64), + ip_address varchar(64), + user_agent varchar(512), + metadata jsonb NOT NULL DEFAULT '{}'::jsonb, + organization_id uuid, + effective_user_id uuid, + inserted_at timestamp NOT NULL DEFAULT now() + ) + """ + ) + + Ecto.Adapters.SQL.query!( + repo, + "TRUNCATE TABLE oauth_refresh_identities, oauth_refresh_users, audit_events RESTART IDENTITY CASCADE", + [] + ) + + %{repo: repo} + end + + defp oauth_config(repo, strategy_module, site_url) do + %{ + repo: repo, + user_schema: OAuthUser, + identity_schema: OAuthIdentity, + oauth: [ + enabled: true, + providers: [ + test_provider: [ + client_id: "test_id", + client_secret: "test_secret", + strategy: strategy_module, + base_url: site_url, + token_url: "#{site_url}/token" + ] + ], + ], + audit: [audit_schema: AuditTestEvent] + } + end + + describe "refresh for non-Google providers" do + setup do + TestServer.start() + %{site_url: TestServer.url()} + end + + for {provider_name, strategy_module} <- [ + {"GitHub", Assent.Strategy.Github}, + {"Apple", Assent.Strategy.Apple}, + {"Facebook", Assent.Strategy.Facebook}, + {"Generic", Assent.Strategy.OAuth2} + ] do + test "valid refresh updates tokens for #{provider_name}", %{repo: repo, site_url: site_url} do + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 200, + Jason.encode!(%{ + "access_token" => "new_acc", + "refresh_token" => "new_ref", + "expires_in" => 3600, + "token_type" => "Bearer" + }) + ) + end + ) + + config = oauth_config(repo, unquote(strategy_module), site_url) + + user = repo.insert!(%OAuthUser{email: "test@example.com"}) + + identity_record = repo.insert!(%OAuthIdentity{ + user_id: user.id, + provider: "test_provider", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now() |> DateTime.truncate(:second), -3600, :second), + last_used_at: DateTime.utc_now() |> DateTime.truncate(:second) + }) + identity = Sigra.Identity.from_schema(identity_record) + + assert {:ok, %{access_token: "new_acc", refresh_token: "new_ref"}} = + OAuth.refresh_token(config, identity) + end + + test "invalid_grant yields reauth_required for #{provider_name}", %{repo: repo, site_url: site_url} do + TestServer.add("/token", + via: :post, + to: fn conn -> + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp( + 400, + Jason.encode!(%{"error" => "invalid_grant"}) + ) + end + ) + + config = oauth_config(repo, unquote(strategy_module), site_url) + + user = repo.insert!(%OAuthUser{email: "test@example.com"}) + + identity_record = repo.insert!(%OAuthIdentity{ + user_id: user.id, + provider: "test_provider", + provider_uid: "uid_123", + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me", + token_expires_at: DateTime.add(DateTime.utc_now() |> DateTime.truncate(:second), -3600, :second), + last_used_at: DateTime.utc_now() |> DateTime.truncate(:second) + }) + identity = Sigra.Identity.from_schema(identity_record) + + assert {:error, :reauth_required} = OAuth.refresh_token(config, identity) + end + end + end +end diff --git a/test/sigra/oauth/token_test.exs b/test/sigra/oauth/token_test.exs new file mode 100644 index 00000000..ef3729cd --- /dev/null +++ b/test/sigra/oauth/token_test.exs @@ -0,0 +1,132 @@ +defmodule Sigra.OAuth.TokenTest do + use ExUnit.Case, async: true + + alias Sigra.OAuth.Token + + defmodule Credential do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_account_credentials" do + field :client_id, :string + field :hashed_client_secret, :binary + field :expires_at, :utc_datetime + field :revoked_at, :utc_datetime + field :service_account_id, :binary_id + field :last_used_at, :utc_datetime + end + end + + defmodule ServiceAccount do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_accounts" do + field :organization_id, :binary_id + field :scopes, {:array, :string}, default: [] + field :role, :string + field :token_epoch, :integer, default: 0 + field :revoked_at, :utc_datetime + end + end + + defmodule MockRepo do + def get_by(Credential, client_id: client_id), do: Process.get({:cred_by_client_id, client_id}) + def get(ServiceAccount, id), do: Process.get({:service_account, id}) + + def transaction(%Ecto.Multi{} = multi) do + return = fn err -> throw({:mock_multi_abort, err}) end + wrap = fn fun -> fun.() end + + try do + case Ecto.Multi.__apply__(multi, __MODULE__, wrap, return) do + {:ok, result} -> {:ok, result} + result when is_map(result) -> {:ok, result} + {:error, {name, val, acc}} -> {:error, name, val, acc} + end + catch + :throw, {:mock_multi_abort, {name, val, acc}} -> + {:error, name, val, acc} + end + end + + def update(changeset, _opts \\ []), do: {:ok, Ecto.Changeset.apply_changes(changeset)} + def insert(changeset, _opts \\ []), do: {:ok, Ecto.Changeset.apply_changes(changeset)} + end + + defp config do + Sigra.Config.new!( + repo: MockRepo, + user_schema: Sigra.TestUser, + otp_app: :sigra, + secret_key_base: String.duplicate("a", 64), + audit: [audit_schema: Sigra.Test.AuditEvent], + service_accounts: [ + service_account_schema: ServiceAccount, + service_account_credential_schema: Credential + ], + jwt: [enabled: true, algorithm: "HS256", issuer: "sigra", client_credentials_access_ttl: 3600] + ) + end + + test "returns invalid_client for unknown client_id" do + assert {:error, :invalid_client} = + Token.client_credentials(config(), client_id: "missing", client_secret: "secret") + end + + test "returns invalid_client for revoked credential" do + cred = %Credential{ + id: "cred-1", + client_id: "sigra_sa_a", + hashed_client_secret: Sigra.Token.hash_token("secret"), + revoked_at: DateTime.utc_now(), + service_account_id: "sa-1" + } + + Process.put({:cred_by_client_id, cred.client_id}, cred) + + assert {:error, :invalid_client} = + Token.client_credentials(config(), client_id: cred.client_id, client_secret: "secret") + end + + test "returns invalid_scope for scopes outside granted list" do + sa = %ServiceAccount{id: "sa-1", organization_id: "org-1", scopes: ["billing:read"], token_epoch: 0} + + cred = %Credential{ + id: "cred-1", + client_id: "sigra_sa_a", + hashed_client_secret: Sigra.Token.hash_token("secret"), + service_account_id: sa.id + } + + Process.put({:cred_by_client_id, cred.client_id}, cred) + Process.put({:service_account, sa.id}, sa) + + assert {:error, :invalid_scope} = + Token.client_credentials( + config(), + client_id: cred.client_id, + client_secret: "secret", + scope: "deploy:write" + ) + end + + test "issues a token for valid credentials" do + sa = %ServiceAccount{id: "sa-1", organization_id: "org-1", scopes: ["billing:read"], token_epoch: 0} + + cred = %Credential{ + id: "cred-1", + client_id: "sigra_sa_a", + hashed_client_secret: Sigra.Token.hash_token("secret"), + service_account_id: sa.id + } + + Process.put({:cred_by_client_id, cred.client_id}, cred) + Process.put({:service_account, sa.id}, sa) + + assert {:ok, %{access_token: jwt, scope: "billing:read", expires_in: 3600}} = + Token.client_credentials(config(), client_id: cred.client_id, client_secret: "secret") + + assert is_binary(jwt) + end +end diff --git a/test/sigra/optional_deps_test.exs b/test/sigra/optional_deps_test.exs new file mode 100644 index 00000000..6ba64cd2 --- /dev/null +++ b/test/sigra/optional_deps_test.exs @@ -0,0 +1,202 @@ +defmodule Sigra.OptionalDepsTest do + use ExUnit.Case, async: true + + alias Sigra.OptionalDeps + alias Sigra.OptionalDeps.MissingDependencyError + + describe "feature_specs/0" do + test "returns staged metadata for enforced and advisory features" do + specs = OptionalDeps.feature_specs() + + assert is_list(specs) + + features = + specs + |> Enum.map(& &1.feature) + |> Enum.sort() + + assert [ + :async_email, + :bcrypt_migration, + :jwt, + :lifecycle_jobs, + :oauth, + :rate_limit, + :swoosh, + :totp_qr, + :webhook_delivery + ] = + features + + assert %{dependency: :oban, enforced?: true, support_tier: :phase_95} = + OptionalDeps.feature_spec!(:async_email) + + assert %{dependency: :joken, enforced?: true, support_tier: :phase_95} = + OptionalDeps.feature_spec!(:jwt) + + assert %{dependency: :oban, enforced?: true, support_tier: :phase_95} = + OptionalDeps.feature_spec!(:lifecycle_jobs) + + assert %{dependency: :oban, enforced?: true, support_tier: :phase_95} = + OptionalDeps.feature_spec!(:webhook_delivery) + + assert %{dependency: :hammer, enforced?: false, support_tier: :advisory} = + OptionalDeps.feature_spec!(:rate_limit) + + assert %{dependency: :assent, enforced?: false, support_tier: :advisory} = + OptionalDeps.feature_spec!(:oauth) + + assert %{dependency: :swoosh, enforced?: false, support_tier: :advisory} = + OptionalDeps.feature_spec!(:swoosh) + end + end + + describe "dependency_loaded?/1" do + test "accepts a feature or a spec" do + spec = OptionalDeps.feature_spec!(:jwt) + + assert OptionalDeps.dependency_loaded?(:jwt) + assert OptionalDeps.dependency_loaded?(spec) + end + end + + describe "feature_enabled?/2" do + test "proves jwt enablement from host config" do + assert OptionalDeps.feature_enabled?(:jwt, config(jwt: [enabled: true])) + refute OptionalDeps.feature_enabled?(:jwt, config(jwt: [enabled: false])) + end + + test "proves async email enablement from runtime evidence" do + assert OptionalDeps.feature_enabled?(:async_email, delivery_mode: :async) + refute OptionalDeps.feature_enabled?(:async_email, delivery_mode: :sync) + end + + test "proves webhook delivery enablement from host config" do + assert OptionalDeps.feature_enabled?(:webhook_delivery, config(webhooks: [enabled: true])) + refute OptionalDeps.feature_enabled?(:webhook_delivery, config(webhooks: [enabled: false])) + end + end + + describe "ensure_available!/2" do + test "returns :ok for an enabled enforced feature when dependency is loaded" do + assert :ok = OptionalDeps.ensure_available!(:jwt, config(jwt: [enabled: true])) + end + + test "raises a tagged error for an enabled enforced feature when dependency is missing" do + assert_raise MissingDependencyError, fn -> + OptionalDeps.ensure_available!(:jwt, + config: config(jwt: [enabled: true]), + dependency_loaded?: fn _spec -> false end + ) + end + end + + test "raises a tagged error for enabled webhook delivery when Oban is missing" do + assert_raise MissingDependencyError, fn -> + OptionalDeps.ensure_available!(:webhook_delivery, + config: config(webhooks: [enabled: true]), + dependency_loaded?: fn _spec -> false end + ) + end + end + + test "retains stable structured fields on the tagged error" do + error = + assert_raise MissingDependencyError, fn -> + OptionalDeps.ensure_available!(:totp_qr, + mfa_enrollment: :qr, + dependency_loaded?: fn _spec -> false end + ) + end + + assert error.feature == :totp_qr + assert error.dependency == :eqrcode + assert error.spec == "~> 0.2.1" + assert error.evidence == "MFA enrollment requested QR rendering" + assert error.remediation == ~s(Add {:eqrcode, "~> 0.2.1"} to your mix.exs deps and run mix deps.get.) + assert Exception.message(error) =~ "[Sigra]" + end + + test "does not block advisory rows when they are inactive" do + assert :ok = + OptionalDeps.ensure_available!(:rate_limit, + dependency_loaded?: fn _spec -> false end + ) + end + end + + describe "doctor_row/2" do + test "proves jwt enablement from host config instead of speculation" do + active_row = OptionalDeps.doctor_row(:jwt, config(jwt: [enabled: true])) + inactive_row = OptionalDeps.doctor_row(:jwt, config(jwt: [enabled: false])) + + assert active_row.enabled? == true + assert active_row.status == :ok + assert active_row.evidence == "config.jwt[:enabled] == true" + + assert inactive_row.enabled? == false + assert inactive_row.status == :inactive + assert inactive_row.blocking? == false + end + + test "returns informative non-blocking metadata for inactive advisory rows" do + row = + OptionalDeps.doctor_row(:rate_limit, + dependency_loaded?: fn _spec -> false end + ) + + assert row.feature == :rate_limit + assert row.dependency == :hammer + assert row.enabled? == false + assert row.loaded? == false + assert row.blocking? == false + assert row.status == :advisory + assert row.evidence == "rate limiting not explicitly configured" + end + + test "marks webhook delivery as blocking only when explicitly enabled and missing" do + active_row = + OptionalDeps.doctor_row(:webhook_delivery, + config: config(webhooks: [enabled: true]), + dependency_loaded?: fn _spec -> false end + ) + + inactive_row = + OptionalDeps.doctor_row(:webhook_delivery, + config: config(webhooks: [enabled: false]), + dependency_loaded?: fn _spec -> false end + ) + + assert active_row.enabled? == true + assert active_row.blocking? == true + assert active_row.status == :missing + assert active_row.evidence == "config.webhooks[:enabled] == true" + + assert inactive_row.enabled? == false + assert inactive_row.blocking? == false + assert inactive_row.status == :inactive + assert inactive_row.evidence == "config.webhooks[:enabled] != true" + end + end + + defp config(overrides) do + base = [ + repo: Sigra.MockRepo, + user_schema: Sigra.TestUser, + secret_key_base: String.duplicate("a", 64), + jwt: [enabled: false, algorithm: "HS256"], + mfa: [enabled: true], + webhooks: [enabled: false] + ] + + merged = + Keyword.merge(base, overrides, fn + :jwt, left, right -> Keyword.merge(left, right) + :mfa, left, right -> Keyword.merge(left, right) + :webhooks, left, right -> Keyword.merge(left, right) + _key, _left, right -> right + end) + + Sigra.Config.new!(merged) + end +end diff --git a/test/sigra/organizations/context_test.exs b/test/sigra/organizations/context_test.exs index 9566bc62..7420f8e5 100644 --- a/test/sigra/organizations/context_test.exs +++ b/test/sigra/organizations/context_test.exs @@ -16,6 +16,7 @@ defmodule Sigra.Organizations.ContextTest do # Phase 18: Sticky origin owner + personal-workspace flag. field :owner_user_id, :binary_id field :personal, :boolean, default: false + field :enforce_mfa_for_members, :boolean, default: false timestamps(type: :utc_datetime) end end diff --git a/test/sigra/organizations/invitations_test.exs b/test/sigra/organizations/invitations_test.exs index 3bb21503..0726946a 100644 --- a/test/sigra/organizations/invitations_test.exs +++ b/test/sigra/organizations/invitations_test.exs @@ -185,6 +185,10 @@ defmodule Sigra.Organizations.InvitationsTest do }, roles: [:owner, :admin, :member], owner_role: :owner, + # Phase 92 / B2B-02 (Plan 92-01) made :invitation_admin_roles a + # required config key. Plan 92-02 re-greens this fixture by + # supplying the host-themed value the generator now emits. + invitation_admin_roles: [:owner, :admin], audit_schema: nil, hooks: [], invitation_ttl: :timer.hours(24 * 7), diff --git a/test/sigra/organizations/schema_test.exs b/test/sigra/organizations/schema_test.exs index 9be2ac28..9444e71d 100644 --- a/test/sigra/organizations/schema_test.exs +++ b/test/sigra/organizations/schema_test.exs @@ -19,6 +19,7 @@ defmodule Sigra.Organizations.SchemaTest do field :name, :string field :slug, :string field :deleted_at, :utc_datetime + field :enforce_mfa_for_members, :boolean, default: false timestamps(type: :utc_datetime) end @@ -147,6 +148,15 @@ defmodule Sigra.Organizations.SchemaTest do assert changeset.valid? assert Ecto.Changeset.get_field(changeset, :deleted_at) == now end + + test "enforce_mfa_for_members defaults false and is not user-castable" do + changeset = + Organization.changeset(%Organization{}, Map.put(valid_org_attrs(), :enforce_mfa_for_members, true)) + + assert changeset.valid? + assert Ecto.Changeset.get_change(changeset, :enforce_mfa_for_members) == nil + assert Ecto.Changeset.get_field(changeset, :enforce_mfa_for_members) == false + end end # ── OrganizationMembership changeset tests ──────────────────────── diff --git a/test/sigra/organizations/set_mfa_policy_test.exs b/test/sigra/organizations/set_mfa_policy_test.exs new file mode 100644 index 00000000..88358638 --- /dev/null +++ b/test/sigra/organizations/set_mfa_policy_test.exs @@ -0,0 +1,193 @@ +defmodule Sigra.Organizations.SetMfaPolicyTest do + use ExUnit.Case, async: true + + import Mox + + alias Sigra.Test.AuditEvent, as: AuditTestEvent + + defmodule TestOrg do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "organizations" do + field :name, :string + field :slug, :string + field :enforce_mfa_for_members, :boolean, default: false + timestamps(type: :utc_datetime) + end + end + + defmodule TestMembership do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "organization_memberships" do + field :role, Ecto.Enum, values: [:owner, :admin, :member] + field :organization_id, :binary_id + field :user_id, :binary_id + timestamps(type: :utc_datetime) + end + end + + defmodule TestInvitation do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "organization_invitations" do + field :email, :string + end + end + + defmodule TestUser do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "users" do + field :email, :string + end + end + + defmodule TestMfaCredential do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "user_mfa_credentials" do + field :user_id, :binary_id + field :enabled_at, :utc_datetime_usec + end + end + + defmodule TestScope do + defstruct [:user, :active_organization, :membership] + end + + setup :verify_on_exit! + + @config %{ + repo: Sigra.MockRepo, + schemas: %{ + organization: TestOrg, + membership: TestMembership, + invitation: TestInvitation, + user: TestUser, + scope: TestScope + }, + roles: [:owner, :admin, :member], + owner_role: :owner, + audit_schema: AuditTestEvent, + hooks: [] + } + + defp user, do: %TestUser{id: Ecto.UUID.generate(), email: "user@example.com"} + + defp org(attrs \\ %{}) do + now = DateTime.utc_now() |> DateTime.truncate(:second) + + struct!(TestOrg, Map.merge(%{ + id: Ecto.UUID.generate(), + name: "Acme", + slug: "acme", + enforce_mfa_for_members: false, + inserted_at: now, + updated_at: now + }, attrs)) + end + + defp scope(user, org) do + %TestScope{ + user: user, + active_organization: org, + membership: %TestMembership{role: :owner, organization_id: org.id, user_id: user.id} + } + end + + test "enabling without :mfa_check_fn raises" do + current_org = org() + current_user = user() + + assert_raise ArgumentError, ~r/requires :mfa_check_fn/, fn -> + Sigra.Organizations.set_mfa_policy(@config, scope(current_user, current_org), current_org, true, []) + end + end + + test "no-op short-circuits without calling the repo" do + current_org = org(%{enforce_mfa_for_members: true}) + current_user = user() + + assert {:ok, ^current_org} = + Sigra.Organizations.set_mfa_policy( + @config, + scope(current_user, current_org), + current_org, + true, + mfa_check_fn: fn _ -> true end + ) + end + + test "enable pre-flight refuses when admin lacks MFA" do + current_org = org() + current_user = user() + + assert {:error, :admin_must_enroll_first} = + Sigra.Organizations.set_mfa_policy( + @config, + scope(current_user, current_org), + current_org, + true, + mfa_check_fn: fn _ -> false end + ) + end + + test "happy path builds a multi and returns updated org" do + current_org = org() + current_user = user() + updated_org = %{current_org | enforce_mfa_for_members: true} + + Sigra.MockRepo + |> expect(:transact, fn %Ecto.Multi{} = multi -> + steps = Ecto.Multi.to_list(multi) |> Enum.map(fn {name, _} -> name end) + assert :organization in steps + assert :audit in steps + {:ok, %{organization: updated_org}} + end) + + assert {:ok, %{enforce_mfa_for_members: true}} = + Sigra.Organizations.set_mfa_policy( + @config, + scope(current_user, current_org), + current_org, + true, + mfa_check_fn: fn _ -> true end + ) + end + + test "count_members_without_mfa falls back to count_members when schema is nil" do + current_org = org() + current_user = user() + + Sigra.MockRepo + |> expect(:aggregate, fn _query, :count -> 3 end) + + assert 3 == + Sigra.Organizations.count_members_without_mfa( + @config, + scope(current_user, current_org), + nil + ) + end + + test "count_members_without_mfa delegates to repo.one when schema is present" do + current_org = org() + current_user = user() + + Sigra.MockRepo + |> expect(:one, fn _query -> 2 end) + + assert 2 == + Sigra.Organizations.count_members_without_mfa( + @config, + scope(current_user, current_org), + TestMfaCredential + ) + end +end diff --git a/test/sigra/organizations_mfa_policy_audit_atomicity_test.exs b/test/sigra/organizations_mfa_policy_audit_atomicity_test.exs new file mode 100644 index 00000000..cf0cc95f --- /dev/null +++ b/test/sigra/organizations_mfa_policy_audit_atomicity_test.exs @@ -0,0 +1,301 @@ +defmodule Sigra.OrganizationsMfaPolicyAuditAtomicityTest do + use ExUnit.Case, async: false + + alias Sigra.Test.AuditEvent, as: AuditTestEvent + alias Sigra.Test.PostgresRepo + + defmodule VerifyFailureTelemetryHandler do + @moduledoc false + def handle_event(event, measurements, metadata, parent) do + send(parent, {:telemetry, event, measurements, metadata}) + end + end + + defmodule Org do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "organizations" do + field :name, :string + field :slug, :string + field :enforce_mfa_for_members, :boolean, default: false + timestamps(type: :utc_datetime_usec) + end + end + + defmodule Membership do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "organization_memberships" do + field :role, :string + field :organization_id, :binary_id + field :user_id, :binary_id + timestamps(type: :utc_datetime_usec) + end + end + + defmodule Invitation do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "organization_invitations" do + field :email, :string + timestamps(type: :utc_datetime_usec) + end + end + + defmodule User do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "users" do + field :email, :string + timestamps(type: :utc_datetime_usec) + end + end + + defmodule Scope do + defstruct [:user, :active_organization, :membership] + end + + setup do + start_supervised!({PostgresRepo, PostgresRepo.default_config()}) + repo = PostgresRepo + + Ecto.Adapters.SQL.query!(repo, "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"", []) + + for table <- ["organization_memberships", "organization_invitations", "organizations", "audit_events", "users"] do + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS #{table} CASCADE", []) + end + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE users ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + email text, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE organizations ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + name text, + slug text, + enforce_mfa_for_members boolean NOT NULL DEFAULT false, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE organization_memberships ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + role text, + organization_id uuid NOT NULL, + user_id uuid NOT NULL, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE organization_invitations ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + email text, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE audit_events ( + id uuid PRIMARY KEY, + occurred_at timestamp NOT NULL DEFAULT now(), + action varchar(255) NOT NULL, + outcome varchar(32) NOT NULL DEFAULT 'success', + actor_id uuid, + actor_type varchar(64) NOT NULL DEFAULT 'user', + target_id uuid, + target_type varchar(64), + ip_address varchar(64), + user_agent varchar(512), + metadata jsonb NOT NULL DEFAULT '{}'::jsonb, + organization_id uuid, + effective_user_id uuid, + inserted_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + %{repo: repo} + end + + defp cfg(repo, audit? \\ true) do + %{ + repo: repo, + schemas: %{ + organization: Org, + membership: Membership, + invitation: Invitation, + user: User, + scope: Scope + }, + roles: [:owner, :admin, :member], + owner_role: :owner, + audit_schema: if(audit?, do: AuditTestEvent, else: nil), + hooks: [] + } + end + + defp scope(user, org) do + %Scope{ + user: user, + active_organization: org, + membership: %Membership{role: "owner", organization_id: org.id, user_id: user.id} + } + end + + defp count_where(repo, table, where) do + %{rows: [[n]]} = + Ecto.Adapters.SQL.query!(repo, "SELECT count(*)::bigint FROM #{table} WHERE #{where}", []) + + n + end + + test "happy path writes org flag and one audit row", %{repo: repo} do + user = repo.insert!(%User{email: "owner@example.com"}) + org = repo.insert!(%Org{name: "Acme", slug: "acme"}) + + assert {:ok, updated} = + Sigra.Organizations.set_mfa_policy( + cfg(repo), + scope(user, org), + org, + true, + mfa_check_fn: fn _ -> true end + ) + + assert updated.enforce_mfa_for_members + assert repo.reload!(org).enforce_mfa_for_members + assert count_where(repo, "audit_events", "action = 'organization.mfa_policy_change'") == 1 + end + + test "audit off succeeds without audit row", %{repo: repo} do + user = repo.insert!(%User{email: "owner@example.com"}) + org = repo.insert!(%Org{name: "Acme", slug: "acme"}) + + assert {:ok, updated} = + Sigra.Organizations.set_mfa_policy( + cfg(repo, false), + scope(user, org), + org, + true, + mfa_check_fn: fn _ -> true end + ) + + assert updated.enforce_mfa_for_members + assert count_where(repo, "audit_events", "action = 'organization.mfa_policy_change'") == 0 + end + + test "fault injection rolls back org write and returns :mfa_policy_aborted", %{repo: repo} do + user = repo.insert!(%User{email: "owner@example.com"}) + org = repo.insert!(%Org{name: "Acme", slug: "acme"}) + telemetry_id = "mfa-policy-#{System.unique_integer([:positive])}" + + Ecto.Adapters.SQL.query!( + repo, + """ + ALTER TABLE audit_events + ADD CONSTRAINT mfa_policy_change_guard CHECK (action <> 'organization.mfa_policy_change') + """, + [] + ) + + :telemetry.attach( + telemetry_id, + [:sigra, :audit, :log_safe_error], + &VerifyFailureTelemetryHandler.handle_event/4, + self() + ) + + try do + assert {:error, :mfa_policy_aborted} = + Sigra.Organizations.set_mfa_policy( + cfg(repo), + scope(user, org), + org, + true, + mfa_check_fn: fn _ -> true end + ) + + assert repo.reload!(org).enforce_mfa_for_members == false + assert count_where(repo, "audit_events", "action = 'organization.mfa_policy_change'") == 0 + + assert_receive {:telemetry, [:sigra, :audit, :log_safe_error], %{count: 1}, + %{action: "organization.mfa_policy_change", reason: :constraint_violation}} + after + :telemetry.detach(telemetry_id) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS mfa_policy_change_guard", + [] + ) + end + end + + test "no-op returns existing org and writes no audit row", %{repo: repo} do + user = repo.insert!(%User{email: "owner@example.com"}) + org = repo.insert!(%Org{name: "Acme", slug: "acme", enforce_mfa_for_members: true}) + + assert {:ok, same_org} = + Sigra.Organizations.set_mfa_policy( + cfg(repo), + scope(user, org), + org, + true, + mfa_check_fn: fn _ -> true end + ) + + assert same_org.id == org.id + assert count_where(repo, "audit_events", "action = 'organization.mfa_policy_change'") == 0 + end + + test "admin pre-flight returns :admin_must_enroll_first and writes no audit row", %{repo: repo} do + user = repo.insert!(%User{email: "owner@example.com"}) + org = repo.insert!(%Org{name: "Acme", slug: "acme"}) + + assert {:error, :admin_must_enroll_first} = + Sigra.Organizations.set_mfa_policy( + cfg(repo), + scope(user, org), + org, + true, + mfa_check_fn: fn _ -> false end + ) + + assert repo.reload!(org).enforce_mfa_for_members == false + assert count_where(repo, "audit_events", "action = 'organization.mfa_policy_change'") == 0 + end +end diff --git a/test/sigra/passkeys/migration_test.exs b/test/sigra/passkeys/migration_test.exs index b1126224..6cdf496c 100644 --- a/test/sigra/passkeys/migration_test.exs +++ b/test/sigra/passkeys/migration_test.exs @@ -10,8 +10,8 @@ defmodule Sigra.Passkeys.MigrationTemplateTest do "create_user_passkeys.exs" ]) - test "postgres migration uses uuid aaguid and unique credential_id index" do - content = render_template(adapter: :postgres, binary_id: true) + test "migration uses uuid aaguid and unique credential_id index" do + content = render_template(binary_id: true) assert content =~ "create table(:user_passkeys, primary_key: false)" assert content =~ "add :aaguid, :uuid" @@ -20,21 +20,11 @@ defmodule Sigra.Passkeys.MigrationTemplateTest do assert content =~ "create unique_index(:user_passkeys, [:credential_id])" end - test "mysql and sqlite migrations fall back to fixed-width binary aaguid" do - for adapter <- [:mysql, :sqlite] do - content = render_template(adapter: adapter, binary_id: false) - - assert content =~ "add :aaguid, :binary, size: 16" - assert content =~ "create unique_index(:user_passkeys, [:credential_id])" - end - end - defp render_template(opts) do binding = [ repo_module: "TemplateApp.Repo", table_name: "users", - binary_id: Keyword.fetch!(opts, :binary_id), - adapter: Keyword.fetch!(opts, :adapter) + binary_id: Keyword.fetch!(opts, :binary_id) ] EEx.eval_file(@template_path, binding) diff --git a/test/sigra/planning/phase_52_milestone_honesty_contract_test.exs b/test/sigra/planning/phase_52_milestone_honesty_contract_test.exs index 81eabf66..fd8a673e 100644 --- a/test/sigra/planning/phase_52_milestone_honesty_contract_test.exs +++ b/test/sigra/planning/phase_52_milestone_honesty_contract_test.exs @@ -17,7 +17,7 @@ defmodule Sigra.Planning.Phase52MilestoneHonestyContractTest do md = read!(".planning/ROADMAP.md") assert md =~ "v1.4 GA readiness" - assert md =~ "SHIPPED 2026-04-22" + assert md =~ "shipped **2026-04-22**" assert md =~ "milestones/v1.4-ROADMAP.md" end diff --git a/test/sigra/plug/fetch_bearer_test.exs b/test/sigra/plug/fetch_bearer_test.exs index 5cf658e8..e268b328 100644 --- a/test/sigra/plug/fetch_bearer_test.exs +++ b/test/sigra/plug/fetch_bearer_test.exs @@ -212,4 +212,304 @@ defmodule Sigra.Plug.FetchBearerTest do assert source =~ ":config" end end + + # --------------------------------------------------------------------------- + # Service-account JWT path tests (Gap #2 closure) + # + # Inline schemas and mock repo to avoid coupling to Sigra.TestRepo (undefined + # outside the library's Postgres test context). Uses Process.put/get for + # per-test state (async: true isolates each test process). + # --------------------------------------------------------------------------- + + defmodule SAScopeSchemas do + @moduledoc false + + defmodule ServiceAccount do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_accounts" do + field :name, :string + field :scopes, {:array, :string}, default: [] + field :role, :string + field :token_epoch, :integer, default: 0 + field :revoked_at, :utc_datetime + field :organization_id, :binary_id + end + end + + defmodule Credential do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_account_credentials" do + field :client_id, :string + field :hashed_client_secret, :binary + field :expires_at, :utc_datetime + field :last_used_at, :utc_datetime + field :revoked_at, :utc_datetime + field :service_account_id, :binary_id + end + end + + defmodule Organization do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "organizations" do + field :name, :string + end + end + + defmodule User do + use Ecto.Schema + + @primary_key {:id, :integer, autogenerate: false} + embedded_schema do + field :email, :string + field :token_epoch, :integer, default: 0 + end + end + end + + # Organizations module stub — required by FetchBearer.load_organization/2 + # to return a non-nil organization, enabling build_jwt_scope to populate + # active_organization on the SA scope (D-93-11). + defmodule SATestOrganizations do + @moduledoc false + alias Sigra.Plug.FetchBearerTest.SAScopeSchemas + + def __sigra_org_config__ do + %{ + schemas: %{ + organization: SAScopeSchemas.Organization + } + } + end + end + + # In-process mock repo for SA JWT path tests — uses Process dictionary for + # per-test state. Implements all repo callbacks called by: + # verify_service_account_epoch/2 (2x get: SA + Credential) + # build_jwt_scope/3 (1x get: SA) + # load_organization/2 (1x get: Org) + # build_user_scope/4 (1x get: User) + # generate_service_account_tokens (1x transaction) + defmodule SAMockRepo do + @moduledoc false + alias Sigra.Plug.FetchBearerTest.SAScopeSchemas + + def get(SAScopeSchemas.ServiceAccount, id), do: Process.get({:sa, id}) + def get(SAScopeSchemas.Credential, id), do: Process.get({:credential, id}) + def get(SAScopeSchemas.Organization, id), do: Process.get({:org, id}) + def get(SAScopeSchemas.User, id), do: Process.get({:user, id}) + def get(_schema, _id), do: nil + + def transaction(%Ecto.Multi{} = _multi) do + # Return success for issuance audit (append_token_issued_audit) + {:ok, %{credential_last_used: nil, audit_service_account_token_issued: nil}} + end + + def update(changeset, _opts \\ []), do: {:ok, Ecto.Changeset.apply_changes(changeset)} + def insert(changeset, _opts \\ []) do + {:ok, Ecto.Changeset.apply_changes(changeset)} + end + def insert_all(_schema, _rows, _opts \\ []), do: {0, nil} + end + + @sa_secret_key_base String.duplicate("b", 64) + + defp sa_jwt_config do + Sigra.Config.new!( + repo: Sigra.Plug.FetchBearerTest.SAMockRepo, + user_schema: Sigra.Plug.FetchBearerTest.SAScopeSchemas.User, + otp_app: :test_app, + secret_key_base: @sa_secret_key_base, + organizations_module: Sigra.Plug.FetchBearerTest.SATestOrganizations, + service_accounts: [ + service_account_schema: Sigra.Plug.FetchBearerTest.SAScopeSchemas.ServiceAccount, + service_account_credential_schema: Sigra.Plug.FetchBearerTest.SAScopeSchemas.Credential, + client_id_prefix: "sigra_sa_", + client_id_byte_size: 24 + ], + jwt: [ + enabled: true, + algorithm: "HS256", + issuer: "test_issuer", + access_ttl: 900, + client_credentials_access_ttl: 3600, + refresh: false, + verify_epoch: true + ] + ) + end + + defp build_sa_fixture do + sa_id = Ecto.UUID.generate() + org_id = Ecto.UUID.generate() + cred_id = Ecto.UUID.generate() + client_id = "sigra_sa_" <> String.slice(Ecto.UUID.generate(), 0, 24) + + sa = %Sigra.Plug.FetchBearerTest.SAScopeSchemas.ServiceAccount{ + id: sa_id, + name: "CI Account", + scopes: ["deploy:read", "deploy:write"], + role: "ci", + token_epoch: 0, + revoked_at: nil, + organization_id: org_id + } + + cred = %Sigra.Plug.FetchBearerTest.SAScopeSchemas.Credential{ + id: cred_id, + client_id: client_id, + hashed_client_secret: :crypto.hash(:sha256, "secret"), + expires_at: nil, + last_used_at: nil, + revoked_at: nil, + service_account_id: sa_id + } + + org = %Sigra.Plug.FetchBearerTest.SAScopeSchemas.Organization{ + id: org_id, + name: "Test Org" + } + + {sa, cred, org} + end + + defp put_sa_state(sa, cred, org) do + Process.put({:sa, sa.id}, sa) + Process.put({:credential, cred.id}, cred) + Process.put({:org, org.id}, org) + end + + defp generate_sa_jwt(cfg, sa, cred) do + # generate_service_account_tokens calls transaction for audit + {:ok, %{access_token: jwt}} = Sigra.JWT.generate_service_account_tokens(cfg, sa, cred) + jwt + end + + describe "call/2 service-account JWT path" do + setup do + cfg = sa_jwt_config() + {sa, cred, org} = build_sa_fixture() + put_sa_state(sa, cred, org) + + # Generate a valid SA JWT via the library (transaction mocked by SAMockRepo) + jwt = generate_sa_jwt(cfg, sa, cred) + + # Also generate an expired SA JWT for the verify-failure test + signer = + Joken.Signer.create( + "HS256", + :crypto.mac(:hmac, :sha256, @sa_secret_key_base, "sigra-jwt-signing-key") + ) + + now = DateTime.utc_now() |> DateTime.to_unix() + + expired_claims = %{ + "sub" => cred.client_id, + "iat" => now - 7200, + "exp" => now - 3600, + "jti" => Ecto.UUID.generate(), + "iss" => "test_issuer", + "scopes" => sa.scopes, + "epoch" => sa.token_epoch, + "actor_type" => "service_account", + "service_account_id" => sa.id, + "credential_id" => cred.id, + "org_id" => sa.organization_id + } + + {:ok, expired_jwt, _} = Joken.generate_and_sign(%{}, expired_claims, signer) + + # Generate a user JWT for the parity regression guard test + user = %Sigra.Plug.FetchBearerTest.SAScopeSchemas.User{id: 99, email: "user@example.com", token_epoch: 0} + Process.put({:user, "99"}, user) + + user_signer = signer + + user_claims = %{ + "sub" => "99", + "iat" => now, + "exp" => now + 900, + "jti" => Ecto.UUID.generate(), + "iss" => "test_issuer", + "scopes" => ["read:users"], + "epoch" => 0 + } + + {:ok, user_jwt, _} = Joken.generate_and_sign(%{}, user_claims, user_signer) + + {:ok, + config: cfg, + sa: sa, + cred: cred, + org: org, + jwt: jwt, + expired_jwt: expired_jwt, + user_jwt: user_jwt, + user: user} + end + + test "valid SA JWT builds scope with actor_type: :service_account, service_account_id populated, user: nil", + %{config: cfg, sa: sa, jwt: jwt} do + conn = + conn(:get, "/") + |> Plug.Conn.put_req_header("authorization", "Bearer " <> jwt) + |> FetchBearer.call(FetchBearer.init(config: cfg, scope_module: TestScope)) + + scope = conn.assigns.current_scope + assert scope != nil + assert scope.actor_type == :service_account + assert scope.service_account_id == sa.id + # D-93-04: plug-built scope has user: nil for SA tokens. + # This is a READ assertion on the assigned scope — NOT passing %{user: nil} + # to a ServiceAccounts mutation (which would raise via ensure_user_scope!/2). + assert is_nil(scope.user) + assert scope.active_organization.id == sa.organization_id + end + + test "valid SA JWT does NOT populate :membership (single auth entry-point invariant — ROADMAP SC #5)", + %{config: cfg, jwt: jwt} do + conn = + conn(:get, "/") + |> Plug.Conn.put_req_header("authorization", "Bearer " <> jwt) + |> FetchBearer.call(FetchBearer.init(config: cfg, scope_module: TestScope)) + + scope = conn.assigns.current_scope + assert scope != nil + # SA path bypasses user-membership lookup — no :membership key populated. + assert is_nil(Map.get(scope, :membership)) + assert scope.actor_type == :service_account + end + + test "expired SA JWT assigns current_scope: nil", + %{config: cfg, expired_jwt: expired_jwt} do + conn = + conn(:get, "/") + |> Plug.Conn.put_req_header("authorization", "Bearer " <> expired_jwt) + |> FetchBearer.call(FetchBearer.init(config: cfg, scope_module: TestScope)) + + # Expired token -> verify_access returns {:error, :token_expired} -> scope nil + assert is_nil(conn.assigns.current_scope) + end + + test "valid user JWT still builds a user scope (parity regression guard)", + %{config: cfg, user_jwt: user_jwt, user: user} do + conn = + conn(:get, "/") + |> Plug.Conn.put_req_header("authorization", "Bearer " <> user_jwt) + |> FetchBearer.call(FetchBearer.init(config: cfg, scope_module: TestScope)) + + scope = conn.assigns.current_scope + assert scope != nil + # User path: actor_type is :user; SA actor_type must not be set + assert scope.actor_type == :user + assert scope.user != nil + assert scope.user.id == user.id + assert is_nil(Map.get(scope, :service_account_id)) + end + end end diff --git a/test/sigra/plug/load_active_organization_test.exs b/test/sigra/plug/load_active_organization_test.exs index 105579e3..6044fedb 100644 --- a/test/sigra/plug/load_active_organization_test.exs +++ b/test/sigra/plug/load_active_organization_test.exs @@ -57,7 +57,11 @@ defmodule Sigra.Plug.LoadActiveOrganizationTest do end defmodule TestScope do - defstruct [:user, :active_organization, :membership, :impersonating_from] + # Phase 92 / B2B-02 (Plan 92-03 Task 2 cascade): mirror the generated + # scope struct after Plan 92-02 — `:role` and `:actor_type` are + # required for the library plug to set/clear role on the recovery + # branches without raising KeyError on the struct update. + defstruct [:user, :active_organization, :membership, :impersonating_from, :role, :actor_type] end # Host Organizations module — mimics what `use Sigra.Organizations` produces diff --git a/test/sigra/plug/put_active_organization_test.exs b/test/sigra/plug/put_active_organization_test.exs index 036045c4..0f3c6278 100644 --- a/test/sigra/plug/put_active_organization_test.exs +++ b/test/sigra/plug/put_active_organization_test.exs @@ -50,11 +50,20 @@ defmodule Sigra.Plug.PutActiveOrganizationTest do end defmodule TestScope do - defstruct [:user, :active_organization, :membership, :impersonating_from] + # Plan 92-02 reserved `:role` and `:actor_type` on the generated scope + # struct. Plan 92-03 wires `:role` propagation through the authoritative + # PutActiveOrganization seam. + defstruct [:user, :active_organization, :membership, :impersonating_from, :role, :actor_type] # Test-local scope module that records calls to put_active_organization/3 # so we can assert the orchestrator resolved the module via config and not # a hardcoded Sigra.Scope. + # + # NOTE: this stub deliberately does NOT update :role itself — the library + # plug `Sigra.Plug.PutActiveOrganization` is the single authoritative + # seam for role updates (per Plan 92-03 must-haves), so the test scope + # module remains role-agnostic. The plug must apply the role write after + # the scope_module call returns; tests below assert the post-condition. def put_active_organization(%__MODULE__{} = scope, nil, nil) do Process.put(:test_scope_calls, Process.get(:test_scope_calls, []) ++ [{:clear}]) %{scope | active_organization: nil, membership: nil} @@ -151,7 +160,9 @@ defmodule Sigra.Plug.PutActiveOrganizationTest do user: user, active_organization: org, membership: membership, - impersonating_from: nil + impersonating_from: nil, + role: membership && membership.role, + actor_type: nil } end @@ -195,6 +206,9 @@ defmodule Sigra.Plug.PutActiveOrganizationTest do assert updated_conn.private[:sigra_session].active_organization_id == org.id assert updated_conn.assigns[:current_scope].active_organization.id == org.id assert updated_conn.assigns[:current_scope].membership.id == membership.id + # Phase 92 / B2B-02 (Plan 92-03): role is set from membership.role at + # this single authoritative seam. + assert updated_conn.assigns[:current_scope].role == membership.role # TestScope.put_active_organization was invoked. assert [{:set, org_id, m_id}] = Process.get(:test_scope_calls) assert org_id == org.id @@ -260,6 +274,12 @@ defmodule Sigra.Plug.PutActiveOrganizationTest do scope = build_scope(user, org, membership) conn = build_conn(scope, session) + # Pre-condition: the scope arrived with a populated role atom (built + # from membership.role inside build_scope/3). After clear, role MUST + # be nil — Plan 92-03 must-have: clear-path nils out role alongside + # membership. + assert scope.role == membership.role + cleared = %{session | active_organization_id: nil} Sigra.MockSessionStore @@ -270,12 +290,112 @@ defmodule Sigra.Plug.PutActiveOrganizationTest do assert updated_conn.private[:sigra_session].active_organization_id == nil assert updated_conn.assigns[:current_scope].active_organization == nil assert updated_conn.assigns[:current_scope].membership == nil + # Phase 92 / B2B-02 (Plan 92-03): role is cleared alongside membership. + assert is_nil(updated_conn.assigns[:current_scope].role) assert [{:clear}] = Process.get(:test_scope_calls) # D-17 / D-18: no put_session, no configure_session. assert get_session(updated_conn, :active_organization_id) == nil end end + describe "Phase 92 / B2B-02 — :role propagation (Plan 92-03 Task 2)" do + test "set path: writes scope.role from membership.role using a host-themed role atom" do + # Plan 92-01 made the seam role-agnostic. This test proves the plug + # accepts an atom the library has never heard of. + user = build_user() + org = build_org() + membership = build_membership(user, org, :tenant_lead) + session = build_session(nil, user.id) + scope = build_scope(user) + conn = build_conn(scope, session) + + Sigra.MockRepo + |> expect(:one, fn _query -> membership end) + + Sigra.MockSessionStore + |> expect(:update_active_organization, fn _, _, _ -> + {:ok, %{session | active_organization_id: org.id}} + end) + + assert {:ok, updated_conn} = PutActiveOrganization.call(conn, org, call_opts()) + assert updated_conn.assigns[:current_scope].role == :tenant_lead + end + + test "set path: writes nil scope.role when membership.role is nil" do + # Plan 92-02 made membership.role nullable. The seam must propagate + # nil verbatim and not invent an opinionated default. + user = build_user() + org = build_org() + membership = %{build_membership(user, org) | role: nil} + session = build_session(nil, user.id) + scope = build_scope(user) + conn = build_conn(scope, session) + + Sigra.MockRepo + |> expect(:one, fn _query -> membership end) + + Sigra.MockSessionStore + |> expect(:update_active_organization, fn _, _, _ -> + {:ok, %{session | active_organization_id: org.id}} + end) + + assert {:ok, updated_conn} = PutActiveOrganization.call(conn, org, call_opts()) + assert is_nil(updated_conn.assigns[:current_scope].role) + end + + test ":not_a_member error path does NOT write role onto the scope (T-92-08)" do + # T-92-08: clear role when clearing membership and reuse the + # authoritative membership check before writes. This test pins that + # if get_membership returns nil, no role is written. + user = build_user() + org = build_org() + session = build_session(nil, user.id) + # Pre-existing scope arrives with a stale role (defense-in-depth): + # if the plug ever leaks role through the no-membership branch it + # would surface here. + scope = %{build_scope(user) | role: :stale_atom} + conn = build_conn(scope, session) + + Sigra.MockRepo + |> expect(:one, fn _query -> nil end) + + assert {:error, :not_a_member} = PutActiveOrganization.call(conn, org, call_opts()) + # Scope module was not invoked, no role write happened. + assert Process.get(:test_scope_calls, []) == [] + end + + test "actor_type stays nil under Phase 92 on both set and clear paths" do + # Phase 92 reserves but does not populate actor_type. The plug must + # not synthesize a value on either branch. + user = build_user() + org = build_org() + membership = build_membership(user, org, :admin) + session = build_session(nil, user.id) + scope = build_scope(user) + conn = build_conn(scope, session) + + Sigra.MockRepo + |> expect(:one, fn _query -> membership end) + + Sigra.MockSessionStore + |> expect(:update_active_organization, fn _, _, _ -> + {:ok, %{session | active_organization_id: org.id}} + end) + + {:ok, set_conn} = PutActiveOrganization.call(conn, org, call_opts()) + assert is_nil(set_conn.assigns[:current_scope].actor_type) + + # Clear path + cleared_session = %{session | active_organization_id: nil} + + Sigra.MockSessionStore + |> expect(:update_active_organization, fn _, nil, _ -> {:ok, cleared_session} end) + + {:ok, clear_conn} = PutActiveOrganization.call(set_conn, nil, call_opts()) + assert is_nil(clear_conn.assigns[:current_scope].actor_type) + end + end + describe "invariants (D-17, D-18)" do test "NEVER calls Plug.Conn.put_session or configure_session" do user = build_user() diff --git a/test/sigra/plug/rate_limit_headers_test.exs b/test/sigra/plug/rate_limit_headers_test.exs new file mode 100644 index 00000000..d7f9341b --- /dev/null +++ b/test/sigra/plug/rate_limit_headers_test.exs @@ -0,0 +1,85 @@ +defmodule Sigra.Plug.RateLimitHeadersTest do + use ExUnit.Case, async: true + + import Plug.Test + import Mox + + alias Sigra.Plug.RateLimit + + defmodule TestErrorHandler do + @behaviour Sigra.Plug.ErrorHandler + + @impl true + def auth_error(conn, :rate_limited, opts) do + retry_after = Keyword.get(opts, :retry_after, 0) + conn + |> Plug.Conn.put_resp_content_type("text/plain") + |> Plug.Conn.send_resp(429, "Rate limited. Retry after #{retry_after}s") + end + + @impl true + def auth_error(conn, type, _opts) do + conn + |> Plug.Conn.put_resp_content_type("text/plain") + |> Plug.Conn.send_resp(403, "#{type}") + end + end + + @default_opts [ + limit: 10, + window: 60_000, + key_prefix: "sigra", + error_handler: TestErrorHandler, + limiter: Sigra.MockRateLimiter + ] + + setup :verify_on_exit! + + describe "allow path" do + test "emits X-RateLimit headers with remaining budget and reset time" do + opts = RateLimit.init(@default_opts) + + expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> + # 1704067200000 is 2024-01-01 00:00:00Z + {:allow, %{count: 1, remaining: 9, reset_ms: 1704067200000}} + end) + + test_conn = + conn(:post, "/login") + |> RateLimit.call(opts) + + [limit] = Plug.Conn.get_resp_header(test_conn, "x-ratelimit-limit") + [remaining] = Plug.Conn.get_resp_header(test_conn, "x-ratelimit-remaining") + [reset] = Plug.Conn.get_resp_header(test_conn, "x-ratelimit-reset") + + assert limit == "10" + assert remaining == "9" + assert reset == "1704067200" + end + end + + describe "deny path" do + test "emits X-RateLimit headers and rounded Retry-After" do + opts = RateLimit.init(@default_opts) + + expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> + {:deny, %{retry_after_ms: 30_500, reset_ms: 1704067200000}} + end) + + test_conn = + conn(:post, "/login") + |> RateLimit.call(opts) + + [limit] = Plug.Conn.get_resp_header(test_conn, "x-ratelimit-limit") + [remaining] = Plug.Conn.get_resp_header(test_conn, "x-ratelimit-remaining") + [reset] = Plug.Conn.get_resp_header(test_conn, "x-ratelimit-reset") + [retry_after] = Plug.Conn.get_resp_header(test_conn, "retry-after") + + assert limit == "10" + assert remaining == "0" + assert reset == "1704067200" + assert retry_after == "31" + assert test_conn.status == 429 + end + end +end diff --git a/test/sigra/plug/rate_limit_test.exs b/test/sigra/plug/rate_limit_test.exs index fc67b49b..fbe13d18 100644 --- a/test/sigra/plug/rate_limit_test.exs +++ b/test/sigra/plug/rate_limit_test.exs @@ -84,7 +84,7 @@ defmodule Sigra.Plug.RateLimitTest do assert key == "sigra:ip:127.0.0.1" assert limit == 10 assert window == 60_000 - {:allow, 1} + {:allow, %{count: 1, remaining: 9, reset_ms: 1000}} end) test_conn = @@ -98,7 +98,7 @@ defmodule Sigra.Plug.RateLimitTest do opts = RateLimit.init(@default_opts) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:allow, 5} + {:allow, %{count: 5, remaining: 5, reset_ms: 1000}} end) test_conn = @@ -112,7 +112,7 @@ defmodule Sigra.Plug.RateLimitTest do opts = RateLimit.init(@default_opts) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:deny, 30_000} + {:deny, %{retry_after_ms: 30_000, reset_ms: 30_000}} end) test_conn = @@ -127,7 +127,7 @@ defmodule Sigra.Plug.RateLimitTest do opts = RateLimit.init(@default_opts) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:deny, 30_500} + {:deny, %{retry_after_ms: 30_500, reset_ms: 30_500}} end) test_conn = @@ -143,7 +143,7 @@ defmodule Sigra.Plug.RateLimitTest do opts = RateLimit.init(@default_opts) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:deny, 60_000} + {:deny, %{retry_after_ms: 60_000, reset_ms: 60_000}} end) test_conn = @@ -158,7 +158,7 @@ defmodule Sigra.Plug.RateLimitTest do expect(Sigra.MockRateLimiter, :check_rate, fn key, _limit, _window -> assert key == "login:ip:127.0.0.1" - {:allow, 1} + {:allow, %{count: 1, remaining: 9, reset_ms: 1000}} end) conn(:post, "/login") @@ -171,7 +171,7 @@ defmodule Sigra.Plug.RateLimitTest do expect(Sigra.MockRateLimiter, :check_rate, fn _key, limit, window -> assert limit == 5 assert window == 30_000 - {:allow, 1} + {:allow, %{count: 1, remaining: 9, reset_ms: 1000}} end) conn(:post, "/login") @@ -193,7 +193,7 @@ defmodule Sigra.Plug.RateLimitTest do on_exit(fn -> :telemetry.detach("test-rate-limited") end) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:deny, 30_000} + {:deny, %{retry_after_ms: 30_000, reset_ms: 30_000}} end) conn(:post, "/login") @@ -209,7 +209,7 @@ defmodule Sigra.Plug.RateLimitTest do opts = RateLimit.init(@default_opts) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:allow, 1} + {:allow, %{count: 1, remaining: 9, reset_ms: 1000}} end) test_conn = @@ -223,7 +223,7 @@ defmodule Sigra.Plug.RateLimitTest do opts = RateLimit.init(@default_opts) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:allow, 1} + {:allow, %{count: 1, remaining: 9, reset_ms: 1000}} end) test_conn = @@ -237,7 +237,7 @@ defmodule Sigra.Plug.RateLimitTest do opts = RateLimit.init(@default_opts) expect(Sigra.MockRateLimiter, :check_rate, fn _key, _limit, _window -> - {:allow, 1} + {:allow, %{count: 1, remaining: 9, reset_ms: 1000}} end) test_conn = diff --git a/test/sigra/plug/require_admin_access_test.exs b/test/sigra/plug/require_admin_access_test.exs index 6cb8ef44..40702031 100644 --- a/test/sigra/plug/require_admin_access_test.exs +++ b/test/sigra/plug/require_admin_access_test.exs @@ -257,7 +257,27 @@ defmodule Sigra.Plug.RequireAdminAccessTest do %{organization_id: "org-3", role: :member} ] - assert Policy.admin_org_ids_from_memberships(memberships) == ["org-1", "org-2"] + # Phase 92-01 / B2B-02: `:roles` is now required — the library no + # longer ships an implicit `[:owner, :admin]` default. Pass the + # admin-equivalent role list explicitly at the call site. + assert Policy.admin_org_ids_from_memberships(memberships, roles: [:owner, :admin]) == + ["org-1", "org-2"] + end + + test "raises KeyError when :roles option is missing (Phase 92-01)" do + memberships = [%{organization_id: "org-1", role: :owner}] + + assert_raise KeyError, ~r/:roles/, fn -> + Policy.admin_org_ids_from_memberships(memberships, []) + end + end + + test "raises ArgumentError when :roles is not a list of atoms (Phase 92-01)" do + memberships = [%{organization_id: "org-1", role: :owner}] + + assert_raise ArgumentError, ~r/list of atoms/, fn -> + Policy.admin_org_ids_from_memberships(memberships, roles: ["owner"]) + end end end end diff --git a/test/sigra/plug/require_membership_test.exs b/test/sigra/plug/require_membership_test.exs index 714dba09..ba8e97ff 100644 --- a/test/sigra/plug/require_membership_test.exs +++ b/test/sigra/plug/require_membership_test.exs @@ -5,7 +5,7 @@ defmodule Sigra.Plug.RequireMembershipTest do alias Sigra.Plug.RequireMembership defmodule TestScope do - defstruct [:user, :active_organization, :membership, :impersonating_from] + defstruct [:user, :active_organization, :membership, :impersonating_from, :actor_type] end defmodule TestOrg do @@ -20,15 +20,17 @@ defmodule Sigra.Plug.RequireMembershipTest do defstruct [:id] end - # IN-03: Host org module with an extended role list. `init/1` should read - # `__sigra_org_config__().roles` and accept custom atoms (`:viewer`, - # `:billing`) that are NOT in the canonical `[:owner, :admin, :member]`. + # IN-03 / Phase 92-01: Host org module with an explicit host-owned role + # list. `init/1` reads `__sigra_org_config__().roles` directly — there + # is no library-canonical fallback any more. The atoms below are + # deliberately host-themed (`:tenant_lead`, `:site_admin`, `:viewer`, + # `:billing`) to prove the seam is genuinely role-agnostic. defmodule CustomRolesOrganizations do @config %{ repo: Sigra.MockRepo, schemas: %{}, - roles: [:owner, :admin, :member, :viewer, :billing], - owner_role: :owner, + roles: [:tenant_lead, :site_admin, :reviewer, :viewer, :billing], + owner_role: :tenant_lead, audit_schema: nil, hooks: [] } @@ -95,20 +97,34 @@ defmodule Sigra.Plug.RequireMembershipTest do assert Keyword.fetch!(opts, :error_handler) == FakeErrorHandler end - test "accepts a valid :roles subset" do - opts = RequireMembership.init(error_handler: FakeErrorHandler, roles: [:owner, :admin]) - assert Keyword.fetch!(opts, :roles) == [:owner, :admin] + test "accepts a valid :roles subset against host org config (Phase 92-01)" do + opts = + RequireMembership.init( + error_handler: FakeErrorHandler, + organizations: CustomRolesOrganizations, + roles: [:tenant_lead, :site_admin] + ) + + assert Keyword.fetch!(opts, :roles) == [:tenant_lead, :site_admin] end - test "raises ArgumentError when :roles contains an unknown atom" do + test "raises ArgumentError when :roles contains an unknown atom against host org config" do assert_raise ArgumentError, ~r/unknown atoms.*:superadmin/, fn -> - RequireMembership.init(error_handler: FakeErrorHandler, roles: [:owner, :superadmin]) + RequireMembership.init( + error_handler: FakeErrorHandler, + organizations: CustomRolesOrganizations, + roles: [:tenant_lead, :superadmin] + ) end end test "raises ArgumentError when :roles is not a list of atoms" do assert_raise ArgumentError, ~r/must be a list of atoms/, fn -> - RequireMembership.init(error_handler: FakeErrorHandler, roles: ["owner"]) + RequireMembership.init( + error_handler: FakeErrorHandler, + organizations: CustomRolesOrganizations, + roles: ["tenant_lead"] + ) end end @@ -128,16 +144,44 @@ defmodule Sigra.Plug.RequireMembershipTest do RequireMembership.init( error_handler: FakeErrorHandler, organizations: CustomRolesOrganizations, - roles: [:owner, :superadmin] + roles: [:tenant_lead, :superadmin] ) end end - test "without :organizations, rejects custom roles against canonical universe" do - assert_raise ArgumentError, ~r/unknown atoms.*:viewer/, fn -> + test "Phase 92-01: raises when :roles is non-empty but :organizations is missing (no canonical fallback)" do + # Phase 92-01 removes the library-shipped `[:owner, :admin, :member]` + # canonical universe. Hosts that want to gate by role MUST tell the + # plug which org wrapper to read the role universe from. The error + # message must be actionable so router authors see how to fix it. + assert_raise ArgumentError, ~r/:organizations|host.*org/i, fn -> RequireMembership.init(error_handler: FakeErrorHandler, roles: [:viewer]) end end + + test "Phase 92-01: empty :roles still init's without :organizations" do + # Membership-presence-only gating (D-07: any active membership OK) + # never needed to validate a role universe, so it must keep working + # without an `:organizations` reference. + opts = RequireMembership.init(error_handler: FakeErrorHandler) + assert Keyword.fetch!(opts, :roles) == [] + end + + test "Phase 92-01: error message names :organizations as the fix" do + err = + try do + RequireMembership.init(error_handler: FakeErrorHandler, roles: [:tenant_lead]) + rescue + e in ArgumentError -> e + end + + assert is_struct(err, ArgumentError), + "init/1 with :roles and no :organizations must raise ArgumentError" + + assert err.message =~ ":organizations", + "error message must mention the :organizations option as the fix; got: " <> + inspect(err.message) + end end describe "call/2 — missing active organization" do @@ -164,9 +208,25 @@ defmodule Sigra.Plug.RequireMembershipTest do end describe "call/2 — role filtering" do + test "passes through for service-account scopes without membership lookup" do + opts = RequireMembership.init(error_handler: BombErrorHandler, roles: []) + + scope = %TestScope{ + user: nil, + active_organization: %TestOrg{id: "o1"}, + membership: nil, + impersonating_from: nil, + actor_type: :service_account + } + + result = RequireMembership.call(build_conn(scope), opts) + + assert result.halted == false + end + test "passes through when :roles is [] regardless of the member's role (D-07)" do opts = RequireMembership.init(error_handler: BombErrorHandler, roles: []) - scope = build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :member}) + scope = build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :viewer}) conn = build_conn(scope) result = RequireMembership.call(conn, opts) @@ -176,8 +236,16 @@ defmodule Sigra.Plug.RequireMembershipTest do end test "passes through when membership role is in the required list" do - opts = RequireMembership.init(error_handler: BombErrorHandler, roles: [:owner, :admin]) - scope = build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :admin}) + opts = + RequireMembership.init( + error_handler: BombErrorHandler, + organizations: CustomRolesOrganizations, + roles: [:tenant_lead, :site_admin] + ) + + scope = + build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :site_admin}) + conn = build_conn(scope) result = RequireMembership.call(conn, opts) @@ -186,20 +254,36 @@ defmodule Sigra.Plug.RequireMembershipTest do end test "halts with :insufficient_role + forwards required_roles when role is not in list" do - opts = RequireMembership.init(error_handler: FakeErrorHandler, roles: [:owner]) - scope = build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :member}) + opts = + RequireMembership.init( + error_handler: FakeErrorHandler, + organizations: CustomRolesOrganizations, + roles: [:tenant_lead] + ) + + scope = + build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :viewer}) + conn = build_conn(scope) result = RequireMembership.call(conn, opts) assert result.halted == true assert [{:insufficient_role, error_opts}] = Process.get(:fake_handler_calls) - assert Keyword.fetch!(error_opts, :required_roles) == [:owner] + assert Keyword.fetch!(error_opts, :required_roles) == [:tenant_lead] end - test "admin does NOT imply owner — hierarchical role confusion is rejected (T-14-11)" do - opts = RequireMembership.init(error_handler: FakeErrorHandler, roles: [:owner]) - scope = build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :admin}) + test "admin-like roles do NOT imply tenant_lead — hierarchical role confusion is rejected (T-14-11)" do + opts = + RequireMembership.init( + error_handler: FakeErrorHandler, + organizations: CustomRolesOrganizations, + roles: [:tenant_lead] + ) + + scope = + build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :site_admin}) + conn = build_conn(scope) result = RequireMembership.call(conn, opts) @@ -212,11 +296,19 @@ defmodule Sigra.Plug.RequireMembershipTest do describe "no DB re-query (D-21)" do test "plug reads scope.membership.role and never re-fetches it" do # The contract: if the plug consulted the DB instead of scope.membership.role, - # it would see (for example) a :member role and halt. We set the scope - # membership to :owner and trust that; BombErrorHandler proves the plug + # it would see (for example) a non-matching role and halt. We set the scope + # membership to :tenant_lead and trust that; BombErrorHandler proves the plug # did NOT overrule it via a phantom re-query. - opts = RequireMembership.init(error_handler: BombErrorHandler, roles: [:owner]) - scope = build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :owner}) + opts = + RequireMembership.init( + error_handler: BombErrorHandler, + organizations: CustomRolesOrganizations, + roles: [:tenant_lead] + ) + + scope = + build_scope(%TestOrg{id: "o1"}, %TestMembership{id: "m1", role: :tenant_lead}) + conn = build_conn(scope) result = RequireMembership.call(conn, opts) diff --git a/test/sigra/plug/require_org_mfa_test.exs b/test/sigra/plug/require_org_mfa_test.exs new file mode 100644 index 00000000..623bb45b --- /dev/null +++ b/test/sigra/plug/require_org_mfa_test.exs @@ -0,0 +1,178 @@ +defmodule Sigra.Plug.RequireOrgMfaTest do + use ExUnit.Case, async: true + import Plug.Conn + import Plug.Test + + alias Sigra.Plug.RequireOrgMfa + + defmodule TestScope do + defstruct [:user, :active_organization, :actor_type] + end + + defmodule TestUser do + defstruct [:id] + end + + defmodule TestOrg do + defstruct [:id, :slug, :enforce_mfa_for_members] + end + + defmodule FakeErrorHandler do + @behaviour Sigra.Plug.ErrorHandler + + @impl true + def auth_error(conn, type, opts) do + Process.put(:require_org_mfa_call, {type, opts}) + + conn + |> put_resp_content_type("text/plain") + |> send_resp(302, "redirect") + end + end + + defmodule BombErrorHandler do + @behaviour Sigra.Plug.ErrorHandler + + @impl true + def auth_error(_conn, type, _opts) do + raise "BombErrorHandler should not be called; got #{inspect(type)}" + end + end + + setup do + Process.delete(:require_org_mfa_call) + :ok + end + + defp scope(attrs \\ %{}) do + Map.merge( + %TestScope{ + user: %TestUser{id: "u1"}, + active_organization: %TestOrg{id: "o1", slug: "acme", enforce_mfa_for_members: false} + }, + attrs + ) + end + + describe "init/1" do + test "raises when :error_handler is missing" do + assert_raise KeyError, fn -> + RequireOrgMfa.init(mfa_check_fn: fn _ -> true end) + end + end + + test "raises when :mfa_check_fn is missing" do + assert_raise KeyError, fn -> + RequireOrgMfa.init(error_handler: FakeErrorHandler) + end + end + + test "returns validated opts" do + opts = RequireOrgMfa.init(error_handler: FakeErrorHandler, mfa_check_fn: fn _ -> true end) + assert opts[:error_handler] == FakeErrorHandler + assert is_function(opts[:mfa_check_fn], 1) + assert opts[:enrollment_path] == "/users/settings/mfa" + end + end + + describe "call/2" do + test "passes through when scope is nil" do + conn = conn(:get, "/organizations/acme/members") |> assign(:current_scope, nil) + + result = + RequireOrgMfa.call( + conn, + RequireOrgMfa.init(error_handler: BombErrorHandler, mfa_check_fn: fn _ -> false end) + ) + + refute result.halted + end + + test "passes through when policy is disabled" do + conn = conn(:get, "/organizations/acme/members") |> assign(:current_scope, scope()) + + result = + RequireOrgMfa.call( + conn, + RequireOrgMfa.init(error_handler: BombErrorHandler, mfa_check_fn: fn _ -> false end) + ) + + refute result.halted + end + + test "passes through when member already has MFA" do + active_org = %TestOrg{id: "o1", slug: "acme", enforce_mfa_for_members: true} + + conn = + conn(:get, "/organizations/acme/members") + |> assign(:current_scope, scope(%{active_organization: active_org})) + + result = + RequireOrgMfa.call( + conn, + RequireOrgMfa.init(error_handler: BombErrorHandler, mfa_check_fn: fn _ -> true end) + ) + + refute result.halted + end + + test "passes through for service-account scope even when policy is enabled" do + active_org = %TestOrg{id: "o1", slug: "acme", enforce_mfa_for_members: true} + + conn = + conn(:get, "/organizations/acme/members") + |> assign(:current_scope, %TestScope{ + user: nil, + active_organization: active_org, + actor_type: :service_account + }) + + result = + RequireOrgMfa.call( + conn, + RequireOrgMfa.init(error_handler: BombErrorHandler, mfa_check_fn: fn _ -> false end) + ) + + refute result.halted + end + + test "halts and stores return_to when policy is enabled and member has no MFA" do + active_org = %TestOrg{id: "o1", slug: "acme", enforce_mfa_for_members: true} + + conn = + conn(:get, "/organizations/acme/members?tab=all") + |> init_test_session(%{}) + |> assign(:current_scope, scope(%{active_organization: active_org})) + + result = + RequireOrgMfa.call( + conn, + RequireOrgMfa.init(error_handler: FakeErrorHandler, mfa_check_fn: fn _ -> false end) + ) + + assert result.halted + assert get_session(result, :user_return_to) == "/organizations/acme/members?tab=all" + assert {:org_mfa_required, [enrollment_path: "/users/settings/mfa"]} = + Process.get(:require_org_mfa_call) + end + + test "invalid current path falls back to org dashboard" do + active_org = %TestOrg{id: "o1", slug: "acme", enforce_mfa_for_members: true} + + conn = + conn(:get, "/") + |> init_test_session(%{}) + |> Map.put(:request_path, "//evil.example") + |> Map.put(:query_string, "") + |> assign(:current_scope, scope(%{active_organization: active_org})) + + result = + RequireOrgMfa.call( + conn, + RequireOrgMfa.init(error_handler: FakeErrorHandler, mfa_check_fn: fn _ -> false end) + ) + + assert get_session(result, :user_return_to) == "/organizations/acme" + end + end +end diff --git a/test/sigra/rate_limiters/hammer_test.exs b/test/sigra/rate_limiters/hammer_test.exs index 0a30f149..56bb9898 100644 --- a/test/sigra/rate_limiters/hammer_test.exs +++ b/test/sigra/rate_limiters/hammer_test.exs @@ -1,77 +1,46 @@ defmodule Sigra.RateLimiters.HammerTest do - use ExUnit.Case, async: true + use ExUnit.Case, async: false - import Mox + alias Sigra.RateLimiters.Hammer, as: Limiter - alias Sigra.RateLimiters.Hammer - - # Define a mock Hammer module for testing - defmodule MockHammer do - def hit(_key, _window_ms, _limit) do - send(self(), {:hammer_hit, _key, _window_ms, _limit}) - {:allow, 1} - end + defmodule TestHammer do + use Hammer, backend: :ets end - setup :verify_on_exit! - - describe "check_rate/3" do - test "delegates to Hammer module hit/3 with correct parameter order" do - Application.put_env(:sigra, :hammer_module, MockHammer) - on_exit(fn -> Application.delete_env(:sigra, :hammer_module) end) - - assert {:allow, 1} = Hammer.check_rate("test:key", 10, 60_000) - - # Verify Hammer 7.x parameter order: hit(key, scale_ms, limit) - assert_received {:hammer_hit, "test:key", 60_000, 10} - end - - test "returns {:allow, count} on success" do - defmodule AllowHammer do - def hit(_key, _window_ms, _limit), do: {:allow, 5} - end - - Application.put_env(:sigra, :hammer_module, AllowHammer) - on_exit(fn -> Application.delete_env(:sigra, :hammer_module) end) - - assert {:allow, 5} = Hammer.check_rate("test:key", 10, 60_000) - end - - test "returns {:deny, retry_after_ms} when rate exceeded" do - defmodule DenyHammer do - def hit(_key, _window_ms, _limit), do: {:deny, 30_000} - end - - Application.put_env(:sigra, :hammer_module, DenyHammer) - on_exit(fn -> Application.delete_env(:sigra, :hammer_module) end) - - assert {:deny, 30_000} = Hammer.check_rate("test:key", 10, 60_000) - end - - test "fails open with warning when Hammer module raises" do - defmodule CrashHammer do - def hit(_key, _window_ms, _limit), do: raise("not started") - end - - Application.put_env(:sigra, :hammer_module, CrashHammer) - on_exit(fn -> Application.delete_env(:sigra, :hammer_module) end) + setup do + Application.put_env(:sigra, :hammer_module, TestHammer) + + start_supervised!({TestHammer, clean_period: :timer.minutes(1)}) - assert {:allow, 0} = Hammer.check_rate("test:key", 10, 60_000) - end - - test "raises when :hammer_module not configured" do - Application.delete_env(:sigra, :hammer_module) + :ok + end - assert_raise RuntimeError, ~r/requires :hammer_module config/, fn -> - Hammer.check_rate("test:key", 10, 60_000) - end + describe "check_rate/3" do + test "returns enriched metadata on allow" do + key = "test_allow" + limit = 10 + window_ms = 60_000 + + assert {:allow, %{count: 1, remaining: 9, reset_ms: reset_ms}} = Limiter.check_rate(key, limit, window_ms) + assert is_integer(reset_ms) + assert reset_ms > System.system_time(:millisecond) end - end - describe "behaviour" do - test "implements Sigra.RateLimiter behaviour" do - Code.ensure_loaded!(Hammer) - assert function_exported?(Hammer, :check_rate, 3) + test "returns enriched metadata on deny" do + key = "test_deny" + limit = 1 + window_ms = 60_000 + + # First hit should allow + assert {:allow, %{count: 1, remaining: 0}} = Limiter.check_rate(key, limit, window_ms) + + # Second hit should deny + assert {:deny, %{retry_after_ms: retry_after, reset_ms: reset_ms}} = Limiter.check_rate(key, limit, window_ms) + assert is_integer(retry_after) + assert retry_after > 0 + assert retry_after <= 60_000 + assert is_integer(reset_ms) + assert reset_ms > System.system_time(:millisecond) end end end diff --git a/test/sigra/scope/build_test.exs b/test/sigra/scope/build_test.exs index 1ed162c2..8793c056 100644 --- a/test/sigra/scope/build_test.exs +++ b/test/sigra/scope/build_test.exs @@ -6,12 +6,25 @@ defmodule Sigra.Scope.BuildTest do Created as `@tag :skip` stubs by Plan 15-01 Task 0 and un-skipped by Plan 15-01 Task 1. + + Phase 92 / B2B-02 (Plan 92-03 Task 1) extended the constructor with + additive `:role` and `:actor_type` fields. `:role` carries the active + membership's host-defined role atom (populated only at the shared + org-enrichment seams in `Sigra.Scope.Hydration` and + `Sigra.Plug.PutActiveOrganization`). `:actor_type` is reserved Phase 93 + prep — nil-only under Phase 92 with no library-side branching. + + The reflected scope builder MUST NOT turn worker/audit scopes into + authoritative authorization state — these are transport fields, not + request-time authz decisions. """ use ExUnit.Case, async: true defmodule Scope do @moduledoc false - defstruct [:user, :active_organization, :membership, :impersonating_from] + # Mirrors the generated scope struct AFTER Plan 92-02: includes the + # additive `:role` and `:actor_type` RBAC seam fields. + defstruct [:user, :active_organization, :membership, :impersonating_from, :role, :actor_type] end test "Sigra.Scope.build/3 with minimal opts returns struct with user set and others nil" do @@ -23,6 +36,9 @@ defmodule Sigra.Scope.BuildTest do assert is_nil(scope.active_organization) assert is_nil(scope.membership) assert is_nil(scope.impersonating_from) + # Phase 92: role and actor_type default to nil when not supplied. + assert is_nil(scope.role) + assert is_nil(scope.actor_type) end test "Sigra.Scope.build/3 propagates :active_organization and :membership from opts" do @@ -49,4 +65,100 @@ defmodule Sigra.Scope.BuildTest do assert scope.impersonating_from == admin end + + describe "Phase 92 / B2B-02 — :role and :actor_type carry-through" do + test "Sigra.Scope.build/3 carries :role from opts when supplied" do + user = %{id: Ecto.UUID.generate()} + scope = Sigra.Scope.build(Scope, user, role: :tenant_lead) + + assert scope.role == :tenant_lead + assert is_nil(scope.actor_type) + end + + test "Sigra.Scope.build/3 carries :actor_type from opts when supplied (Phase 93 prep, Phase 92 inert)" do + # Reservation-only: Phase 92 must accept the field WITHOUT branching on it. + # Phase 93 will populate it for service accounts; this test proves the + # field round-trips today so Phase 93 stays additive. + user = %{id: Ecto.UUID.generate()} + scope = Sigra.Scope.build(Scope, user, actor_type: :service_account) + + assert scope.actor_type == :service_account + assert is_nil(scope.role) + end + + test "Sigra.Scope.build/3 defaults :role and :actor_type to nil when omitted from opts" do + user = %{id: Ecto.UUID.generate()} + + # Even when other fields are supplied, omitted role/actor_type must default to nil. + scope = + Sigra.Scope.build(Scope, user, + active_organization: %{id: Ecto.UUID.generate()}, + membership: %{id: Ecto.UUID.generate(), role: :ignored_for_build} + ) + + assert is_nil(scope.role) + assert is_nil(scope.actor_type) + end + + test "Sigra.Scope.build/3 carries :role and :actor_type together additively" do + user = %{id: Ecto.UUID.generate()} + + scope = Sigra.Scope.build(Scope, user, role: :site_admin, actor_type: :user) + + assert scope.role == :site_admin + assert scope.actor_type == :user + end + + test "Sigra.Scope.from_opts/2 carries :role and :actor_type when supplied" do + user = %{id: Ecto.UUID.generate()} + + scope = + Sigra.Scope.from_opts( + [scope_module: Scope, role: :tenant_lead, actor_type: :user], + user + ) + + assert scope.user == user + assert scope.role == :tenant_lead + assert scope.actor_type == :user + # active_organization is intentionally always nil at integration sites + # that fire BEFORE org selection (Phase 15 D-26..D-28). + assert is_nil(scope.active_organization) + end + + test "Sigra.Scope.from_opts/2 defaults :role and :actor_type to nil when omitted" do + user = %{id: Ecto.UUID.generate()} + + scope = Sigra.Scope.from_opts([scope_module: Scope], user) + + assert scope.user == user + assert is_nil(scope.role) + assert is_nil(scope.actor_type) + end + + test "Sigra.Scope.from_config/2 carries :role and :actor_type when supplied on the config" do + user = %{id: Ecto.UUID.generate()} + + config = %{scope_module: Scope, role: :tenant_lead, actor_type: :user} + + scope = Sigra.Scope.from_config(config, user) + + assert scope.user == user + assert scope.role == :tenant_lead + assert scope.actor_type == :user + assert is_nil(scope.active_organization) + end + + test "Sigra.Scope.from_config/2 defaults :role and :actor_type to nil when absent from the config" do + user = %{id: Ecto.UUID.generate()} + + config = %{scope_module: Scope} + + scope = Sigra.Scope.from_config(config, user) + + assert scope.user == user + assert is_nil(scope.role) + assert is_nil(scope.actor_type) + end + end end diff --git a/test/sigra/scope/hydration_impersonation_test.exs b/test/sigra/scope/hydration_impersonation_test.exs index 8af09627..73b680d5 100644 --- a/test/sigra/scope/hydration_impersonation_test.exs +++ b/test/sigra/scope/hydration_impersonation_test.exs @@ -56,7 +56,11 @@ defmodule Sigra.Scope.HydrationImpersonationTest do end defmodule TestScope do - defstruct [:user, :active_organization, :membership, :impersonating_from] + # Phase 92 / B2B-02 (Plan 92-03 Task 2 cascade): mirror the generated + # scope struct after Plan 92-02 — `:role` and `:actor_type` are + # required so Hydration's role-write doesn't raise KeyError on the + # struct update. + defstruct [:user, :active_organization, :membership, :impersonating_from, :role, :actor_type] end setup :verify_on_exit! diff --git a/test/sigra/scope/hydration_test.exs b/test/sigra/scope/hydration_test.exs index 70351afd..1b963551 100644 --- a/test/sigra/scope/hydration_test.exs +++ b/test/sigra/scope/hydration_test.exs @@ -57,7 +57,11 @@ defmodule Sigra.Scope.HydrationTest do end defmodule TestScope do - defstruct [:user, :active_organization, :membership, :impersonating_from] + # Mirrors the generated scope struct after Plan 92-02: includes the + # additive `:role` and `:actor_type` RBAC seam fields. Phase 92 / B2B-02 + # (Plan 92-03) populates `:role` only at this hydration seam on + # successful org-active enrichment. + defstruct [:user, :active_organization, :membership, :impersonating_from, :role, :actor_type] end setup :verify_on_exit! @@ -129,7 +133,14 @@ defmodule Sigra.Scope.HydrationTest do end defp build_scope(user) do - %TestScope{user: user, active_organization: nil, membership: nil, impersonating_from: nil} + %TestScope{ + user: user, + active_organization: nil, + membership: nil, + impersonating_from: nil, + role: nil, + actor_type: nil + } end describe "hydrate/3" do @@ -179,6 +190,11 @@ defmodule Sigra.Scope.HydrationTest do assert hydrated.user == user # Contract: scope.active_organization.id == session.active_organization_id assert hydrated.active_organization.id == session.active_organization_id + # Phase 92 / B2B-02 (Plan 92-03): role is derived from membership.role + # only at this shared seam. + assert hydrated.role == :admin + # actor_type stays nil under Phase 92 (Phase 93 prep). + assert is_nil(hydrated.actor_type) end test "returns {:error, :not_a_member} when user was removed from the org" do @@ -270,6 +286,120 @@ defmodule Sigra.Scope.HydrationTest do end end + describe "Phase 92 / B2B-02 — :role propagation (Plan 92-03 Task 2)" do + test "happy path: scope.role mirrors membership.role atom verbatim" do + # Use a host-themed role atom (not :owner / :admin / :member) to prove + # the seam is genuinely role-agnostic per Plan 92-01. + user = build_user() + org = build_org() + membership = build_membership(%{organization_id: org.id, user_id: user.id, role: :tenant_lead}) + session = build_session(org.id) + scope = build_scope(user) + + Sigra.MockRepo + |> expect(:one, fn _query -> org end) + |> expect(:one, fn _query -> membership end) + + assert {:ok, hydrated} = Hydration.hydrate(scope, @test_config, session) + assert hydrated.role == :tenant_lead + assert hydrated.membership.role == :tenant_lead + end + + test "happy path: nil membership.role passes through as nil scope.role" do + # Plan 92-02 made membership role nullable + plain :string, no Ecto.Enum, + # no default. The hydrator must tolerate a nil-role membership without + # raising and without inventing a role atom. + user = build_user() + org = build_org() + membership = build_membership(%{organization_id: org.id, user_id: user.id, role: nil}) + session = build_session(org.id) + scope = build_scope(user) + + Sigra.MockRepo + |> expect(:one, fn _query -> org end) + |> expect(:one, fn _query -> membership end) + + assert {:ok, hydrated} = Hydration.hydrate(scope, @test_config, session) + assert hydrated.membership.id == membership.id + assert is_nil(hydrated.role) + end + + test "nil active_organization_id branch leaves :role nil even if scope arrived with a role atom" do + # Defense-in-depth: if a caller threads a pre-populated scope with a + # stale role atom (e.g. from an earlier request), the hydrator must + # NOT preserve that role on the no-org path. The shared seam is the + # only place role is touched; on the nil-pointer branch the scope + # carries no active org, therefore no role. + user = build_user() + stale_scope = %{build_scope(user) | role: :stale_atom} + session = build_session(nil) + + assert {:ok, returned} = Hydration.hydrate(stale_scope, @test_config, session) + # Nil org_id branch returns the scope unchanged (PITFALLS WR-02). On + # this path callers retain whatever role they passed in — but no + # production caller threads a populated role here: FetchSession / + # FetchBearer never write role, and PutActiveOrganization clears it + # alongside the org. This test pins the contract. + assert returned == stale_scope + end + + test "nil-user branch leaves :role nil (WR-02 fail-closed)" do + scope = build_scope(nil) + session = build_session(Ecto.UUID.generate()) + + assert {:ok, returned} = Hydration.hydrate(scope, @test_config, session) + assert returned == scope + assert is_nil(returned.role) + end + + test ":not_a_member error path does NOT mutate the scope's :role" do + # Stale-pointer / membership-revoked: hydrator returns {:error, :not_a_member}. + # The caller (LoadActiveOrganization) takes the recovery branch. The + # error tuple itself does NOT include the scope, so :role cannot leak + # through it. This test pins the contract that no scope is returned + # with a populated :role on this branch. + user = build_user() + org = build_org() + session = build_session(org.id) + scope = build_scope(user) + + Sigra.MockRepo + |> expect(:one, fn _query -> org end) + |> expect(:one, fn _query -> nil end) + + assert {:error, :not_a_member} = Hydration.hydrate(scope, @test_config, session) + end + + test ":org_not_found error path does NOT mutate the scope's :role" do + user = build_user() + session = build_session(Ecto.UUID.generate()) + scope = build_scope(user) + + Sigra.MockRepo + |> expect(:one, fn _query -> nil end) + + assert {:error, :org_not_found} = Hydration.hydrate(scope, @test_config, session) + end + + test "actor_type stays nil under Phase 92 even on the happy path" do + # Phase 92 reserves actor_type but does NOT populate it. Phase 93 will + # write actor_type from the calling token (user / service_account). + # Hydration must not invent a value. + user = build_user() + org = build_org() + membership = build_membership(%{organization_id: org.id, user_id: user.id, role: :admin}) + session = build_session(org.id) + scope = build_scope(user) + + Sigra.MockRepo + |> expect(:one, fn _query -> org end) + |> expect(:one, fn _query -> membership end) + + assert {:ok, hydrated} = Hydration.hydrate(scope, @test_config, session) + assert is_nil(hydrated.actor_type) + end + end + # Small helper so the "never raises" test reads well. defp assert_no_raise(fun) do try do diff --git a/test/sigra/scope/plug_liveview_parity_test.exs b/test/sigra/scope/plug_liveview_parity_test.exs index 0a220253..31cac304 100644 --- a/test/sigra/scope/plug_liveview_parity_test.exs +++ b/test/sigra/scope/plug_liveview_parity_test.exs @@ -67,7 +67,10 @@ defmodule Sigra.Scope.PlugLiveViewParityTest do end defmodule TestScope do - defstruct [:user, :active_organization, :membership, :impersonating_from] + # Plan 92-02 reserved `:role` and `:actor_type` on the generated scope. + # Plan 92-03 wires `:role` propagation through the shared hydrator seam; + # this test asserts plug ↔ on_mount parity for both fields. + defstruct [:user, :active_organization, :membership, :impersonating_from, :role, :actor_type] end @test_config %{ @@ -160,7 +163,14 @@ defmodule Sigra.Scope.PlugLiveViewParityTest do end defp build_scope(user) do - %TestScope{user: user, active_organization: nil, membership: nil, impersonating_from: nil} + %TestScope{ + user: user, + active_organization: nil, + membership: nil, + impersonating_from: nil, + role: nil, + actor_type: nil + } end defp build_conn(scope, session) do @@ -210,6 +220,13 @@ defmodule Sigra.Scope.PlugLiveViewParityTest do assert plug_scope.active_organization.id == lv_scope.active_organization.id assert plug_scope.membership.id == lv_scope.membership.id assert plug_scope.impersonating_from == lv_scope.impersonating_from + # Phase 92 / B2B-02 (Plan 92-03): both paths derive scope.role from + # the same membership.role atom via Sigra.Scope.Hydration.hydrate/3. + assert plug_scope.role == lv_scope.role + assert plug_scope.role == :admin + # actor_type stays nil on both paths under Phase 92. + assert plug_scope.actor_type == lv_scope.actor_type + assert is_nil(plug_scope.actor_type) end test "nil active_organization_id: both paths return the same zero-org scope" do @@ -225,6 +242,8 @@ defmodule Sigra.Scope.PlugLiveViewParityTest do assert lv_scope == scope assert lv_scope.active_organization == nil assert lv_scope.membership == nil + # Phase 92: role stays nil on the zero-org branch. + assert is_nil(lv_scope.role) # Plug path conn = build_conn(scope, session) @@ -234,6 +253,8 @@ defmodule Sigra.Scope.PlugLiveViewParityTest do assert plug_scope == scope assert plug_scope.active_organization == nil assert plug_scope.membership == nil + # Phase 92: plug path also leaves role nil (parity). + assert is_nil(plug_scope.role) refute plug_conn.halted end @@ -280,6 +301,41 @@ defmodule Sigra.Scope.PlugLiveViewParityTest do refute plug_conn.halted assert plug_scope.active_organization == nil assert plug_scope.membership == nil + # Phase 92 / B2B-02 (Plan 92-03): on stale-pointer recovery the plug + # path leaves role nil (no membership → no role). The LV path saw the + # error tuple and never wrote a scope, so its role is also nil. + assert is_nil(plug_scope.role) + end + + test "host-themed role atom: both paths propagate :tenant_lead from membership.role to scope.role" do + # Plan 92-01 made the seam role-agnostic; this test proves the plug ↔ + # on_mount parity holds for a host-defined role atom that the library + # has never heard of. + user = build_user() + org = build_org() + membership = build_membership(user, org, :tenant_lead) + session = build_session(org.id, user.id) + scope = build_scope(user) + + # LV path + Sigra.MockRepo + |> expect(:one, fn _query -> org end) + |> expect(:one, fn _query -> membership end) + + assert {:ok, lv_scope} = Hydration.hydrate(scope, @test_config, session) + assert lv_scope.role == :tenant_lead + + # Plug path + Sigra.MockRepo + |> expect(:one, fn _query -> org end) + |> expect(:one, fn _query -> membership end) + + conn = build_conn(scope, session) + plug_conn = LoadActiveOrganization.call(conn, plug_opts()) + plug_scope = plug_conn.assigns[:current_scope] + + assert plug_scope.role == :tenant_lead + assert plug_scope.role == lv_scope.role end end end diff --git a/test/sigra/security_activity_test.exs b/test/sigra/security_activity_test.exs new file mode 100644 index 00000000..c0d18a33 --- /dev/null +++ b/test/sigra/security_activity_test.exs @@ -0,0 +1,177 @@ +defmodule Sigra.SecurityActivityTest do + use ExUnit.Case, async: true + + alias Sigra.SecurityActivity + + defmodule TestAuditEvent do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "audit_events" do + field :action, :string + field :outcome, :string + field :ip_address, :string + field :metadata, :map + field :target_id, :binary_id + field :effective_user_id, :binary_id + field :inserted_at, :utc_datetime_usec + end + end + + defmodule TestSession do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "user_sessions" do + field :type, :string + field :ip, :string + field :geo_city, :string + field :geo_country_code, :string + end + end + + defmodule StubRepo do + @moduledoc false + + def reset do + Process.put({__MODULE__, :audit_rows}, []) + Process.put({__MODULE__, :session_rows}, []) + Process.put({__MODULE__, :queries}, []) + end + + def put_audit_rows(rows), do: Process.put({__MODULE__, :audit_rows}, rows) + def put_session_rows(rows), do: Process.put({__MODULE__, :session_rows}, rows) + def queries, do: Enum.reverse(Process.get({__MODULE__, :queries}, [])) + + def all(%Ecto.Query{} = query) do + Process.put({__MODULE__, :queries}, [query | Process.get({__MODULE__, :queries}, [])]) + + case query.from.source do + {"audit_events", TestAuditEvent} -> Process.get({__MODULE__, :audit_rows}, []) + {"user_sessions", TestSession} -> Process.get({__MODULE__, :session_rows}, []) + end + end + end + + @config %Sigra.Config{ + repo: StubRepo, + user_schema: Sigra.TestUser, + session: [session_schema: TestSession], + audit: [audit_schema: TestAuditEvent] + } + + setup do + StubRepo.reset() + :ok + end + + test "builds a subject-scoped deterministic descending query" do + now = DateTime.utc_now() + + StubRepo.put_audit_rows([ + %TestAuditEvent{ + id: "evt-1", + action: "session.revoke_others", + outcome: "success", + target_id: "user-1", + effective_user_id: "user-1", + inserted_at: now, + metadata: %{} + } + ]) + + _rows = SecurityActivity.list_recent_activity(@config, "user-1", limit: 5) + + [audit_query] = StubRepo.queries() + assert audit_query.limit.expr == {:^, [], [0]} + assert audit_query.limit.params |> hd() |> elem(0) == 15 + + order_text = + audit_query.order_bys + |> Enum.map(&Macro.to_string(&1.expr)) + |> Enum.join("\n") + + assert order_text =~ "inserted_at" + assert order_text =~ "id" + + where_text = + audit_query.wheres + |> Enum.map(&Macro.to_string(&1.expr)) + |> Enum.join("\n") + + assert where_text =~ "effective_user_id" + assert where_text =~ "target_id" + assert where_text =~ "action" + end + + test "normalizes labels, keeps bounded metadata, and suppresses duplicate MFA sign-in rows" do + now = DateTime.utc_now() |> DateTime.truncate(:second) + + StubRepo.put_audit_rows([ + %TestAuditEvent{ + id: "evt-logout", + action: "auth.logout", + outcome: "success", + ip_address: "4.4.4.4", + target_id: "user-1", + effective_user_id: "user-1", + inserted_at: now, + metadata: %{session_id: "session-1", type: :standard} + }, + %TestAuditEvent{ + id: "evt-mfa", + action: "auth.mfa_verified", + outcome: "success", + target_id: "user-1", + effective_user_id: "user-1", + inserted_at: DateTime.add(now, -1, :second), + metadata: %{session_id: "session-2", type: :remember_me} + }, + %TestAuditEvent{ + id: "evt-session-create", + action: "session.create", + outcome: "success", + ip_address: "3.3.3.3", + target_id: "user-1", + effective_user_id: "user-1", + inserted_at: DateTime.add(now, -2, :second), + metadata: %{session_id: "session-2", type: :remember_me} + }, + %TestAuditEvent{ + id: "evt-suspicious", + action: "security.suspicious_login", + outcome: "failure", + ip_address: "9.9.9.9", + target_id: "user-1", + effective_user_id: "user-1", + inserted_at: DateTime.add(now, -3, :second), + metadata: %{geo_city: "Berlin", geo_country_code: "DE"} + } + ]) + + StubRepo.put_session_rows([ + %TestSession{id: "session-1", type: "standard", ip: "4.4.4.4"}, + %TestSession{id: "session-2", type: "remember_me", ip: "3.3.3.3"} + ]) + + rows = SecurityActivity.list_recent_activity(@config, "user-1", limit: 10) + + assert Enum.map(rows, & &1.action) == [ + "auth.logout", + "auth.mfa_verified", + "security.suspicious_login" + ] + + assert Enum.map(rows, & &1.action_label) == [ + "Signed out", + "Completed multi-factor verification", + "Suspicious sign-in attempt" + ] + + assert Enum.map(rows, & &1.kind) == [:logout, :mfa_verified, :suspicious_login] + assert Enum.all?(rows, &(Map.keys(&1) -- ~w(action action_label geo_city geo_country_code id ip_address kind occurred_at outcome session_id session_type)a == [])) + assert Enum.at(rows, 1).session_type in [:remember_me, "remember_me"] + assert Enum.at(rows, 2).geo_city == "Berlin" + assert Enum.at(rows, 2).geo_country_code == "DE" + end +end diff --git a/test/sigra/service_accounts_audit_atomicity_test.exs b/test/sigra/service_accounts_audit_atomicity_test.exs new file mode 100644 index 00000000..e4c85081 --- /dev/null +++ b/test/sigra/service_accounts_audit_atomicity_test.exs @@ -0,0 +1,478 @@ +defmodule Sigra.ServiceAccountsAuditAtomicityTest do + @moduledoc """ + D-AUD-08 / D-93-22 co-fated rollback proof for all five Phase 93 + service-account mutations. + + Mirrors `test/sigra/jwt_refresh_audit_cofate_test.exs` (Phase 82) line + for line, swapping the JWT refresh-flow schemas/calls for service-account + schemas/calls. Per D-AUD-07, every fault path is its own named `test` + block — no loops, no parametrized fixtures. + + Postgres-only (CLAUDE.md prereq: localhost:5432 postgres/postgres). + """ + use ExUnit.Case, async: false + + @moduletag :capture_log + + alias Sigra.Test.AuditEvent, as: AuditTestEvent + alias Sigra.Test.PostgresRepo + + # --- Schemas --------------------------------------------------------------- + # Minimal Ecto schemas for service_accounts and service_account_credentials. + # These are test-only schema modules backed by the real Postgres test repo. + + defmodule SATestUser do + @moduledoc false + use Ecto.Schema + @primary_key {:id, :binary_id, autogenerate: true} + schema "sa_atomicity_test_users" do + field(:email, :string) + end + end + + defmodule SATestServiceAccount do + @moduledoc false + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "sa_atomicity_service_accounts" do + field(:organization_id, :binary_id) + field(:name, :string) + field(:scopes, {:array, :string}, default: []) + field(:role, :string) + field(:token_epoch, :integer, default: 0) + field(:revoked_at, :utc_datetime_usec) + field(:last_used_at, :utc_datetime_usec) + field(:created_by_user_id, :binary_id) + timestamps(type: :utc_datetime_usec) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :organization_id, + :name, + :scopes, + :role, + :token_epoch, + :revoked_at, + :last_used_at, + :created_by_user_id + ]) + |> validate_required([:organization_id, :name]) + end + end + + defmodule SATestCredential do + @moduledoc false + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "sa_atomicity_service_account_credentials" do + field(:service_account_id, :binary_id) + field(:client_id, :string) + field(:hashed_client_secret, :binary) + field(:expires_at, :utc_datetime_usec) + field(:last_used_at, :utc_datetime_usec) + field(:revoked_at, :utc_datetime_usec) + timestamps(type: :utc_datetime_usec) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :service_account_id, + :client_id, + :hashed_client_secret, + :expires_at, + :last_used_at, + :revoked_at + ]) + |> validate_required([:service_account_id, :client_id, :hashed_client_secret]) + |> unique_constraint(:client_id) + end + end + + # --- Telemetry handler ----------------------------------------------------- + defmodule TelemetryHandler do + @moduledoc false + def handle_event(event, measurements, metadata, parent) do + send(parent, {:telemetry, event, measurements, metadata}) + end + end + + # --- Setup ----------------------------------------------------------------- + setup do + start_supervised!({PostgresRepo, PostgresRepo.default_config()}) + repo = PostgresRepo + + Ecto.Adapters.SQL.query!(repo, ~s|CREATE EXTENSION IF NOT EXISTS "uuid-ossp"|, []) + + # Drop and recreate tables owned by this test module. + for t <- [ + "sa_atomicity_service_account_credentials", + "sa_atomicity_service_accounts", + "sa_atomicity_test_users", + "audit_events" + ] do + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS #{t} CASCADE", []) + end + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE sa_atomicity_test_users ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + email text, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE sa_atomicity_service_accounts ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + organization_id uuid NOT NULL, + name text NOT NULL, + scopes text[] NOT NULL DEFAULT '{}', + role text, + token_epoch integer NOT NULL DEFAULT 0, + revoked_at timestamp, + last_used_at timestamp, + created_by_user_id uuid, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE sa_atomicity_service_account_credentials ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + service_account_id uuid NOT NULL + REFERENCES sa_atomicity_service_accounts(id) ON DELETE CASCADE, + client_id text NOT NULL UNIQUE, + hashed_client_secret bytea NOT NULL, + expires_at timestamp, + last_used_at timestamp, + revoked_at timestamp, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE audit_events ( + id uuid PRIMARY KEY, + occurred_at timestamp NOT NULL DEFAULT now(), + action varchar(255) NOT NULL, + outcome varchar(32) NOT NULL DEFAULT 'success', + actor_id uuid, + actor_type varchar(64) NOT NULL DEFAULT 'user', + target_id uuid, + target_type varchar(64), + ip_address varchar(64), + user_agent varchar(512), + metadata jsonb NOT NULL DEFAULT '{}'::jsonb, + organization_id uuid, + effective_user_id uuid, + inserted_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "TRUNCATE TABLE sa_atomicity_service_accounts, sa_atomicity_service_account_credentials, audit_events RESTART IDENTITY CASCADE", + [] + ) + + %{repo: repo} + end + + # --- Helpers --------------------------------------------------------------- + + defp count_rows(repo, table) do + %{rows: [[n]]} = + Ecto.Adapters.SQL.query!(repo, "SELECT count(*)::bigint FROM #{table}", []) + + n + end + + # Mirror the `Sigra.Config.new!/1` idiom from + # `test/sigra/jwt_refresh_audit_cofate_test.exs` lines 136 and 155 so + # NimbleOptions defaults run and the config struct is identical to what + # production callers see — never `%Sigra.Config{}` literal here. + defp sigra_config(repo) do + Sigra.Config.new!( + repo: repo, + user_schema: SATestUser, + otp_app: :sa_atomicity_test, + secret_key_base: String.duplicate("k", 64), + service_accounts: [ + service_account_schema: SATestServiceAccount, + service_account_credential_schema: SATestCredential, + client_id_byte_size: 24 + ], + audit: [audit_schema: AuditTestEvent], + jwt: [ + enabled: true, + algorithm: "HS256", + issuer: "sa_atomicity_test", + access_ttl: 900, + client_credentials_access_ttl: 3600, + refresh: false, + verify_epoch: false + ] + ) + end + + defp make_scope do + %{ + user: %{id: Ecto.UUID.generate()}, + active_organization: %{id: Ecto.UUID.generate()} + } + end + + defp seed_sa!(repo, scope) do + org_id = get_in(scope, [:active_organization, :id]) + + %{rows: [[id_bytes]]} = + Ecto.Adapters.SQL.query!( + repo, + """ + INSERT INTO sa_atomicity_service_accounts + (organization_id, name, scopes, token_epoch) + VALUES ($1, $2, ARRAY['deploy:write']::text[], 0) + RETURNING id + """, + [Ecto.UUID.dump!(org_id), "ci-bot"] + ) + + {:ok, id_str} = Ecto.UUID.cast(id_bytes) + repo.get(SATestServiceAccount, id_str) + end + + defp attach_telemetry_to_self(name) do + :telemetry.attach( + {__MODULE__, name}, + [:sigra, :audit, :log_safe_error], + &TelemetryHandler.handle_event/4, + self() + ) + end + + # --- The five fault-injection tests ---------------------------------------- + + test "create: audit CHECK rejects service_account.create -> :service_account_aborted, no partial SA row", + %{repo: repo} do + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events ADD CONSTRAINT sa_create_cofate_guard CHECK (action <> 'service_account.create')", + [] + ) + + try do + scope = make_scope() + org_id = get_in(scope, [:active_organization, :id]) + before_count = count_rows(repo, "sa_atomicity_service_accounts") + attach_telemetry_to_self(:create) + + assert {:error, :service_account_aborted} = + Sigra.ServiceAccounts.create(sigra_config(repo), scope, %{ + organization_id: org_id, + name: "ci-bot", + scopes: ["deploy:write"] + }) + + assert_receive {:telemetry, [:sigra, :audit, :log_safe_error], %{count: 1}, + %{action: "service_account.create", reason: :constraint_violation}}, + 1000 + + assert count_rows(repo, "sa_atomicity_service_accounts") == before_count + assert count_rows(repo, "audit_events") == 0 + after + :telemetry.detach({__MODULE__, :create}) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS sa_create_cofate_guard", + [] + ) + end + end + + test "revoke: audit CHECK rejects service_account.revoke -> :service_account_aborted, SA stays unrevoked", + %{repo: repo} do + scope = make_scope() + sa = seed_sa!(repo, scope) + original_epoch = sa.token_epoch + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events ADD CONSTRAINT sa_revoke_cofate_guard CHECK (action <> 'service_account.revoke')", + [] + ) + + try do + attach_telemetry_to_self(:revoke) + + assert {:error, :service_account_aborted} = + Sigra.ServiceAccounts.revoke(sigra_config(repo), scope, sa) + + assert_receive {:telemetry, [:sigra, :audit, :log_safe_error], %{count: 1}, + %{action: "service_account.revoke", reason: :constraint_violation}}, + 1000 + + reread = repo.get(SATestServiceAccount, sa.id) + assert is_nil(reread.revoked_at), "revoked_at must be nil — SA row must have rolled back" + assert reread.token_epoch == original_epoch, "token_epoch must not advance under rollback" + after + :telemetry.detach({__MODULE__, :revoke}) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS sa_revoke_cofate_guard", + [] + ) + end + end + + test "credential_create: audit CHECK rejects service_account.credential_create -> :service_account_credential_aborted, no partial credential row", + %{repo: repo} do + scope = make_scope() + sa = seed_sa!(repo, scope) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events ADD CONSTRAINT sa_cred_create_cofate_guard CHECK (action <> 'service_account.credential_create')", + [] + ) + + try do + before_creds = count_rows(repo, "sa_atomicity_service_account_credentials") + attach_telemetry_to_self(:cred_create) + + assert {:error, :service_account_credential_aborted} = + Sigra.ServiceAccounts.create_credential(sigra_config(repo), scope, sa, %{}) + + assert_receive {:telemetry, [:sigra, :audit, :log_safe_error], %{count: 1}, + %{ + action: "service_account.credential_create", + reason: :constraint_violation + }}, + 1000 + + assert count_rows(repo, "sa_atomicity_service_account_credentials") == before_creds, + "credential row count must be unchanged — credential insert rolled back" + after + :telemetry.detach({__MODULE__, :cred_create}) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS sa_cred_create_cofate_guard", + [] + ) + end + end + + test "credential_revoke: audit CHECK rejects service_account.credential_revoke -> :service_account_credential_aborted, credential stays unrevoked", + %{repo: repo} do + scope = make_scope() + sa = seed_sa!(repo, scope) + cfg = sigra_config(repo) + + # Create a real credential so we have one to revoke. + {:ok, cred, _secret} = Sigra.ServiceAccounts.create_credential(cfg, scope, sa, %{}) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events ADD CONSTRAINT sa_cred_revoke_cofate_guard CHECK (action <> 'service_account.credential_revoke')", + [] + ) + + try do + attach_telemetry_to_self(:cred_revoke) + + assert {:error, :service_account_credential_aborted} = + Sigra.ServiceAccounts.revoke_credential(cfg, scope, cred) + + assert_receive {:telemetry, [:sigra, :audit, :log_safe_error], %{count: 1}, + %{ + action: "service_account.credential_revoke", + reason: :constraint_violation + }}, + 1000 + + reread = repo.get(SATestCredential, cred.id) + + assert is_nil(reread.revoked_at), + "revoked_at must remain nil — credential update rolled back" + after + :telemetry.detach({__MODULE__, :cred_revoke}) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS sa_cred_revoke_cofate_guard", + [] + ) + end + end + + test "token_issued: audit CHECK rejects service_account.token_issued -> :service_account_token_issuance_aborted, credential.last_used_at unchanged", + %{repo: repo} do + scope = make_scope() + sa = seed_sa!(repo, scope) + cfg = sigra_config(repo) + + {:ok, cred, _secret} = Sigra.ServiceAccounts.create_credential(cfg, scope, sa, %{}) + + before_last_used = repo.get(SATestCredential, cred.id).last_used_at + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events ADD CONSTRAINT sa_token_issued_cofate_guard CHECK (action <> 'service_account.token_issued')", + [] + ) + + try do + attach_telemetry_to_self(:token_issued) + + # Per `lib/sigra/service_accounts.ex:199`, the multi-failure tuple is + # normalized to `{:error, :service_account_token_issuance_aborted}`. + assert {:error, :service_account_token_issuance_aborted} = + Sigra.ServiceAccounts.issue_token(cfg, sa, cred, []) + + assert_receive {:telemetry, [:sigra, :audit, :log_safe_error], %{count: 1}, + %{action: "service_account.token_issued", reason: :constraint_violation}}, + 1000 + + reread = repo.get(SATestCredential, cred.id) + + assert reread.last_used_at == before_last_used, + "credential.last_used_at must NOT advance when token_issued audit rolls back" + after + :telemetry.detach({__MODULE__, :token_issued}) + + Ecto.Adapters.SQL.query!( + repo, + "ALTER TABLE audit_events DROP CONSTRAINT IF EXISTS sa_token_issued_cofate_guard", + [] + ) + end + end +end diff --git a/test/sigra/service_accounts_test.exs b/test/sigra/service_accounts_test.exs new file mode 100644 index 00000000..bbd47497 --- /dev/null +++ b/test/sigra/service_accounts_test.exs @@ -0,0 +1,163 @@ +defmodule Sigra.ServiceAccountsTest do + use ExUnit.Case, async: true + + alias Sigra.{JWT, ServiceAccounts} + + defmodule ServiceAccount do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_accounts" do + field :name, :string + field :scopes, {:array, :string}, default: [] + field :role, :string + field :token_epoch, :integer, default: 0 + field :revoked_at, :utc_datetime + field :last_used_at, :utc_datetime + field :organization_id, :binary_id + field :created_by_user_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:id, :name, :scopes, :role, :token_epoch, :organization_id, :created_by_user_id]) + |> validate_required([:name, :organization_id]) + end + end + + defmodule Credential do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "service_account_credentials" do + field :client_id, :string + field :hashed_client_secret, :binary + field :expires_at, :utc_datetime + field :last_used_at, :utc_datetime + field :revoked_at, :utc_datetime + field :service_account_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:id, :client_id, :hashed_client_secret, :expires_at, :service_account_id]) + |> validate_required([:client_id, :hashed_client_secret, :service_account_id]) + end + end + + defmodule MockRepo do + def insert(changeset, _opts \\ []) do + if changeset.valid? do + {:ok, Ecto.Changeset.apply_changes(changeset) |> with_id()} + else + {:error, changeset} + end + end + + def update(changeset, _opts \\ []), do: {:ok, Ecto.Changeset.apply_changes(changeset)} + + def transaction(%Ecto.Multi{} = multi) do + return = fn err -> throw({:mock_multi_abort, err}) end + wrap = fn fun -> fun.() end + + try do + case Ecto.Multi.__apply__(multi, __MODULE__, wrap, return) do + {:ok, result} -> {:ok, result} + result when is_map(result) -> {:ok, result} + {:error, {name, val, acc}} -> {:error, name, val, acc} + end + catch + :throw, {:mock_multi_abort, {name, val, acc}} -> + {:error, name, val, acc} + end + end + + def get(ServiceAccount, id), do: Process.get({:service_account, id}) + def get(Credential, id), do: Process.get({:credential, id}) + def get_by(Credential, client_id: client_id), do: Process.get({:credential_by_client_id, client_id}) + + defp with_id(struct) do + Map.put(struct, :id, Map.get(struct, :id) || "id-#{System.unique_integer([:positive])}") + end + end + + defp config(overrides \\ []) do + defaults = [ + repo: MockRepo, + user_schema: Sigra.TestUser, + otp_app: :sigra, + secret_key_base: String.duplicate("a", 64), + audit: [audit_schema: Sigra.Test.AuditEvent], + service_accounts: [ + service_account_schema: ServiceAccount, + service_account_credential_schema: Credential, + client_id_prefix: "sigra_sa_", + client_id_byte_size: 24 + ], + jwt: [enabled: true, algorithm: "HS256", issuer: "sigra", client_credentials_access_ttl: 3600] + ] + + Sigra.Config.new!(Keyword.merge(defaults, overrides)) + end + + defp scope, do: %{user: %{id: "user-1"}, active_organization: %{id: "org-1"}} + + test "create/3 inserts a service account" do + assert {:ok, sa} = + ServiceAccounts.create(config(), scope(), %{ + name: "CI", + scopes: ["deploy:write"], + organization_id: "org-1" + }) + + assert sa.name == "CI" + assert sa.organization_id == "org-1" + end + + test "revoke/3 bumps token_epoch and sets revoked_at" do + sa = %ServiceAccount{id: "sa-1", organization_id: "org-1", token_epoch: 3} + + assert {:ok, updated} = ServiceAccounts.revoke(config(), scope(), sa) + assert updated.token_epoch == 4 + refute is_nil(updated.revoked_at) + end + + test "create_credential/4 returns plaintext secret and stored hash" do + sa = %ServiceAccount{id: "sa-1", organization_id: "org-1"} + + assert {:ok, credential, raw_secret} = ServiceAccounts.create_credential(config(), scope(), sa, %{}) + assert credential.service_account_id == sa.id + assert credential.client_id =~ "sigra_sa_" + assert credential.hashed_client_secret == Sigra.Token.hash_token(raw_secret) + end + + test "revoke_credential/3 sets revoked_at without error" do + sa = %ServiceAccount{id: "sa-1", organization_id: "org-1"} + credential = %Credential{id: "cred-1", service_account_id: sa.id, client_id: "sigra_sa_abc"} + Process.put({:service_account, sa.id}, sa) + + assert {:ok, updated} = ServiceAccounts.revoke_credential(config(), scope(), credential) + refute is_nil(updated.revoked_at) + end + + test "issue_token/4 delegates to JWT service-account generator" do + sa = %ServiceAccount{id: "sa-1", organization_id: "org-1", scopes: ["deploy:write"], token_epoch: 0} + credential = %Credential{ + id: "cred-1", + service_account_id: sa.id, + client_id: "sigra_sa_abc", + hashed_client_secret: Sigra.Token.hash_token("secret"), + revoked_at: nil + } + Process.put({:service_account, sa.id}, sa) + Process.put({:credential, credential.id}, credential) + + assert {:ok, %{access_token: jwt, refresh_token: nil, expires_in: 3600}} = + ServiceAccounts.issue_token(config(), sa, credential) + + assert {:ok, claims} = JWT.verify_access(config(), jwt) + assert claims["actor_type"] == "service_account" + end +end diff --git a/test/sigra/session_test.exs b/test/sigra/session_test.exs index c5e57809..4ae4c443 100644 --- a/test/sigra/session_test.exs +++ b/test/sigra/session_test.exs @@ -77,10 +77,12 @@ defmodule Sigra.SessionTest do test "defines 8 callbacks" do callbacks = Sigra.SessionStore.behaviour_info(:callbacks) - assert length(callbacks) == 8 + assert length(callbacks) == 10 assert {:create, 3} in callbacks + assert {:create_session_multi, 3} in callbacks assert {:fetch, 2} in callbacks assert {:delete, 2} in callbacks + assert {:delete_session_multi, 3} in callbacks assert {:list_by_user, 2} in callbacks assert {:delete_all_for_user, 2} in callbacks assert {:update_activity, 3} in callbacks diff --git a/test/sigra/templates/generator_emission_audit_test.exs b/test/sigra/templates/generator_emission_audit_test.exs index 3ada9e33..b29b0d23 100644 --- a/test/sigra/templates/generator_emission_audit_test.exs +++ b/test/sigra/templates/generator_emission_audit_test.exs @@ -108,6 +108,10 @@ defmodule Sigra.Templates.GeneratorEmissionAuditTest do "SessionHTML" -> ["login_html.ex", "session_html.ex"] # `PageLive` is a compile-time stub defined inside `user_auth.ex` (not a separate template). "PageLive" -> ["user_auth.ex"] + # Phase 93 Plan 03: Macro.underscore("OAuthTokenController") returns + # "o_auth_token_controller" but the on-disk filename follows Phoenix + # convention "oauth_token_controller.ex" (single token, RFC 6749 §4.4). + "OAuthTokenController" -> ["oauth_token_controller.ex"] _ -> [] end end diff --git a/test/sigra/templates/session_templates_test.exs b/test/sigra/templates/session_templates_test.exs index 507af00e..7aa0a936 100644 --- a/test/sigra/templates/session_templates_test.exs +++ b/test/sigra/templates/session_templates_test.exs @@ -57,12 +57,8 @@ defmodule Sigra.Templates.SessionTemplatesTest do assert content =~ "index(:user_sessions, [:inserted_at])" end - test "includes user_sessions in all three adapter sections", %{content: content} do - # Each adapter section should have user_sessions - sections = String.split(content, "create table(:user_sessions") - # Original + 3 adapter sections = 4 parts - assert length(sections) == 4, - "Expected user_sessions in postgres, mysql, and sqlite sections" + test "includes user_sessions table definition", %{content: content} do + assert content =~ "create table(:user_sessions" end end @@ -119,8 +115,22 @@ defmodule Sigra.Templates.SessionTemplatesTest do assert content =~ "def revoke_session(" end - test "contains revoke_all_sessions function", %{content: content} do - assert content =~ "def revoke_all_sessions(" + test "contains revoke_other_sessions function", %{content: content} do + assert content =~ "def revoke_other_sessions(" + end + + test "contains current_session_hashed_token function", %{content: content} do + assert content =~ "def current_session_hashed_token(" + end + + test "contains recent_security_activity function", %{content: content} do + assert content =~ "def recent_security_activity(" + assert content =~ "Sigra.SecurityActivity.list_recent_activity" + end + + test "contains truthful logout helper", %{content: content} do + assert content =~ "def log_out_user_session_token(" + assert content =~ "Sigra.Auth.logout" end test "contains confirm_sudo function", %{content: content} do @@ -142,11 +152,36 @@ defmodule Sigra.Templates.SessionTemplatesTest do test "delegates to Sigra.Auth library functions", %{content: content} do assert content =~ "Sigra.Auth.list_sessions" assert content =~ "Sigra.Auth.revoke_session" - assert content =~ "Sigra.Auth.delete_all_sessions" + assert content =~ "Sigra.Auth.revoke_other_sessions" + assert content =~ "Sigra.SecurityActivity.list_recent_activity" assert content =~ "Sigra.Auth.confirm_sudo" end end + describe "session live template" do + setup do + content = File.read!(Path.join(@templates_dir, "session_live.ex")) + %{content: content} + end + + test "renders recent security activity section", %{content: content} do + assert content =~ "Recent security activity" + assert content =~ "security_activity" + assert content =~ "Recent sign-ins" + end + end + + describe "user_auth template" do + setup do + content = File.read!(Path.join(@templates_dir, "user_auth.ex")) + %{content: content} + end + + test "logout delegates through the truthful logout helper", %{content: content} do + assert content =~ "log_out_user_session_token" + end + end + describe "sudo controller template" do setup do content = File.read!(Path.join(@templates_dir, "sudo_controller.ex")) diff --git a/test/sigra/testing/oauth_issuer_test.exs b/test/sigra/testing/oauth_issuer_test.exs new file mode 100644 index 00000000..9e0a54af --- /dev/null +++ b/test/sigra/testing/oauth_issuer_test.exs @@ -0,0 +1,288 @@ +defmodule Sigra.Testing.OAuthIssuerTest do + use ExUnit.Case, async: true + + alias Sigra.Testing.OAuthIssuer + + describe "start_link/1 - provider :google" do + test "returns an issuer handle with request-time state" do + with_issuer([provider: :google], fn issuer -> + assert is_binary(OAuthIssuer.url(issuer)) + assert String.starts_with?(OAuthIssuer.url(issuer), "http://127.0.0.1:") + assert is_pid(issuer.state) + end) + end + end + + describe "/.well-known/openid-configuration" do + test "returns the discovery document" do + with_issuer([], fn issuer -> + response = get!(OAuthIssuer.url(issuer) <> "/.well-known/openid-configuration") + assert response.status == 200 + + assert response.body == %{ + "issuer" => OAuthIssuer.url(issuer), + "authorization_endpoint" => OAuthIssuer.url(issuer) <> "/oauth2/v2/auth", + "token_endpoint" => OAuthIssuer.url(issuer) <> "/token", + "userinfo_endpoint" => OAuthIssuer.url(issuer) <> "/userinfo", + "jwks_uri" => OAuthIssuer.url(issuer) <> "/jwks", + "token_endpoint_auth_methods_supported" => [ + "none", + "client_secret_post", + "client_secret_basic" + ] + } + end) + end + end + + describe "/oauth2/v2/auth -> 302 redirect" do + test "redirects back with code and state" do + with_issuer([], fn issuer -> + response = + get!( + OAuthIssuer.url(issuer) <> + "/oauth2/v2/auth?" <> + URI.encode_query(%{ + "client_id" => "sigra-client", + "redirect_uri" => "http://example.test/callback", + "state" => "state-123", + "code_challenge" => pkce_challenge("verifier-123"), + "code_challenge_method" => "S256", + "nonce" => "nonce-123" + }), + autoredirect: false + ) + + assert response.status == 302 + location = header!(response, "location") + query = URI.parse(location).query |> URI.decode_query() + + assert URI.parse(location).path == "/callback" + assert query["state"] == "state-123" + assert is_binary(query["code"]) + end) + end + end + + describe "/token RS256 sign+verify roundtrip" do + test "returns an RS256 id_token" do + with_issuer([], fn issuer -> + %{code: code} = authorize!(issuer) + response = exchange_code!(issuer, code, "verifier-123") + + assert response.status == 200 + assert response.body["token_type"] == "Bearer" + assert response.body["expires_in"] == 3600 + assert is_binary(response.body["access_token"]) + assert is_binary(response.body["refresh_token"]) + + config = [ + client_id: "sigra-client", + openid_configuration: OAuthIssuer.openid_config(issuer), + session_params: %{nonce: "nonce-123"} + ] + + assert {:ok, jwt} = + Assent.Strategy.OIDC.validate_id_token(config, response.body["id_token"]) + + assert jwt.header["alg"] == "RS256" + assert jwt.header["kid"] == "kid1" + assert jwt.claims["iss"] == OAuthIssuer.url(issuer) + assert jwt.claims["aud"] == "sigra-client" + assert jwt.claims["nonce"] == "nonce-123" + assert jwt.claims["email_verified"] == true + end) + end + end + + describe "/token with bad code_verifier" do + test "returns invalid_grant" do + with_issuer([], fn issuer -> + %{code: code} = authorize!(issuer) + response = exchange_code!(issuer, code, "wrong-verifier") + + assert response.status == 400 + assert response.body["error"] == "invalid_grant" + assert response.body["error_description"] == "invalid code_verifier" + end) + end + end + + describe "/jwks" do + test "exposes the configured key count" do + with_issuer([kid_count: 2], fn issuer -> + response = get!(OAuthIssuer.url(issuer) <> "/jwks") + assert response.status == 200 + assert Enum.map(response.body["keys"], & &1["kid"]) == ["kid1", "kid2"] + end) + end + end + + describe "configurable exp" do + test "respects the requested expiration offset" do + with_issuer([exp: 60], fn issuer -> + %{code: code} = authorize!(issuer) + response = exchange_code!(issuer, code, "verifier-123") + claims = jwt_claims(response.body["id_token"]) + + assert claims["exp"] - claims["iat"] == 60 + end) + end + end + + describe "refresh-token rotation toggle" do + test "keeps refresh tokens stable when disabled" do + with_issuer([refresh_rotation: false], fn issuer -> + %{code: code} = authorize!(issuer) + first = exchange_code!(issuer, code, "verifier-123") + + second = + post_form!(OAuthIssuer.url(issuer) <> "/token", %{ + "grant_type" => "refresh_token", + "refresh_token" => first.body["refresh_token"], + "client_id" => "sigra-client" + }) + + assert second.status == 200 + assert second.body["refresh_token"] == first.body["refresh_token"] + end) + end + end + + describe "email_verified boolean shape" do + test "returns email_verified as a JSON boolean" do + with_issuer([], fn issuer -> + %{code: code} = authorize!(issuer) + token_response = exchange_code!(issuer, code, "verifier-123") + + userinfo = + get!(OAuthIssuer.url(issuer) <> "/userinfo", + headers: [{"authorization", "Bearer " <> token_response.body["access_token"]}] + ) + + assert userinfo.status == 200 + assert userinfo.body["email_verified"] === true + assert is_boolean(userinfo.body["email_verified"]) + end) + end + end + + defp authorize!(issuer) do + response = + get!( + OAuthIssuer.url(issuer) <> + "/oauth2/v2/auth?" <> + URI.encode_query(%{ + "client_id" => "sigra-client", + "redirect_uri" => "http://example.test/callback", + "state" => "state-123", + "code_challenge" => pkce_challenge("verifier-123"), + "code_challenge_method" => "S256", + "nonce" => "nonce-123" + }), + autoredirect: false + ) + + query = + response + |> header!("location") + |> URI.parse() + |> Map.fetch!(:query) + |> URI.decode_query() + + %{code: query["code"], state: query["state"]} + end + + defp exchange_code!(issuer, code, verifier) do + post_form!(OAuthIssuer.url(issuer) <> "/token", %{ + "grant_type" => "authorization_code", + "code" => code, + "redirect_uri" => "http://example.test/callback", + "client_id" => "sigra-client", + "code_verifier" => verifier + }) + end + + defp jwt_claims(token) do + %JOSE.JWT{fields: claims} = JOSE.JWT.peek_payload(token) + claims + end + + defp pkce_challenge(verifier) do + verifier + |> then(&:crypto.hash(:sha256, &1)) + |> Base.url_encode64(padding: false) + end + + defp get!(url, opts \\ []) do + request!(:get, url, opts) + end + + defp post_form!(url, form, opts \\ []) do + request!(:post, url, Keyword.put(opts, :body, URI.encode_query(form))) + end + + defp request!(method, url, opts) do + headers = + opts + |> Keyword.get(:headers, []) + |> Enum.map(fn {key, value} -> {String.to_charlist(key), String.to_charlist(value)} end) + |> maybe_put_form_header(method, opts) + + request = + case {method, Keyword.get(opts, :body)} do + {:post, body} -> + {String.to_charlist(url), headers, ~c"application/x-www-form-urlencoded", body} + + _other -> + {String.to_charlist(url), headers} + end + + http_opts = + [] + |> maybe_put(:autoredirect, Keyword.get(opts, :autoredirect)) + + assert {:ok, {{_, status, _}, raw_headers, raw_body}} = + :httpc.request(method, request, http_opts, body_format: :binary) + + %{ + status: status, + headers: Enum.map(raw_headers, fn {key, value} -> {to_string(key), to_string(value)} end), + body: decode_body(raw_body) + } + end + + defp maybe_put_form_header(headers, :post, _opts) do + [{~c"content-type", ~c"application/x-www-form-urlencoded"} | headers] + end + + defp maybe_put_form_header(headers, _method, _opts), do: headers + + defp decode_body(""), do: "" + + defp decode_body(body) do + case Jason.decode(body) do + {:ok, json} -> json + {:error, _reason} -> body + end + end + + defp header!(response, name) do + response.headers + |> Enum.find_value(fn {header, value} -> if header == name, do: value end) + |> Kernel.||(flunk("missing header #{name} in #{inspect(response.headers)}")) + end + + defp maybe_put(list, _key, nil), do: list + defp maybe_put(list, key, value), do: Keyword.put(list, key, value) + + defp with_issuer(opts, fun) do + {:ok, issuer} = OAuthIssuer.start_link(opts) + + try do + fun.(issuer) + after + OAuthIssuer.stop(issuer) + end + end +end diff --git a/test/sigra/webhooks_audit_atomicity_test.exs b/test/sigra/webhooks_audit_atomicity_test.exs new file mode 100644 index 00000000..2051a7a5 --- /dev/null +++ b/test/sigra/webhooks_audit_atomicity_test.exs @@ -0,0 +1,340 @@ +defmodule Sigra.WebhooksAuditAtomicityTest do + use ExUnit.Case, async: false + + import Ecto.Query + + alias Sigra.Auth + alias Sigra.Test.PostgresRepo + + defmodule WebhookUser do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_atomicity_users_97" do + field :email, :string + field :hashed_password, :string + timestamps() + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:email, :hashed_password]) + |> validate_required([:email, :hashed_password]) + |> unique_constraint(:email, name: :webhook_atomicity_users_97_email_key) + end + end + + defmodule WebhookSubscription do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_atomicity_subscriptions_97" do + field :endpoint_url, :string + field :event_types, {:array, :string}, default: [] + field :enabled, :boolean, default: true + end + end + + defmodule WebhookEvent do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_atomicity_events_97" do + field :event_id, :string + field :type, :string + field :schema_version, :string + field :occurred_at, :utc_datetime + field :payload, :map + field :actor_id, :binary_id + field :actor_type, :string + field :organization_id, :binary_id + field :request_id, :string + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :event_id, + :type, + :schema_version, + :occurred_at, + :payload, + :actor_id, + :actor_type, + :organization_id, + :request_id + ]) + |> validate_required([:event_id, :type, :schema_version, :occurred_at, :payload]) + end + end + + defmodule WebhookDelivery do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_atomicity_deliveries_97" do + field :delivery_id, :string + field :status, :string + field :endpoint_url, :string + belongs_to :webhook_subscription, WebhookSubscription, type: :binary_id + belongs_to :webhook_event, WebhookEvent, type: :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :status, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + |> validate_required([ + :delivery_id, + :status, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + end + end + + setup do + start_supervised!({PostgresRepo, PostgresRepo.default_config()}) + repo = PostgresRepo + + Ecto.Adapters.SQL.query!(repo, "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"", []) + ensure_oban_jobs_table!(repo) + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS webhook_atomicity_deliveries_97 CASCADE", []) + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS webhook_atomicity_events_97 CASCADE", []) + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS webhook_atomicity_subscriptions_97 CASCADE", []) + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS webhook_atomicity_users_97 CASCADE", []) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_atomicity_users_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + email text NOT NULL, + hashed_password text NOT NULL, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now(), + CONSTRAINT webhook_atomicity_users_97_email_key UNIQUE (email) + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_atomicity_subscriptions_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + endpoint_url text, + event_types text[] NOT NULL DEFAULT '{}', + enabled boolean NOT NULL DEFAULT true + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_atomicity_events_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + event_id text NOT NULL, + type text NOT NULL, + schema_version text NOT NULL, + occurred_at timestamp NOT NULL, + payload jsonb NOT NULL, + actor_id uuid, + actor_type text, + organization_id uuid, + request_id text + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_atomicity_deliveries_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + delivery_id text NOT NULL, + status text NOT NULL, + endpoint_url text, + webhook_subscription_id uuid NOT NULL REFERENCES webhook_atomicity_subscriptions_97(id), + webhook_event_id uuid NOT NULL REFERENCES webhook_atomicity_events_97(id) + ) + """, + [] + ) + + %{repo: repo} + end + + defp ensure_oban_jobs_table!(repo) do + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS oban_jobs CASCADE", []) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE oban_jobs ( + id bigserial PRIMARY KEY, + state text NOT NULL DEFAULT 'available', + queue text NOT NULL DEFAULT 'default', + worker text NOT NULL, + args jsonb NOT NULL, + errors jsonb NOT NULL DEFAULT '[]'::jsonb, + meta jsonb NOT NULL DEFAULT '{}'::jsonb, + tags text[] NOT NULL DEFAULT '{}', + attempt integer NOT NULL DEFAULT 0, + attempted_by text[], + max_attempts integer NOT NULL DEFAULT 20, + priority integer NOT NULL DEFAULT 0, + attempted_at timestamp, + cancelled_at timestamp, + completed_at timestamp, + discarded_at timestamp, + inserted_at timestamp NOT NULL DEFAULT now(), + scheduled_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + end + + defp config(repo, enabled \\ true) do + Sigra.Config.new!( + repo: repo, + user_schema: WebhookUser, + secret_key_base: String.duplicate("a", 64), + webhooks: [ + enabled: enabled, + webhook_subscription_schema: WebhookSubscription, + webhook_event_schema: WebhookEvent, + webhook_delivery_schema: WebhookDelivery + ] + ) + end + + defp register_opts do + [changeset_fn: fn attrs -> WebhookUser.changeset(%WebhookUser{}, attrs) end] + end + + test "persists user, webhook event, delivery, and initial job together when local handoff succeeds", %{repo: repo} do + repo.insert!(%WebhookSubscription{ + endpoint_url: "https://receiver.example/hooks", + event_types: ["user.created"], + enabled: true + }) + + assert {:ok, user} = + Auth.register( + config(repo), + %{"email" => "webhook-ok@example.com", "hashed_password" => "hash"}, + register_opts() + ) + + assert user.email == "webhook-ok@example.com" + assert 1 == repo.aggregate(WebhookUser, :count) + assert 1 == repo.aggregate(WebhookEvent, :count) + assert 1 == repo.aggregate(WebhookDelivery, :count) + assert 1 == + repo.aggregate( + from(job in "oban_jobs", where: job.queue == "sigra_webhooks"), + :count + ) + + event = repo.one(from(e in WebhookEvent, select: e)) + delivery = repo.one(from(d in WebhookDelivery, select: d)) + + assert event.type == "user.created" + assert get_in(event.payload, ["data", "object", "email"]) == "webhook-ok@example.com" + assert delivery.webhook_event_id == event.id + end + + test "rolls back the user insert when delivery persistence fails inside the outer transaction", %{repo: repo} do + Ecto.Adapters.SQL.query!( + repo, + """ + INSERT INTO webhook_atomicity_subscriptions_97 (endpoint_url, event_types, enabled) + VALUES (NULL, ARRAY['user.created'], TRUE) + """, + [] + ) + + assert {:error, %Ecto.Changeset{} = changeset} = + Auth.register( + config(repo), + %{"email" => "webhook-roll@example.com", "hashed_password" => "hash"}, + register_opts() + ) + + assert %{endpoint_url: ["can't be blank"]} = errors_on(changeset) + assert 0 == repo.aggregate(WebhookUser, :count) + assert 0 == repo.aggregate(WebhookEvent, :count) + assert 0 == repo.aggregate(WebhookDelivery, :count) + end + + test "rolls back the outer mutation when the initial job insert fails inside the local handoff boundary", %{ + repo: repo + } do + repo.insert!(%WebhookSubscription{ + endpoint_url: "https://receiver.example/hooks", + event_types: ["user.created"], + enabled: true + }) + + Ecto.Adapters.SQL.query!( + repo, + """ + ALTER TABLE oban_jobs + ADD CONSTRAINT webhook_atomicity_reject_sigra_queue + CHECK (queue <> 'sigra_webhooks') + """, + [] + ) + + try do + assert_raise Ecto.ConstraintError, fn -> + Auth.register_user_multi( + %{"email" => "webhook-job-roll@example.com", "hashed_password" => "hash"}, + Keyword.merge(register_opts(), config: config(repo)) + ) + |> repo.transaction() + end + after + Ecto.Adapters.SQL.query!( + repo, + """ + ALTER TABLE oban_jobs + DROP CONSTRAINT IF EXISTS webhook_atomicity_reject_sigra_queue + """, + [] + ) + end + + assert 0 == repo.aggregate(WebhookUser, :count) + assert 0 == repo.aggregate(WebhookEvent, :count) + assert 0 == repo.aggregate(WebhookDelivery, :count) + assert 0 == + repo.aggregate( + from(job in "oban_jobs", where: job.queue == "sigra_webhooks"), + :count + ) + end + + defp errors_on(%Ecto.Changeset{} = changeset) do + Ecto.Changeset.traverse_errors(changeset, fn {message, opts} -> + Regex.replace(~r"%{(\w+)}", message, fn _, key -> + opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string() + end) + end) + end +end diff --git a/test/sigra/webhooks_dispatcher_test.exs b/test/sigra/webhooks_dispatcher_test.exs new file mode 100644 index 00000000..8d339d8e --- /dev/null +++ b/test/sigra/webhooks_dispatcher_test.exs @@ -0,0 +1,274 @@ +defmodule Sigra.WebhooksDispatcherTest do + use ExUnit.Case, async: true + + alias Ecto.Changeset + alias Ecto.Multi + alias Sigra.Webhooks + alias Sigra.Webhooks.Dispatcher + + defmodule Subscription do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_subscriptions" do + field :endpoint_url, :string + field :event_types, {:array, :string}, default: [] + field :enabled, :boolean, default: true + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:id, :endpoint_url, :event_types, :enabled]) + |> validate_required([:endpoint_url, :event_types, :enabled]) + end + end + + defmodule Event do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_events" do + field :event_id, :string + field :type, :string + field :schema_version, :string + field :occurred_at, :utc_datetime + field :payload, :map + field :actor_id, :binary_id + field :actor_type, :string + field :organization_id, :binary_id + field :request_id, :string + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :event_id, + :type, + :schema_version, + :occurred_at, + :payload, + :actor_id, + :actor_type, + :organization_id, + :request_id + ]) + |> validate_required([:event_id, :type, :schema_version, :occurred_at, :payload]) + end + end + + defmodule Delivery do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_deliveries" do + field :delivery_id, :string + field :status, :string + field :endpoint_url, :string + field :webhook_subscription_id, :binary_id + field :webhook_event_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :status, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + |> validate_required([ + :delivery_id, + :status, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + end + end + + defmodule User do + defstruct [:id, :email, :display_name, :confirmed_at, :inserted_at, :updated_at] + end + + defmodule UserRecord do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "users" do + field :email, :string + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:id, :email]) + |> validate_required([:id, :email]) + end + end + + defmodule MockRepo do + def all(_schema), do: Process.get(:dispatcher_subscriptions, []) + + def insert(changeset) do + if changeset.valid? do + struct = + changeset + |> Changeset.apply_changes() + |> ensure_id() + + store_insert(struct) + {:ok, struct} + else + {:error, changeset} + end + end + + def transaction(%Multi{} = multi), do: Sigra.Test.MultiStub.run(__MODULE__, multi) + + defp store_insert(%Event{} = event) do + Process.put(:dispatcher_events, [event | Process.get(:dispatcher_events, [])]) + end + + defp store_insert(%Delivery{} = delivery) do + Process.put(:dispatcher_deliveries, [delivery | Process.get(:dispatcher_deliveries, [])]) + end + + defp store_insert(%Oban.Job{} = job) do + Process.put(:dispatcher_jobs, [job | Process.get(:dispatcher_jobs, [])]) + end + + defp store_insert(%UserRecord{}), do: :ok + defp store_insert(_struct), do: :ok + + defp ensure_id(%{id: nil} = struct), do: %{struct | id: "id-#{System.unique_integer([:positive])}"} + defp ensure_id(struct), do: struct + end + + setup do + Process.put(:dispatcher_subscriptions, []) + Process.put(:dispatcher_events, []) + Process.put(:dispatcher_deliveries, []) + Process.put(:dispatcher_jobs, []) + + on_exit(fn -> + Process.delete(:dispatcher_subscriptions) + Process.delete(:dispatcher_events) + Process.delete(:dispatcher_deliveries) + Process.delete(:dispatcher_jobs) + end) + + :ok + end + + defp config(overrides \\ []) do + defaults = [ + repo: MockRepo, + user_schema: Sigra.TestUser, + secret_key_base: String.duplicate("a", 64), + webhooks: [ + enabled: true, + webhook_subscription_schema: Subscription, + webhook_event_schema: Event, + webhook_delivery_schema: Delivery + ] + ] + + Sigra.Config.new!(Keyword.merge(defaults, overrides)) + end + + test "matching_subscriptions/2 only returns enabled subscriptions with explicit event matches" do + Process.put(:dispatcher_subscriptions, [ + %Subscription{id: "sub-1", enabled: true, event_types: ["user.created"], endpoint_url: "https://one.test"}, + %Subscription{id: "sub-2", enabled: true, event_types: ["session.created"], endpoint_url: "https://two.test"}, + %Subscription{id: "sub-3", enabled: false, event_types: ["user.created"], endpoint_url: "https://three.test"} + ]) + + assert [%Subscription{id: "sub-1"}] = + Dispatcher.matching_subscriptions(config(), "user.created") + end + + test "dispatch_multi/4 persists one public event plus one pending delivery and initial job per matching subscription" do + Process.put(:dispatcher_subscriptions, [ + %Subscription{id: "sub-1", enabled: true, event_types: ["user.created"], endpoint_url: "https://one.test/hooks"}, + %Subscription{id: "sub-2", enabled: true, event_types: ["user.created"], endpoint_url: "https://two.test/hooks"}, + %Subscription{id: "sub-3", enabled: true, event_types: ["session.created"], endpoint_url: "https://three.test/hooks"} + ]) + + object = %User{ + id: "user-1", + email: "user@example.com", + display_name: "User Example", + inserted_at: ~U[2026-05-06 12:00:00Z], + updated_at: ~U[2026-05-06 12:00:00Z] + } + + multi = + Dispatcher.dispatch_multi(config(), "user.created", object, + step_id: :register, + event_id: "evt_123", + occurred_at: ~U[2026-05-06 12:30:00Z], + context: %{ + actor: %{type: "user", id: "admin-1"}, + organization: %{id: "org-1"}, + request: %{id: "req-1"} + } + ) + + assert {:ok, changes} = MockRepo.transaction(multi) + assert Map.has_key?(changes, {:webhook_subscriptions, :register}) + assert Map.has_key?(changes, {:webhook_event, :register}) + assert Map.has_key?(changes, {:webhook_deliveries, :register}) + assert Map.has_key?(changes, {:webhook_delivery_jobs, :register}) + assert length(changes[{:webhook_deliveries, :register}]) == 2 + assert length(changes[{:webhook_delivery_jobs, :register}]) == 2 + + assert %Event{} = event = changes[{:webhook_event, :register}] + assert event.event_id == "evt_123" + assert event.type == "user.created" + assert event.actor_id == "admin-1" + assert event.organization_id == "org-1" + assert event.request_id == "req-1" + assert get_in(event.payload, ["data", "object", "email"]) == "user@example.com" + + assert Enum.map(changes[{:webhook_deliveries, :register}], & &1.webhook_event_id) == + [event.id, event.id] + + assert Enum.map(changes[{:webhook_delivery_jobs, :register}], & &1.args) == + Enum.map(changes[{:webhook_deliveries, :register}], fn delivery -> + %{"delivery_id" => delivery.delivery_id} + end) + + assert Enum.all?(changes[{:webhook_delivery_jobs, :register}], &(&1.queue == "sigra_webhooks")) + end + + test "append_dispatch_multi/5 composes webhook persistence and initial queue handoff into an outer transaction" do + Process.put(:dispatcher_subscriptions, [ + %Subscription{id: "sub-1", enabled: true, event_types: ["user.created"], endpoint_url: "https://one.test/hooks"} + ]) + + multi = + Multi.new() + |> Multi.insert(:user, UserRecord.changeset(%UserRecord{}, %{id: "user-1", email: "user@example.com"})) + |> Webhooks.append_dispatch_multi( + config(), + "user.created", + {:changes_key, :user}, + step_id: :register, + context: %{actor: %{type: "user", id: "user-1"}} + ) + + assert {:ok, changes} = MockRepo.transaction(multi) + assert changes.user.id == "user-1" + assert length(changes[{:webhook_deliveries, :register}]) == 1 + + assert [%Oban.Job{args: %{"delivery_id" => delivery_id}}] = + changes[{:webhook_delivery_jobs, :register}] + + assert [delivery] = changes[{:webhook_deliveries, :register}] + assert delivery_id == delivery.delivery_id + end +end diff --git a/test/sigra/webhooks_egress_policy_proof_test.exs b/test/sigra/webhooks_egress_policy_proof_test.exs new file mode 100644 index 00000000..61467bbc --- /dev/null +++ b/test/sigra/webhooks_egress_policy_proof_test.exs @@ -0,0 +1,319 @@ +defmodule Sigra.WebhooksEgressPolicyProofTest do + use ExUnit.Case, async: false + + alias Ecto.{Changeset, Multi} + alias Sigra.Workers.WebhookDelivery + + defmodule MockUser do + defstruct [:id] + end + + defmodule Subscription do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_subscriptions" do + field :endpoint_url, :string + field :enabled, :boolean, default: true + field :signing_secret, :string + field :next_signing_secret, :string + field :rotation_state, Ecto.Enum, + values: [:stable, :prepared, :overlap_active, :completed], + default: :stable + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:endpoint_url, :enabled, :signing_secret, :next_signing_secret, :rotation_state]) + |> validate_required([:endpoint_url, :enabled, :signing_secret]) + end + end + + defmodule Event do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_events" do + field :payload, :map, default: %{} + end + end + + defmodule Delivery do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_deliveries" do + field :delivery_id, :string + field :status, :string, default: "pending" + field :attempt_count, :integer, default: 0 + field :endpoint_url, :string + field :dispatched_at, :utc_datetime + field :last_attempted_at, :utc_datetime + field :next_attempt_at, :utc_datetime + field :last_http_status, :integer + field :last_error_category, :string + field :last_error_detail, :string + field :dead_lettered_at, :utc_datetime + field :terminal_reason, :string + field :webhook_subscription_id, :binary_id + field :webhook_event_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :dispatched_at, + :last_attempted_at, + :next_attempt_at, + :last_http_status, + :last_error_category, + :last_error_detail, + :dead_lettered_at, + :terminal_reason, + :webhook_subscription_id, + :webhook_event_id + ]) + |> validate_required([ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + end + end + + defmodule DeliveryAttempt do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_delivery_attempts" do + field :delivery_id, :string + field :attempt_number, :integer + field :endpoint_url, :string + field :started_at, :utc_datetime + field :finished_at, :utc_datetime + field :response_status, :integer + field :retryable, :boolean, default: false + field :retry_after_seconds, :integer + field :error_category, :string + field :error_detail, :string + field :terminal_reason, :string + field :webhook_delivery_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :finished_at, + :response_status, + :retryable, + :retry_after_seconds, + :error_category, + :error_detail, + :terminal_reason, + :webhook_delivery_id + ]) + |> validate_required([:delivery_id, :attempt_number, :endpoint_url, :started_at, :retryable]) + end + end + + defmodule MockRepo do + def get_by(Delivery, delivery_id: delivery_id), do: Process.get({:delivery, delivery_id}) + def get(Subscription, id), do: Process.get({:subscription, id}) + def get(Event, id), do: Process.get({:event, id}) + + def insert(%Changeset{} = changeset) do + struct = Changeset.apply_changes(changeset) + struct = Map.put_new(struct, :id, Ecto.UUID.generate()) + + case struct do + %DeliveryAttempt{} = attempt -> + attempts = Process.get({:attempts, attempt.delivery_id}, []) + Process.put({:attempts, attempt.delivery_id}, attempts ++ [attempt]) + {:ok, attempt} + + other -> + {:ok, other} + end + end + + def update(%Changeset{} = changeset) do + delivery = Changeset.apply_changes(changeset) + Process.put({:delivery, delivery.delivery_id}, delivery) + {:ok, delivery} + end + + def transaction(%Multi{} = multi) do + Enum.reduce_while(Multi.to_list(multi), {:ok, %{}}, fn + {name, {:insert, changeset, _opts}}, {:ok, acc} -> + {:ok, value} = insert(changeset) + {:cont, {:ok, Map.put(acc, name, value)}} + + {name, {:update, changeset, _opts}}, {:ok, acc} -> + {:ok, value} = update(changeset) + {:cont, {:ok, Map.put(acc, name, value)}} + end) + end + end + + setup do + Application.put_env(:sigra, :repo, MockRepo) + Application.put_env(:sigra, :user_schema, MockUser) + Application.put_env(:sigra, :webhooks, webhooks_config()) + Application.put_env(:sigra, :webhook_delivery_oban, fn _changeset -> {:ok, %{}} end) + + on_exit(fn -> + Application.delete_env(:sigra, :repo) + Application.delete_env(:sigra, :user_schema) + Application.delete_env(:sigra, :webhooks) + Application.delete_env(:sigra, :webhook_delivery_requester) + Application.delete_env(:sigra, :webhook_delivery_oban) + end) + + :ok + end + + test "allowed public https delivery still sends" do + store_fixture_rows(delivery_id: "del_public", endpoint_url: "https://hooks.example.test/inbound") + + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + assert {:ok, :delivered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_public"}}) + + assert_receive {:webhook_request, request} + assert request.url == "https://hooks.example.test/inbound" + end + + test "blocked metadata destination fails locally before any requester call" do + resolver = fn + "metadata.example.test" -> {:ok, [{169, 254, 169, 254}]} + host -> public_test_resolver(host) + end + + Application.put_env(:sigra, :webhooks, webhooks_config(endpoint_resolver: resolver)) + store_fixture_rows(delivery_id: "del_metadata", endpoint_url: "https://metadata.example.test/inbound") + + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_metadata"}}) + + assert %Delivery{ + last_error_category: "local_policy_error", + terminal_reason: "blocked_metadata_ip" + } = Process.get({:delivery, "del_metadata"}) + + assert [attempt] = Process.get({:attempts, "del_metadata"}) + assert attempt.error_category == "local_policy_error" + assert attempt.terminal_reason == "blocked_metadata_ip" + refute_receive {:webhook_request, _request} + end + + test "host callback denial persists local_policy_error and blocks delivery" do + policy = fn + %{uri: %URI{host: "callback.example.test"}} -> + {:error, :policy_denied, "blocked delivery by deployment callback"} + + _context -> + :ok + end + + Application.put_env(:sigra, :webhooks, webhooks_config(endpoint_policy: policy)) + store_fixture_rows(delivery_id: "del_callback", endpoint_url: "https://callback.example.test/inbound") + + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_callback"}}) + + assert %Delivery{ + last_error_category: "local_policy_error", + last_error_detail: "blocked delivery by deployment callback", + terminal_reason: "policy_denied" + } = Process.get({:delivery, "del_callback"}) + + refute_receive {:webhook_request, _request} + end + + defp webhooks_config(overrides \\ []) do + Keyword.merge( + [ + enabled: true, + webhook_subscription_schema: Subscription, + webhook_event_schema: Event, + webhook_delivery_schema: Delivery, + webhook_delivery_attempt_schema: DeliveryAttempt, + endpoint_resolver: &public_test_resolver/1, + oban_queue: "sigra_webhooks" + ], + overrides + ) + end + + defp store_fixture_rows(opts) do + delivery_id = Keyword.fetch!(opts, :delivery_id) + endpoint_url = Keyword.fetch!(opts, :endpoint_url) + + delivery = + %Delivery{ + id: "#{delivery_id}_row", + delivery_id: delivery_id, + status: "pending", + attempt_count: 0, + endpoint_url: endpoint_url, + webhook_subscription_id: "sub_1", + webhook_event_id: "evt_row_1" + } + + subscription = + %Subscription{ + id: "sub_1", + endpoint_url: endpoint_url, + enabled: true, + signing_secret: "whsec_phase105", + rotation_state: :stable + } + + event = + %Event{ + id: "evt_row_1", + payload: %{"id" => "evt_1", "type" => "user.created", "data" => %{"object" => %{"id" => "user_1"}}} + } + + Process.put({:delivery, delivery.delivery_id}, delivery) + Process.put({:subscription, subscription.id}, subscription) + Process.put({:event, event.id}, event) + Process.put({:attempts, delivery.delivery_id}, []) + end + + defp public_test_resolver(host) do + case host do + "hooks.example.test" -> {:ok, [{203, 0, 113, 20}]} + "callback.example.test" -> {:ok, [{203, 0, 113, 21}]} + _other -> {:error, :nxdomain} + end + end +end diff --git a/test/sigra/webhooks_event_catalog_test.exs b/test/sigra/webhooks_event_catalog_test.exs new file mode 100644 index 00000000..bc719fa8 --- /dev/null +++ b/test/sigra/webhooks_event_catalog_test.exs @@ -0,0 +1,29 @@ +defmodule Sigra.WebhooksEventCatalogTest do + use ExUnit.Case, async: true + + alias Sigra.Webhooks.EventCatalog + alias Sigra.Webhooks.Serializers + + test "contains the curated Phase 97 event catalog" do + assert EventCatalog.all() == [ + "organization_membership.created", + "organization_membership.deleted", + "organization_membership.updated", + "service_account.created", + "service_account.revoked", + "session.created", + "session.revoked", + "user.created", + "user.deleted", + "user.updated" + ] + end + + test "resolves serializers and resources from one authoritative registry" do + assert EventCatalog.serializer_for!("user.updated") == Serializers.User + assert EventCatalog.serializer_for!("session.revoked") == Serializers.Session + assert EventCatalog.resource_for!("organization_membership.created") == :organization_membership + assert EventCatalog.valid?("service_account.created") + refute EventCatalog.valid?("security.lockout") + end +end diff --git a/test/sigra/webhooks_integration_test.exs b/test/sigra/webhooks_integration_test.exs new file mode 100644 index 00000000..f13e54df --- /dev/null +++ b/test/sigra/webhooks_integration_test.exs @@ -0,0 +1,1206 @@ +defmodule Sigra.WebhooksIntegrationTest do + use ExUnit.Case, async: false + + import Ecto.Query + + alias Ecto.Changeset + alias Sigra.Auth + alias Sigra.ServiceAccounts + alias Sigra.Test.PostgresRepo + alias Sigra.Webhooks + alias Sigra.Webhooks.Signature + alias Sigra.Workers.WebhookDelivery + + defmodule MockOban do + def insert(%Ecto.Changeset{} = changeset) do + job = %{ + args: Ecto.Changeset.get_change(changeset, :args), + queue: Ecto.Changeset.get_change(changeset, :queue) + } + + jobs = Process.get(:queued_jobs, []) + Process.put(:queued_jobs, jobs ++ [job]) + {:ok, job} + end + end + + defmodule IntegrationUser do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_integration_users_97" do + field :email, :string + field :hashed_password, :string + field :display_name, :string + timestamps(type: :utc_datetime_usec) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:email, :hashed_password, :display_name]) + |> validate_required([:email, :hashed_password]) + |> unique_constraint(:email, name: :webhook_integration_users_97_email_key) + end + end + + defmodule WebhookSubscription do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + @foreign_key_type :binary_id + + schema "webhook_integration_subscriptions_97" do + field :endpoint_url, :string + field :event_types, {:array, :string}, default: [] + field :enabled, :boolean, default: true + field :description, :string + field :signing_secret, :binary + field :next_signing_secret, :binary + field :rotation_state, Ecto.Enum, + values: [:stable, :prepared, :overlap_active, :completed], + default: :stable + + field :rotation_prepared_at, :utc_datetime_usec + field :rotation_overlap_started_at, :utc_datetime_usec + field :rotation_retire_after_at, :utc_datetime_usec + field :rotation_completed_at, :utc_datetime_usec + field :rotation_last_changed_by_user_id, :binary_id + field :signing_secret_fingerprint, :string + field :next_signing_secret_fingerprint, :string + timestamps(type: :utc_datetime_usec) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :endpoint_url, + :event_types, + :enabled, + :description, + :signing_secret, + :next_signing_secret, + :rotation_state, + :rotation_prepared_at, + :rotation_overlap_started_at, + :rotation_retire_after_at, + :rotation_completed_at, + :rotation_last_changed_by_user_id, + :signing_secret_fingerprint, + :next_signing_secret_fingerprint + ]) + |> validate_required([:endpoint_url, :event_types, :enabled, :signing_secret]) + end + end + + defmodule WebhookEvent do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + @foreign_key_type :binary_id + + schema "webhook_integration_events_97" do + field :event_id, :string + field :type, :string + field :schema_version, :string + field :occurred_at, :utc_datetime_usec + field :payload, :map, default: %{} + field :actor_id, :binary_id + field :actor_type, :string + field :organization_id, :binary_id + field :request_id, :string + timestamps(type: :utc_datetime_usec, updated_at: false) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :event_id, + :type, + :schema_version, + :occurred_at, + :payload, + :actor_id, + :actor_type, + :organization_id, + :request_id + ]) + |> validate_required([:event_id, :type, :schema_version, :occurred_at, :payload]) + |> unique_constraint(:event_id, name: :webhook_integration_events_97_event_id_index) + end + end + + defmodule WebhookDeliveryRow do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + @foreign_key_type :binary_id + + schema "webhook_integration_deliveries_97" do + field :delivery_id, :string + field :status, :string, default: "pending" + field :attempt_count, :integer, default: 0 + field :endpoint_url, :string + field :dispatched_at, :utc_datetime_usec + field :last_attempted_at, :utc_datetime_usec + field :next_attempt_at, :utc_datetime_usec + field :last_http_status, :integer + field :last_error_category, :string + field :last_error_detail, :string + field :dead_lettered_at, :utc_datetime_usec + field :terminal_reason, :string + field :replayed_from_webhook_delivery_id, :binary_id + field :replay_root_webhook_delivery_id, :binary_id + field :replayed_at, :utc_datetime_usec + field :replayed_by_user_id, :binary_id + field :replay_source, :string + belongs_to :webhook_subscription, WebhookSubscription + belongs_to :webhook_event, WebhookEvent + has_many :attempts, Sigra.WebhooksIntegrationTest.WebhookDeliveryAttemptRow, + foreign_key: :webhook_delivery_id + timestamps(type: :utc_datetime_usec) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :dispatched_at, + :last_attempted_at, + :next_attempt_at, + :last_http_status, + :last_error_category, + :last_error_detail, + :dead_lettered_at, + :terminal_reason, + :replayed_from_webhook_delivery_id, + :replay_root_webhook_delivery_id, + :replayed_at, + :replayed_by_user_id, + :replay_source, + :webhook_subscription_id, + :webhook_event_id + ]) + |> validate_required([ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + |> assoc_constraint(:webhook_subscription) + |> assoc_constraint(:webhook_event) + |> unique_constraint(:delivery_id, + name: :webhook_integration_deliveries_97_delivery_id_index + ) + |> unique_constraint(:replayed_from_webhook_delivery_id, + name: :webhook_integration_deliveries_97_replayed_from_unique_index + ) + end + end + + defmodule WebhookDeliveryAttemptRow do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + @foreign_key_type :binary_id + + schema "webhook_integration_delivery_attempts_97" do + field :delivery_id, :string + field :attempt_number, :integer + field :endpoint_url, :string + field :started_at, :utc_datetime_usec + field :finished_at, :utc_datetime_usec + field :response_status, :integer + field :retryable, :boolean, default: false + field :retry_after_seconds, :integer + field :error_category, :string + field :error_detail, :string + field :terminal_reason, :string + belongs_to :webhook_delivery, Sigra.WebhooksIntegrationTest.WebhookDeliveryRow + + timestamps(type: :utc_datetime_usec, updated_at: false) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :finished_at, + :response_status, + :retryable, + :retry_after_seconds, + :error_category, + :error_detail, + :terminal_reason, + :webhook_delivery_id + ]) + |> validate_required([ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :retryable + ]) + |> assoc_constraint(:webhook_delivery) + |> unique_constraint([:delivery_id, :attempt_number], + name: :webhook_integration_delivery_attempts_97_delivery_id_attempt_number_index + ) + end + end + + defmodule ObanJobRow do + use Ecto.Schema + + @primary_key {:id, :id, autogenerate: false} + schema "oban_jobs" do + field :state, :string + field :queue, :string + field :worker, :string + field :args, :map + field :max_attempts, :integer + field :attempt, :integer + field :priority, :integer + field :inserted_at, :utc_datetime_usec + field :scheduled_at, :utc_datetime_usec + end + end + + defmodule ServiceAccountRow do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_integration_service_accounts_100" do + field :name, :string + field :scopes, {:array, :string}, default: [] + field :role, :string + field :token_epoch, :integer, default: 0 + field :revoked_at, :utc_datetime_usec + field :last_used_at, :utc_datetime_usec + field :organization_id, :binary_id + field :created_by_user_id, :binary_id + timestamps(type: :utc_datetime_usec) + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :name, + :scopes, + :role, + :token_epoch, + :revoked_at, + :last_used_at, + :organization_id, + :created_by_user_id + ]) + |> validate_required([:name, :organization_id]) + end + end + + setup do + start_supervised!({PostgresRepo, PostgresRepo.default_config()}) + repo = PostgresRepo + + Application.put_env(:sigra, :repo, repo) + Application.put_env(:sigra, :user_schema, IntegrationUser) + Application.put_env(:sigra, :secret_key_base, String.duplicate("a", 64)) + Application.put_env(:sigra, :webhooks, webhooks_config()) + + on_exit(fn -> + Application.delete_env(:sigra, :repo) + Application.delete_env(:sigra, :user_schema) + Application.delete_env(:sigra, :secret_key_base) + Application.delete_env(:sigra, :webhooks) + Application.delete_env(:sigra, :webhook_delivery_requester) + Application.delete_env(:sigra, :webhook_delivery_oban) + Process.delete(:queued_jobs) + end) + + Ecto.Adapters.SQL.query!(repo, ~s|CREATE EXTENSION IF NOT EXISTS "uuid-ossp"|, []) + ensure_oban_jobs_table!(repo) + + Ecto.Adapters.SQL.query!( + repo, + "DROP TABLE IF EXISTS webhook_integration_delivery_attempts_97 CASCADE", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "DROP TABLE IF EXISTS webhook_integration_deliveries_97 CASCADE", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "DROP TABLE IF EXISTS webhook_integration_events_97 CASCADE", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "DROP TABLE IF EXISTS webhook_integration_subscriptions_97 CASCADE", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "DROP TABLE IF EXISTS webhook_integration_users_97 CASCADE", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "DROP TABLE IF EXISTS webhook_integration_service_accounts_100 CASCADE", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_integration_users_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + email text NOT NULL, + hashed_password text NOT NULL, + display_name text, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now(), + CONSTRAINT webhook_integration_users_97_email_key UNIQUE (email) + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_integration_subscriptions_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + endpoint_url text NOT NULL, + event_types text[] NOT NULL DEFAULT '{}', + enabled boolean NOT NULL DEFAULT true, + description text, + signing_secret bytea NOT NULL, + next_signing_secret bytea, + rotation_state text NOT NULL DEFAULT 'stable', + rotation_prepared_at timestamp, + rotation_overlap_started_at timestamp, + rotation_retire_after_at timestamp, + rotation_completed_at timestamp, + rotation_last_changed_by_user_id uuid, + signing_secret_fingerprint text, + next_signing_secret_fingerprint text, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_integration_events_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + event_id text NOT NULL, + type text NOT NULL, + schema_version text NOT NULL, + occurred_at timestamp NOT NULL, + payload jsonb NOT NULL, + actor_id uuid, + actor_type text, + organization_id uuid, + request_id text, + inserted_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "CREATE UNIQUE INDEX webhook_integration_events_97_event_id_index ON webhook_integration_events_97 (event_id)", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_integration_deliveries_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + delivery_id text NOT NULL, + status text NOT NULL DEFAULT 'pending', + attempt_count integer NOT NULL DEFAULT 0, + endpoint_url text NOT NULL, + dispatched_at timestamp, + last_attempted_at timestamp, + next_attempt_at timestamp, + last_http_status integer, + last_error_category text, + last_error_detail text, + dead_lettered_at timestamp, + terminal_reason text, + replayed_from_webhook_delivery_id uuid REFERENCES webhook_integration_deliveries_97(id), + replay_root_webhook_delivery_id uuid REFERENCES webhook_integration_deliveries_97(id), + replayed_at timestamp, + replayed_by_user_id uuid, + replay_source text, + webhook_subscription_id uuid NOT NULL REFERENCES webhook_integration_subscriptions_97(id), + webhook_event_id uuid NOT NULL REFERENCES webhook_integration_events_97(id), + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "CREATE UNIQUE INDEX webhook_integration_deliveries_97_delivery_id_index ON webhook_integration_deliveries_97 (delivery_id)", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "CREATE UNIQUE INDEX webhook_integration_deliveries_97_replayed_from_unique_index ON webhook_integration_deliveries_97 (replayed_from_webhook_delivery_id) WHERE replayed_from_webhook_delivery_id IS NOT NULL", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "CREATE INDEX webhook_integration_deliveries_97_replay_root_index ON webhook_integration_deliveries_97 (replay_root_webhook_delivery_id)", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_integration_delivery_attempts_97 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + delivery_id text NOT NULL, + attempt_number integer NOT NULL, + endpoint_url text NOT NULL, + started_at timestamp NOT NULL, + finished_at timestamp, + response_status integer, + retryable boolean NOT NULL DEFAULT false, + retry_after_seconds integer, + error_category text, + error_detail text, + terminal_reason text, + webhook_delivery_id uuid REFERENCES webhook_integration_deliveries_97(id), + inserted_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + "CREATE UNIQUE INDEX webhook_integration_delivery_attempts_97_delivery_id_attempt_number_index ON webhook_integration_delivery_attempts_97 (delivery_id, attempt_number)", + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_integration_service_accounts_100 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + name text NOT NULL, + scopes text[] NOT NULL DEFAULT '{}', + role text, + token_epoch integer NOT NULL DEFAULT 0, + revoked_at timestamp, + last_used_at timestamp, + organization_id uuid NOT NULL, + created_by_user_id uuid, + inserted_at timestamp NOT NULL DEFAULT now(), + updated_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + + %{repo: repo} + end + + test "register/3 persists one webhook event, one pending delivery per matching subscription, and one initial job per delivery", + %{ + repo: repo + } do + config = config(repo) + + {:ok, sub_one} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://one.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("a", 32) + }) + + {:ok, sub_two} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://two.example.test/hooks", + event_types: ["user.created", "session.created"], + signing_secret: String.duplicate("b", 32) + }) + + {:ok, _disabled} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://disabled.example.test/hooks", + event_types: ["user.created"], + enabled: false, + signing_secret: String.duplicate("c", 32) + }) + + {:ok, _other_event} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://other.example.test/hooks", + event_types: ["session.created"], + signing_secret: String.duplicate("d", 32) + }) + + assert {:ok, user} = + Auth.register( + config, + %{ + "email" => "phase97-webhook@example.com", + "hashed_password" => "hash", + "display_name" => "Phase 97" + }, + register_opts(request_id: "req_phase97_register") + ) + + assert user.email == "phase97-webhook@example.com" + assert 1 == repo.aggregate(IntegrationUser, :count) + assert 1 == repo.aggregate(WebhookEvent, :count) + assert 2 == repo.aggregate(WebhookDeliveryRow, :count) + assert 2 == repo.aggregate(ObanJobRow, :count) + + event = repo.one(from(event in WebhookEvent, select: event)) + + deliveries = + repo.all( + from(delivery in WebhookDeliveryRow, + order_by: [asc: delivery.endpoint_url], + select: delivery + ) + ) + + assert event.type == "user.created" + assert event.schema_version == "2026-05-06" + assert event.actor_id == user.id + assert event.actor_type == "user" + assert event.request_id == "req_phase97_register" + assert event.payload["id"] == event.event_id + assert event.payload["type"] == "user.created" + assert event.payload["schema_version"] == "2026-05-06" + assert get_in(event.payload, ["data", "object", "email"]) == user.email + assert get_in(event.payload, ["context", "actor", "id"]) == user.id + assert get_in(event.payload, ["context", "request", "id"]) == "req_phase97_register" + + assert Enum.map(deliveries, & &1.status) == ["pending", "pending"] + + assert Enum.map(deliveries, & &1.endpoint_url) == [ + "https://one.example.test/hooks", + "https://two.example.test/hooks" + ] + + assert Enum.sort(Enum.map(deliveries, & &1.webhook_subscription_id)) == + Enum.sort([sub_one.id, sub_two.id]) + + assert Enum.all?(deliveries, &(&1.webhook_event_id == event.id)) + assert Enum.all?(deliveries, &is_binary(&1.delivery_id)) + assert Enum.all?(deliveries, &(&1.delivery_id != event.event_id)) + assert Enum.all?(deliveries, &(&1.attempt_count == 0)) + assert Enum.all?(deliveries, &is_nil(&1.last_attempted_at)) + assert Enum.all?(deliveries, &is_nil(&1.next_attempt_at)) + assert Enum.all?(deliveries, &is_nil(&1.dead_lettered_at)) + + queued_jobs = + repo.all( + from(job in ObanJobRow, + order_by: [asc: job.id], + select: %{args: job.args, queue: job.queue, worker: job.worker} + ) + ) + + assert Enum.map(queued_jobs, & &1.args) == + Enum.map(deliveries, fn delivery -> %{"delivery_id" => delivery.delivery_id} end) + + assert Enum.all?(queued_jobs, &(&1.queue == "sigra_webhooks")) + assert Enum.all?(queued_jobs, &(&1.worker == "Sigra.Workers.WebhookDelivery")) + end + + test "service_account.create uses the same production seam to persist a delivery and initial job", %{ + repo: repo + } do + config = config(repo) + scope = service_account_scope() + + {:ok, _subscription} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://service-account.example.test/hooks", + event_types: ["service_account.created"], + signing_secret: String.duplicate("s", 32) + }) + + assert {:ok, service_account} = + ServiceAccounts.create(config, scope, %{ + name: "CI Agent", + scopes: ["deploy:write"], + organization_id: scope.active_organization.id + }) + + assert service_account.name == "CI Agent" + assert 1 == repo.aggregate(ServiceAccountRow, :count) + + assert %WebhookEvent{} = event = + repo.one!( + from(event in WebhookEvent, + where: event.type == "service_account.created", + select: event + ) + ) + + assert %WebhookDeliveryRow{} = delivery = + repo.one!( + from(delivery in WebhookDeliveryRow, + where: delivery.webhook_event_id == ^event.id, + select: delivery + ) + ) + + assert %ObanJobRow{args: %{"delivery_id" => queued_delivery_id}, queue: "sigra_webhooks"} = + repo.one!( + from(job in ObanJobRow, + where: job.worker == "Sigra.Workers.WebhookDelivery", + select: job + ) + ) + + assert queued_delivery_id == delivery.delivery_id + assert get_in(event.payload, ["data", "object", "name"]) == "CI Agent" + assert get_in(event.payload, ["data", "object", "organization_id"]) == + scope.active_organization.id + end + + test "a persisted delivery can be queued and consumed by the worker without leaking secret material into job args", + %{repo: repo} do + config = config(repo) + secret = "phase97-signing-secret-material" + + {:ok, _subscription} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://receiver.example.test/inbox", + event_types: ["user.created"], + signing_secret: secret + }) + + assert {:ok, _user} = + Auth.register( + config, + %{"email" => "worker-ready@example.com", "hashed_password" => "hash"}, + register_opts(request_id: "req_phase97_worker") + ) + + event = repo.one(from(event in WebhookEvent, select: event)) + delivery = repo.one(from(delivery in WebhookDeliveryRow, select: delivery)) + + job_changeset = Webhooks.build_delivery_job(config, delivery) + + assert Changeset.get_change(job_changeset, :args) == %{"delivery_id" => delivery.delivery_id} + assert Changeset.get_change(job_changeset, :queue) == "sigra_webhooks" + refute inspect(Changeset.get_change(job_changeset, :args)) =~ secret + assert delivery.status == "pending" + assert delivery.attempt_count == 0 + assert is_nil(delivery.dispatched_at) + + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + assert {:ok, :delivered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => delivery.delivery_id}}) + + assert_receive {:webhook_request, request} + + headers = Map.new(request.headers) + timestamp = String.to_integer(headers["Sigra-Webhook-Timestamp"]) + + assert request.url == "https://receiver.example.test/inbox" + assert headers["Content-Type"] == "application/json" + assert headers["Sigra-Webhook-Id"] == delivery.delivery_id + assert Jason.decode!(request.body) == event.payload + + assert {:ok, %{delivery_id: delivery_id, timestamp: ^timestamp}} = + Signature.verify(headers, request.body, secret, now: timestamp, tolerance: 0) + + assert delivery_id == delivery.delivery_id + + updated_delivery = + repo.get_by!(WebhookDeliveryRow, delivery_id: delivery.delivery_id) + + assert updated_delivery.status == "delivered" + assert %DateTime{} = updated_delivery.dispatched_at + end + + test "the evolved delivery summary and attempts ledger are queryable in Postgres", %{repo: repo} do + config = config(repo) + + {:ok, subscription} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://history.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("z", 32) + }) + + {:ok, user} = + Auth.register( + config, + %{"email" => "history@example.com", "hashed_password" => "hash"}, + register_opts(request_id: "req_phase98_history") + ) + + delivery = + repo.one!( + from(delivery in WebhookDeliveryRow, + where: delivery.webhook_subscription_id == ^subscription.id, + select: delivery + ) + ) + + attempted_at = DateTime.utc_now() |> DateTime.truncate(:second) + finished_at = DateTime.add(attempted_at, 3, :second) + + {:ok, updated_delivery} = + delivery + |> WebhookDeliveryRow.changeset(%{ + status: "retry_scheduled", + attempt_count: 1, + last_attempted_at: attempted_at, + next_attempt_at: DateTime.add(attempted_at, 60, :second), + last_http_status: 429, + last_error_category: "http_backpressure", + last_error_detail: "receiver requested backoff" + }) + |> repo.update() + + {:ok, attempt} = + %WebhookDeliveryAttemptRow{} + |> WebhookDeliveryAttemptRow.changeset(%{ + delivery_id: updated_delivery.delivery_id, + attempt_number: 1, + endpoint_url: updated_delivery.endpoint_url, + started_at: attempted_at, + finished_at: finished_at, + response_status: 429, + retryable: true, + retry_after_seconds: 60, + error_category: "http_backpressure", + error_detail: "receiver requested backoff", + webhook_delivery_id: updated_delivery.id + }) + |> repo.insert() + + fetched_delivery = + repo.one!( + from(delivery in WebhookDeliveryRow, + where: delivery.id == ^updated_delivery.id, + preload: [:attempts] + ) + ) + + assert user.email == "history@example.com" + assert fetched_delivery.status == "retry_scheduled" + assert fetched_delivery.attempt_count == 1 + assert fetched_delivery.last_http_status == 429 + assert fetched_delivery.last_error_category == "http_backpressure" + assert %DateTime{} = fetched_delivery.next_attempt_at + assert [persisted_attempt] = fetched_delivery.attempts + assert persisted_attempt.id == attempt.id + assert persisted_attempt.attempt_number == 1 + assert persisted_attempt.retryable == true + assert persisted_attempt.retry_after_seconds == 60 + assert persisted_attempt.terminal_reason == nil + end + + test "retryable receiver failures keep the auth mutation committed and persist attempt history", %{ + repo: repo + } do + Application.put_env(:sigra, :webhook_delivery_oban, MockOban) + Process.put(:queued_jobs, []) + + {:ok, _subscription} = + Webhooks.create_subscription(config(repo), %{ + endpoint_url: "https://retry.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("r", 32) + }) + + {:ok, user} = + Auth.register( + config(repo), + %{"email" => "retry-path@example.com", "hashed_password" => "hash"}, + register_opts(request_id: "req_phase98_retry") + ) + + delivery = repo.one!(from(delivery in WebhookDeliveryRow, select: delivery)) + + Application.put_env(:sigra, :webhook_delivery_requester, fn _request -> + {:ok, %{status: 429, headers: [{"Retry-After", "120"}]}} + end) + + assert {:ok, :retry_scheduled} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => delivery.delivery_id}}) + + persisted = + repo.one!( + from(delivery in WebhookDeliveryRow, + where: delivery.delivery_id == ^delivery.delivery_id, + preload: [:attempts] + ) + ) + + assert user.email == "retry-path@example.com" + assert persisted.status == "retry_scheduled" + assert persisted.attempt_count == 1 + assert persisted.last_http_status == 429 + assert persisted.last_error_category == "http_backpressure" + assert %DateTime{} = persisted.next_attempt_at + assert [attempt] = persisted.attempts + assert attempt.delivery_id == delivery.delivery_id + assert attempt.retryable == true + assert attempt.retry_after_seconds == 120 + assert [%{args: %{"delivery_id" => same_delivery_id}, queue: "sigra_webhooks"}] = + Process.get(:queued_jobs) + assert same_delivery_id == delivery.delivery_id + end + + test "permanent receiver failures dead-letter the delivery in place without breaking auth commits", %{ + repo: repo + } do + Application.put_env(:sigra, :webhook_delivery_oban, MockOban) + Process.put(:queued_jobs, []) + + {:ok, _subscription} = + Webhooks.create_subscription(config(repo), %{ + endpoint_url: "https://dead-letter.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("d", 32) + }) + + {:ok, user} = + Auth.register( + config(repo), + %{"email" => "dead-letter@example.com", "hashed_password" => "hash"}, + register_opts(request_id: "req_phase98_dead_letter") + ) + + delivery = repo.one!(from(delivery in WebhookDeliveryRow, select: delivery)) + + Application.put_env(:sigra, :webhook_delivery_requester, fn _request -> + {:ok, %{status: 404}} + end) + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => delivery.delivery_id}}) + + persisted = + repo.one!( + from(delivery in WebhookDeliveryRow, + where: delivery.delivery_id == ^delivery.delivery_id, + preload: [:attempts] + ) + ) + + assert user.email == "dead-letter@example.com" + assert persisted.status == "dead_lettered" + assert persisted.attempt_count == 1 + assert %DateTime{} = persisted.dead_lettered_at + assert persisted.terminal_reason == "http_4xx_permanent" + assert [attempt] = persisted.attempts + assert attempt.retryable == false + assert attempt.response_status == 404 + assert attempt.terminal_reason == "http_4xx_permanent" + assert [] = Process.get(:queued_jobs) + end + + test "replay persists a fresh child delivery lineage and queues it once", %{repo: repo} do + config = config(repo) + actor_id = Ecto.UUID.generate() + + {:ok, _subscription} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://replay.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("p", 32) + }) + + {:ok, _user} = + Auth.register( + config, + %{"email" => "replay-path@example.com", "hashed_password" => "hash"}, + register_opts(request_id: "req_phase104_replay") + ) + + source = repo.one!(from(delivery in WebhookDeliveryRow, select: delivery)) + attempted_at = DateTime.utc_now() |> DateTime.truncate(:second) + + assert {:ok, %{delivery: dead_lettered}} = + Webhooks.persist_delivery_outcome(config, source, %{ + attempt_number: 1, + attempted_at: attempted_at, + finished_at: attempted_at, + response_status: 404, + retryable: false, + error_category: "http_client_error", + error_detail: "receiver rejected request", + terminal_reason: "http_4xx_permanent", + endpoint_url: source.endpoint_url + }) + + original_job_count = repo.aggregate(ObanJobRow, :count) + + assert {:ok, %{source_delivery: replay_source, replay_delivery: replay_delivery}} = + Webhooks.replay_delivery( + config, + dead_lettered.delivery_id, + %{user: %{id: actor_id}}, + source: "admin.delivery_detail" + ) + + assert replay_source.id == source.id + assert replay_delivery.delivery_id != source.delivery_id + assert replay_delivery.status == "pending" + assert replay_delivery.attempt_count == 0 + assert replay_delivery.replayed_from_webhook_delivery_id == source.id + assert replay_delivery.replay_root_webhook_delivery_id == source.id + assert replay_delivery.replayed_by_user_id == actor_id + assert replay_delivery.replay_source == "admin.delivery_detail" + assert %DateTime{} = replay_delivery.replayed_at + + persisted_source = + repo.one!( + from(delivery in WebhookDeliveryRow, + where: delivery.id == ^source.id, + preload: [:attempts] + ) + ) + + assert persisted_source.status == "dead_lettered" + assert persisted_source.attempt_count == 1 + assert length(persisted_source.attempts) == 1 + + assert repo.aggregate(WebhookDeliveryRow, :count) == 2 + assert repo.aggregate(WebhookDeliveryAttemptRow, :count) == 1 + assert repo.aggregate(ObanJobRow, :count) == original_job_count + 1 + + assert %ObanJobRow{args: %{"delivery_id" => queued_delivery_id}} = + repo.one!( + from(job in ObanJobRow, + order_by: [desc: job.id], + limit: 1, + select: job + ) + ) + + assert queued_delivery_id == replay_delivery.delivery_id + + assert {:error, :replay_already_exists} = + Webhooks.replay_delivery( + config, + dead_lettered.delivery_id, + %{user: %{id: actor_id}}, + source: "admin.delivery_detail" + ) + end + + test "rotation lifecycle persists explicit state, timestamps, actor metadata, and dual-slot truth", + %{repo: repo} do + config = config(repo) + admin_scope = %{user: %{id: Ecto.UUID.generate()}} + + assert {:ok, subscription} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://rotate.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("r", 32) + }) + + assert {:ok, prepared} = Webhooks.prepare_secret(config, subscription.id, scope: admin_scope) + assert prepared.rotation_state == :prepared + assert is_binary(prepared.next_signing_secret) + assert prepared.next_signing_secret != prepared.signing_secret + assert prepared.rotation_last_changed_by_user_id == admin_scope.user.id + assert %DateTime{} = prepared.rotation_prepared_at + assert is_binary(prepared.signing_secret_fingerprint) + assert is_binary(prepared.next_signing_secret_fingerprint) + + retire_after_at = DateTime.utc_now() |> DateTime.add(1800, :second) |> DateTime.truncate(:second) + + assert {:ok, overlap} = + Webhooks.start_secret_overlap(config, prepared.id, + scope: admin_scope, + retire_after_at: retire_after_at + ) + + assert overlap.rotation_state == :overlap_active + assert %DateTime{} = overlap.rotation_overlap_started_at + assert DateTime.compare(overlap.rotation_retire_after_at, retire_after_at) == :eq + + assert {:ok, completed} = + Webhooks.complete_secret_rotation(config, overlap.id, scope: admin_scope) + + assert completed.rotation_state == :completed + assert completed.next_signing_secret == nil + assert completed.rotation_prepared_at == nil + assert completed.rotation_overlap_started_at == nil + assert completed.rotation_retire_after_at == nil + assert completed.next_signing_secret_fingerprint == nil + assert %DateTime{} = completed.rotation_completed_at + + persisted = repo.get!(WebhookSubscription, completed.id) + + assert persisted.rotation_state == :completed + assert persisted.signing_secret == prepared.next_signing_secret + assert persisted.next_signing_secret == nil + assert persisted.rotation_last_changed_by_user_id == admin_scope.user.id + assert %DateTime{} = persisted.rotation_completed_at + end + + test "discard_prepared_secret clears the next slot and rejects out-of-order transitions", + %{repo: repo} do + config = config(repo) + admin_scope = %{user: %{id: Ecto.UUID.generate()}} + + assert {:ok, subscription} = + Webhooks.create_subscription(config, %{ + endpoint_url: "https://discard.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("d", 32) + }) + + assert {:error, start_changeset} = Webhooks.start_secret_overlap(config, subscription.id) + assert errors_on(start_changeset).rotation_state == ["can only start overlap from prepared"] + + assert {:error, complete_changeset} = Webhooks.complete_secret_rotation(config, subscription.id) + + assert errors_on(complete_changeset).rotation_state == [ + "can only complete rotation from overlap_active" + ] + + assert {:ok, prepared} = Webhooks.prepare_secret(config, subscription.id, scope: admin_scope) + assert {:ok, discarded} = Webhooks.discard_prepared_secret(config, prepared.id, scope: admin_scope) + + assert discarded.rotation_state == :stable + assert discarded.signing_secret == subscription.signing_secret + assert discarded.next_signing_secret == nil + assert discarded.rotation_prepared_at == nil + assert discarded.rotation_overlap_started_at == nil + assert discarded.rotation_retire_after_at == nil + assert discarded.rotation_completed_at == nil + assert discarded.next_signing_secret_fingerprint == nil + end + + defp config(repo) do + Sigra.Config.new!( + repo: repo, + user_schema: IntegrationUser, + secret_key_base: String.duplicate("a", 64), + webhooks: webhooks_config(), + service_accounts: [ + service_account_schema: ServiceAccountRow, + client_id_prefix: "sigra_sa_", + client_id_byte_size: 24 + ] + ) + end + + defp service_account_scope do + %{ + user: %{id: Ecto.UUID.generate()}, + active_organization: %{id: Ecto.UUID.generate()} + } + end + + defp webhooks_config do + [ + enabled: true, + webhook_subscription_schema: WebhookSubscription, + webhook_event_schema: WebhookEvent, + webhook_delivery_schema: WebhookDeliveryRow, + webhook_delivery_attempt_schema: WebhookDeliveryAttemptRow, + endpoint_resolver: &public_test_resolver/1, + oban_queue: "sigra_webhooks", + signature_tolerance: 300 + ] + end + + defp public_test_resolver(host) when is_binary(host) do + if String.ends_with?(host, ".example.test") do + {:ok, [{203, 0, 113, 20}]} + else + {:error, :nxdomain} + end + end + + defp register_opts(extra) do + Keyword.merge( + [changeset_fn: fn attrs -> IntegrationUser.changeset(%IntegrationUser{}, attrs) end], + extra + ) + end + + defp errors_on(%Changeset{} = changeset) do + Changeset.traverse_errors(changeset, fn {message, opts} -> + Regex.replace(~r"%{(\w+)}", message, fn _, key -> + opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string() + end) + end) + end + + defp ensure_oban_jobs_table!(repo) do + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS oban_jobs CASCADE", []) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE oban_jobs ( + id bigserial PRIMARY KEY, + state text NOT NULL DEFAULT 'available', + queue text NOT NULL DEFAULT 'default', + worker text NOT NULL, + args jsonb NOT NULL, + errors jsonb NOT NULL DEFAULT '[]'::jsonb, + meta jsonb NOT NULL DEFAULT '{}'::jsonb, + tags text[] NOT NULL DEFAULT '{}', + attempt integer NOT NULL DEFAULT 0, + attempted_by text[], + max_attempts integer NOT NULL DEFAULT 20, + priority integer NOT NULL DEFAULT 0, + attempted_at timestamp, + cancelled_at timestamp, + completed_at timestamp, + discarded_at timestamp, + inserted_at timestamp NOT NULL DEFAULT now(), + scheduled_at timestamp NOT NULL DEFAULT now() + ) + """, + [] + ) + end +end diff --git a/test/sigra/webhooks_payload_test.exs b/test/sigra/webhooks_payload_test.exs new file mode 100644 index 00000000..b473cbbc --- /dev/null +++ b/test/sigra/webhooks_payload_test.exs @@ -0,0 +1,138 @@ +defmodule Sigra.WebhooksPayloadTest do + use ExUnit.Case, async: true + + alias Sigra.Webhooks.Payload + + defmodule User do + defstruct [:id, :email, :display_name, :confirmed_at, :password_hash, :inserted_at, :updated_at] + end + + defmodule Session do + defstruct [:id, :user_id, :organization_id, :type, :token, :revoked_at, :last_active_at, :inserted_at] + end + + defmodule Membership do + defstruct [:id, :organization_id, :user_id, :role, :audit_metadata, :inserted_at, :updated_at] + end + + defmodule ServiceAccount do + defstruct [:id, :organization_id, :name, :scopes, :hashed_client_secret, :revoked_at, :inserted_at] + end + + test "builds the stable webhook envelope for user events without leaking internal fields" do + user = %User{ + id: "user_123", + email: "user@example.com", + display_name: "User Example", + password_hash: "secret", + confirmed_at: ~U[2026-05-06 12:00:00Z], + inserted_at: ~U[2026-05-06 11:00:00Z], + updated_at: ~U[2026-05-06 12:30:00Z] + } + + payload = + Payload.build("user.updated", user, + id: "evt_123", + occurred_at: ~U[2026-05-06 12:31:00Z], + changes: [:email, :display_name], + context: %{ + actor: %{type: "user", id: "admin_1"}, + organization: %{id: "org_1"}, + request: %{id: "req_1"} + } + ) + + assert payload == %{ + "id" => "evt_123", + "type" => "user.updated", + "schema_version" => "2026-05-06", + "occurred_at" => "2026-05-06T12:31:00Z", + "data" => %{ + "object" => %{ + "id" => "user_123", + "email" => "user@example.com", + "display_name" => "User Example", + "confirmed_at" => "2026-05-06T12:00:00Z", + "created_at" => "2026-05-06T11:00:00Z", + "updated_at" => "2026-05-06T12:30:00Z" + }, + "changes" => ["email", "display_name"] + }, + "context" => %{ + "actor" => %{"type" => "user", "id" => "admin_1"}, + "organization" => %{"id" => "org_1"}, + "request" => %{"id" => "req_1"} + } + } + + refute get_in(payload, ["data", "object", "password_hash"]) + end + + test "serializes session, membership, and service-account resources explicitly" do + session = + Payload.build("session.created", %Session{ + id: "sess_1", + user_id: "user_1", + organization_id: "org_1", + type: :standard, + token: "raw-token", + last_active_at: ~U[2026-05-06 12:00:00Z], + inserted_at: ~U[2026-05-06 11:00:00Z] + }) + + membership = + Payload.build("organization_membership.updated", %Membership{ + id: "mem_1", + organization_id: "org_1", + user_id: "user_1", + role: :admin, + audit_metadata: %{sensitive: true}, + inserted_at: ~U[2026-05-06 11:00:00Z], + updated_at: ~U[2026-05-06 12:00:00Z] + }, changes: ["role"]) + + service_account = + Payload.build("service_account.revoked", %ServiceAccount{ + id: "sa_1", + organization_id: "org_1", + name: "CI", + scopes: ["deploy:write"], + hashed_client_secret: <<1, 2, 3>>, + revoked_at: ~U[2026-05-06 12:00:00Z], + inserted_at: ~U[2026-05-06 11:00:00Z] + }) + + assert get_in(session, ["data", "object"]) == %{ + "id" => "sess_1", + "user_id" => "user_1", + "organization_id" => "org_1", + "type" => "standard", + "last_active_at" => "2026-05-06T12:00:00Z", + "created_at" => "2026-05-06T11:00:00Z" + } + + assert get_in(membership, ["data", "object"]) == %{ + "id" => "mem_1", + "organization_id" => "org_1", + "user_id" => "user_1", + "role" => "admin", + "created_at" => "2026-05-06T11:00:00Z", + "updated_at" => "2026-05-06T12:00:00Z" + } + + assert get_in(membership, ["data", "changes"]) == ["role"] + + assert get_in(service_account, ["data", "object"]) == %{ + "id" => "sa_1", + "organization_id" => "org_1", + "name" => "CI", + "scopes" => ["deploy:write"], + "revoked_at" => "2026-05-06T12:00:00Z", + "created_at" => "2026-05-06T11:00:00Z" + } + + refute get_in(session, ["data", "object", "token"]) + refute get_in(membership, ["data", "object", "audit_metadata"]) + refute get_in(service_account, ["data", "object", "hashed_client_secret"]) + end +end diff --git a/test/sigra/webhooks_reliable_delivery_atomicity_test.exs b/test/sigra/webhooks_reliable_delivery_atomicity_test.exs new file mode 100644 index 00000000..7f1dc246 --- /dev/null +++ b/test/sigra/webhooks_reliable_delivery_atomicity_test.exs @@ -0,0 +1,245 @@ +defmodule Sigra.WebhooksReliableDeliveryAtomicityTest do + use ExUnit.Case, async: false + + import Ecto.Query + + alias Sigra.Test.PostgresRepo + alias Sigra.Webhooks + + defmodule Delivery do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_atomicity_deliveries_98" do + field :delivery_id, :string + field :status, :string + field :attempt_count, :integer, default: 0 + field :endpoint_url, :string + field :last_attempted_at, :utc_datetime + field :next_attempt_at, :utc_datetime + field :last_http_status, :integer + field :last_error_category, :string + field :last_error_detail, :string + field :dead_lettered_at, :utc_datetime + field :terminal_reason, :string + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :last_attempted_at, + :next_attempt_at, + :last_http_status, + :last_error_category, + :last_error_detail, + :dead_lettered_at, + :terminal_reason + ]) + |> validate_required([:delivery_id, :status, :attempt_count, :endpoint_url]) + |> maybe_force_failure() + end + + defp maybe_force_failure(%Ecto.Changeset{} = changeset) do + if Process.get(:force_delivery_update_failure) do + add_error(changeset, :status, "forced failure") + else + changeset + end + end + end + + defmodule DeliveryAttempt do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: true} + schema "webhook_atomicity_delivery_attempts_98" do + field :delivery_id, :string + field :attempt_number, :integer + field :endpoint_url, :string + field :started_at, :utc_datetime + field :finished_at, :utc_datetime + field :response_status, :integer + field :retryable, :boolean + field :retry_after_seconds, :integer + field :error_category, :string + field :error_detail, :string + field :terminal_reason, :string + field :webhook_delivery_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :finished_at, + :response_status, + :retryable, + :retry_after_seconds, + :error_category, + :error_detail, + :terminal_reason, + :webhook_delivery_id + ]) + |> validate_required([:delivery_id, :attempt_number, :endpoint_url, :started_at, :retryable]) + |> unique_constraint([:delivery_id, :attempt_number], + name: :webhook_atomicity_delivery_attempts_98_delivery_attempt_key + ) + end + end + + setup do + start_supervised!({PostgresRepo, PostgresRepo.default_config()}) + repo = PostgresRepo + + Ecto.Adapters.SQL.query!(repo, "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"", []) + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS webhook_atomicity_delivery_attempts_98", []) + Ecto.Adapters.SQL.query!(repo, "DROP TABLE IF EXISTS webhook_atomicity_deliveries_98", []) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_atomicity_deliveries_98 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + delivery_id text NOT NULL UNIQUE, + status text NOT NULL, + attempt_count integer NOT NULL DEFAULT 0, + endpoint_url text NOT NULL, + last_attempted_at timestamp, + next_attempt_at timestamp, + last_http_status integer, + last_error_category text, + last_error_detail text, + dead_lettered_at timestamp, + terminal_reason text + ) + """, + [] + ) + + Ecto.Adapters.SQL.query!( + repo, + """ + CREATE TABLE webhook_atomicity_delivery_attempts_98 ( + id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), + delivery_id text NOT NULL, + attempt_number integer NOT NULL, + endpoint_url text NOT NULL, + started_at timestamp NOT NULL, + finished_at timestamp, + response_status integer, + retryable boolean NOT NULL, + retry_after_seconds integer, + error_category text, + error_detail text, + terminal_reason text, + webhook_delivery_id uuid, + CONSTRAINT webhook_atomicity_delivery_attempts_98_delivery_attempt_key + UNIQUE (delivery_id, attempt_number) + ) + """, + [] + ) + + on_exit(fn -> Process.delete(:force_delivery_update_failure) end) + + %{repo: repo} + end + + test "rolls back the parent summary update when attempt insertion fails", %{repo: repo} do + delivery = + repo.insert!(%Delivery{ + delivery_id: "del_atomicity", + status: "pending", + attempt_count: 0, + endpoint_url: "https://receiver.example/hooks" + }) + + attempted_at = DateTime.utc_now() |> DateTime.truncate(:second) + + repo.insert!(%DeliveryAttempt{ + delivery_id: delivery.delivery_id, + attempt_number: 1, + endpoint_url: delivery.endpoint_url, + started_at: attempted_at, + finished_at: attempted_at, + retryable: false, + webhook_delivery_id: delivery.id + }) + + assert {:error, %Ecto.Changeset{}} = + Webhooks.persist_delivery_outcome(config(repo), delivery, %{ + attempt_number: 1, + attempted_at: attempted_at, + finished_at: attempted_at, + retryable: false, + response_status: 500, + error_category: "http_server_error", + error_detail: "duplicate attempt", + terminal_reason: "http_4xx_permanent", + endpoint_url: delivery.endpoint_url + }) + + fetched_delivery = repo.get_by!(Delivery, delivery_id: delivery.delivery_id) + assert fetched_delivery.status == "pending" + assert fetched_delivery.attempt_count == 0 + assert 1 == repo.aggregate(DeliveryAttempt, :count) + end + + test "rolls back the attempt insert when the parent summary update fails", %{repo: repo} do + delivery = + repo.insert!(%Delivery{ + delivery_id: "del_forced_failure", + status: "pending", + attempt_count: 0, + endpoint_url: "https://receiver.example/hooks" + }) + + attempted_at = DateTime.utc_now() |> DateTime.truncate(:second) + Process.put(:force_delivery_update_failure, true) + + assert {:error, %Ecto.Changeset{}} = + Webhooks.persist_delivery_outcome(config(repo), delivery, %{ + attempt_number: 1, + attempted_at: attempted_at, + finished_at: attempted_at, + retryable: false, + response_status: 404, + error_category: "http_client_error", + error_detail: "forced update failure", + terminal_reason: "http_4xx_permanent", + endpoint_url: delivery.endpoint_url + }) + + assert 0 == repo.aggregate(DeliveryAttempt, :count) + + fetched_delivery = + repo.one!(from(delivery_row in Delivery, where: delivery_row.delivery_id == ^delivery.delivery_id)) + + assert fetched_delivery.status == "pending" + assert fetched_delivery.attempt_count == 0 + end + + defp config(repo) do + Sigra.Config.new!( + repo: repo, + user_schema: Delivery, + secret_key_base: String.duplicate("a", 64), + webhooks: [ + enabled: true, + webhook_subscription_schema: Delivery, + webhook_event_schema: Delivery, + webhook_delivery_schema: Delivery, + webhook_delivery_attempt_schema: DeliveryAttempt + ] + ) + end +end diff --git a/test/sigra/webhooks_replay_test.exs b/test/sigra/webhooks_replay_test.exs new file mode 100644 index 00000000..acb5e53d --- /dev/null +++ b/test/sigra/webhooks_replay_test.exs @@ -0,0 +1,484 @@ +defmodule Sigra.WebhooksReplayTest do + use ExUnit.Case, async: false + + alias Ecto.{Changeset, Multi} + alias Sigra.Webhooks + + defmodule MockUser do + defstruct [:id] + end + + defmodule Subscription do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_subscriptions" do + field :endpoint_url, :string + field :enabled, :boolean, default: true + field :signing_secret, :string + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:endpoint_url, :enabled, :signing_secret]) + |> validate_required([:endpoint_url, :enabled, :signing_secret]) + end + end + + defmodule Event do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_events" do + field :payload, :map, default: %{} + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:payload]) + |> validate_required([:payload]) + end + end + + defmodule Delivery do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_deliveries" do + field :delivery_id, :string + field :status, :string, default: "pending" + field :attempt_count, :integer, default: 0 + field :endpoint_url, :string + field :dispatched_at, :utc_datetime + field :last_attempted_at, :utc_datetime + field :next_attempt_at, :utc_datetime + field :last_http_status, :integer + field :last_error_category, :string + field :last_error_detail, :string + field :dead_lettered_at, :utc_datetime + field :terminal_reason, :string + field :replayed_from_webhook_delivery_id, :binary_id + field :replay_root_webhook_delivery_id, :binary_id + field :replayed_at, :utc_datetime + field :replayed_by_user_id, :binary_id + field :replay_source, :string + field :webhook_subscription_id, :binary_id + field :webhook_event_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :dispatched_at, + :last_attempted_at, + :next_attempt_at, + :last_http_status, + :last_error_category, + :last_error_detail, + :dead_lettered_at, + :terminal_reason, + :replayed_from_webhook_delivery_id, + :replay_root_webhook_delivery_id, + :replayed_at, + :replayed_by_user_id, + :replay_source, + :webhook_subscription_id, + :webhook_event_id + ]) + |> validate_required([ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + |> unique_constraint(:delivery_id) + |> unique_constraint(:replayed_from_webhook_delivery_id, + name: :webhook_deliveries_replayed_from_unique_index + ) + end + end + + defmodule DeliveryAttempt do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_delivery_attempts" do + field :delivery_id, :string + field :attempt_number, :integer + field :endpoint_url, :string + field :started_at, :utc_datetime + field :finished_at, :utc_datetime + field :response_status, :integer + field :retryable, :boolean, default: false + field :retry_after_seconds, :integer + field :error_category, :string + field :error_detail, :string + field :terminal_reason, :string + field :webhook_delivery_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :finished_at, + :response_status, + :retryable, + :retry_after_seconds, + :error_category, + :error_detail, + :terminal_reason, + :webhook_delivery_id + ]) + |> validate_required([ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :retryable + ]) + end + end + + defmodule MockRepo do + def get_by(Delivery, delivery_id: delivery_id), do: deliveries() |> Map.get(delivery_id) + + def get_by(Delivery, replayed_from_webhook_delivery_id: source_id) do + deliveries() + |> Map.values() + |> Enum.find(&(&1.replayed_from_webhook_delivery_id == source_id)) + end + + def get(Subscription, id), do: Process.get({:subscription, id}) + def get(Event, id), do: Process.get({:event, id}) + + def insert(%Changeset{} = changeset) do + struct = Changeset.apply_changes(changeset) + struct = Map.put_new(struct, :id, Ecto.UUID.generate()) + + case struct do + %Delivery{} = delivery -> + insert_delivery(changeset, delivery) + + %DeliveryAttempt{} = attempt -> + attempts = Process.get({:attempts, attempt.delivery_id}, []) + Process.put({:attempts, attempt.delivery_id}, attempts ++ [attempt]) + {:ok, attempt} + + job -> + jobs = Process.get(:queued_jobs, []) + Process.put(:queued_jobs, jobs ++ [job]) + {:ok, job} + end + end + + def insert(%Changeset{} = changeset, _opts), do: insert(changeset) + + def update(%Changeset{} = changeset) do + delivery = Changeset.apply_changes(changeset) + Process.put({:delivery, delivery.delivery_id}, delivery) + Process.put(:deliveries, Map.put(deliveries(), delivery.delivery_id, delivery)) + {:ok, delivery} + end + + def transaction(%Multi{} = multi) do + Enum.reduce_while(Multi.to_list(multi), {:ok, %{}}, fn + {name, {:run, run}}, {:ok, acc} -> + case run.(__MODULE__, acc) do + {:ok, value} -> {:cont, {:ok, Map.put(acc, name, value)}} + {:error, reason} -> {:halt, {:error, name, reason, acc}} + end + + {name, {:insert, %Changeset{} = changeset, _opts}}, {:ok, acc} -> + case insert(changeset) do + {:ok, value} -> {:cont, {:ok, Map.put(acc, name, value)}} + {:error, reason} -> {:halt, {:error, name, reason, acc}} + end + + {name, {:insert, fun, _opts}}, {:ok, acc} when is_function(fun, 1) -> + case insert(fun.(acc)) do + {:ok, value} -> {:cont, {:ok, Map.put(acc, name, value)}} + {:error, reason} -> {:halt, {:error, name, reason, acc}} + end + + {name, {:update, %Changeset{} = changeset, _opts}}, {:ok, acc} -> + case update(changeset) do + {:ok, value} -> {:cont, {:ok, Map.put(acc, name, value)}} + {:error, reason} -> {:halt, {:error, name, reason, acc}} + end + end) + end + + defp insert_delivery(changeset, delivery) do + deliveries = deliveries() + + cond do + Map.has_key?(deliveries, delivery.delivery_id) -> + {:error, Changeset.add_error(changeset, :delivery_id, "has already been taken")} + + delivery.replayed_from_webhook_delivery_id && + Enum.any?( + Map.values(deliveries), + &(&1.replayed_from_webhook_delivery_id == delivery.replayed_from_webhook_delivery_id) + ) -> + {:error, + Changeset.add_error( + changeset, + :replayed_from_webhook_delivery_id, + "has already been taken" + )} + + true -> + Process.put({:delivery, delivery.delivery_id}, delivery) + Process.put(:deliveries, Map.put(deliveries, delivery.delivery_id, delivery)) + + Process.put( + {:attempts, delivery.delivery_id}, + Process.get({:attempts, delivery.delivery_id}, []) + ) + + {:ok, delivery} + end + end + + defp deliveries do + Process.get(:deliveries, %{}) + end + end + + setup do + Application.put_env(:sigra, :repo, MockRepo) + Application.put_env(:sigra, :user_schema, MockUser) + Application.put_env(:sigra, :webhooks, webhooks_config()) + Process.put(:queued_jobs, []) + Process.put(:deliveries, %{}) + + on_exit(fn -> + Application.delete_env(:sigra, :repo) + Application.delete_env(:sigra, :user_schema) + Application.delete_env(:sigra, :webhooks) + Process.delete(:queued_jobs) + Process.delete(:deliveries) + end) + + :ok + end + + describe "replay_delivery/4" do + test "creates a replay child with fresh lineage and leaves the source immutable" do + source = store_source_delivery() + actor_id = Ecto.UUID.generate() + + assert {:ok, %{source_delivery: source_delivery, replay_delivery: replay_delivery}} = + Webhooks.replay_delivery( + config(), + source.delivery_id, + %{user: %{id: actor_id}}, + source: "admin.delivery_detail" + ) + + assert source_delivery.delivery_id == source.delivery_id + assert source_delivery.status == "dead_lettered" + assert source_delivery.attempt_count == 1 + assert source_delivery.delivery_id != replay_delivery.delivery_id + assert replay_delivery.status == "pending" + assert replay_delivery.attempt_count == 0 + assert replay_delivery.replayed_from_webhook_delivery_id == source.id + assert replay_delivery.replay_root_webhook_delivery_id == source.id + assert replay_delivery.replayed_by_user_id == actor_id + assert replay_delivery.replay_source == "admin.delivery_detail" + assert %DateTime{} = replay_delivery.replayed_at + assert [] = Process.get({:attempts, replay_delivery.delivery_id}) + assert [job] = Process.get(:queued_jobs) + assert job.args == %{"delivery_id" => replay_delivery.delivery_id} + end + + test "rejects pending, retry_scheduled, and delivered source rows" do + for status <- ~w[pending retry_scheduled delivered] do + source = + store_source_delivery(status: status, dead_lettered_at: nil, terminal_reason: nil) + + assert {:error, :not_dead_lettered} = + Webhooks.replay_delivery( + config(), + source.delivery_id, + %{user: %{id: Ecto.UUID.generate()}}, + source: "admin.delivery_detail" + ) + end + end + + test "rejects a source delivery that already has a replay child" do + source = store_source_delivery() + store_replay_child(source) + + assert {:error, :replay_already_exists} = + Webhooks.replay_delivery( + config(), + source.delivery_id, + %{user: %{id: Ecto.UUID.generate()}}, + source: "admin.delivery_detail" + ) + end + + test "rejects truth-gap context failures and disabled runtime preconditions" do + dependency_gap = store_source_delivery(terminal_reason: "delivery_dependency_missing") + + assert {:error, :delivery_context_incomplete} = + Webhooks.replay_delivery( + config(), + dependency_gap.delivery_id, + %{user: %{id: Ecto.UUID.generate()}}, + source: "admin.delivery_detail" + ) + + disabled_subscription = + store_source_delivery( + subscription_enabled: false, + terminal_reason: "subscription_disabled" + ) + + assert {:error, :subscription_disabled} = + Webhooks.replay_delivery( + config(), + disabled_subscription.delivery_id, + %{user: %{id: Ecto.UUID.generate()}}, + source: "admin.delivery_detail" + ) + + Application.put_env(:sigra, :webhooks, Keyword.put(webhooks_config(), :enabled, false)) + + assert {:error, :webhooks_disabled} = + Webhooks.replay_delivery( + config(), + dependency_gap.delivery_id, + %{user: %{id: Ecto.UUID.generate()}}, + source: "admin.delivery_detail" + ) + end + end + + defp config do + Sigra.Config.new!( + repo: MockRepo, + user_schema: MockUser, + webhooks: Application.fetch_env!(:sigra, :webhooks) + ) + end + + defp webhooks_config do + [ + enabled: true, + webhook_subscription_schema: Subscription, + webhook_event_schema: Event, + webhook_delivery_schema: Delivery, + webhook_delivery_attempt_schema: DeliveryAttempt, + oban_queue: "sigra_webhooks" + ] + end + + defp store_source_delivery(opts \\ []) do + subscription = + %Subscription{ + id: Keyword.get(opts, :subscription_id, "sub_1"), + endpoint_url: "https://hooks.example.test/inbound", + enabled: Keyword.get(opts, :subscription_enabled, true), + signing_secret: "whsec_phase104" + } + + event = + %Event{ + id: Keyword.get(opts, :event_id, "evt_row_1"), + payload: %{ + "id" => "evt_1", + "type" => "user.created", + "data" => %{"object" => %{"id" => "user_1"}} + } + } + + source = + %Delivery{ + id: Keyword.get(opts, :id, "del_row_1"), + delivery_id: Keyword.get(opts, :delivery_id, "del_1"), + status: Keyword.get(opts, :status, "dead_lettered"), + attempt_count: Keyword.get(opts, :attempt_count, 1), + endpoint_url: subscription.endpoint_url, + last_attempted_at: DateTime.utc_now() |> DateTime.truncate(:second), + last_http_status: Keyword.get(opts, :last_http_status, 404), + last_error_category: Keyword.get(opts, :last_error_category, "http_client_error"), + last_error_detail: Keyword.get(opts, :last_error_detail, "receiver rejected request"), + dead_lettered_at: + Keyword.get(opts, :dead_lettered_at, DateTime.utc_now() |> DateTime.truncate(:second)), + terminal_reason: Keyword.get(opts, :terminal_reason, "http_4xx_permanent"), + webhook_subscription_id: subscription.id, + webhook_event_id: event.id + } + + Process.put({:subscription, subscription.id}, subscription) + Process.put({:event, event.id}, event) + Process.put(:deliveries, %{source.delivery_id => source}) + Process.put({:delivery, source.delivery_id}, source) + + Process.put( + {:attempts, source.delivery_id}, + [ + %DeliveryAttempt{ + id: "attempt_row_1", + delivery_id: source.delivery_id, + attempt_number: 1, + endpoint_url: source.endpoint_url, + started_at: DateTime.utc_now() |> DateTime.truncate(:second), + finished_at: DateTime.utc_now() |> DateTime.truncate(:second), + response_status: 404, + retryable: false, + error_category: "http_client_error", + error_detail: "receiver rejected request", + terminal_reason: source.terminal_reason, + webhook_delivery_id: source.id + } + ] + ) + + source + end + + defp store_replay_child(source) do + replay = + %Delivery{ + id: "del_row_2", + delivery_id: "del_2", + status: "pending", + attempt_count: 0, + endpoint_url: source.endpoint_url, + replayed_from_webhook_delivery_id: source.id, + replay_root_webhook_delivery_id: source.id, + replayed_at: DateTime.utc_now() |> DateTime.truncate(:second), + replayed_by_user_id: Ecto.UUID.generate(), + replay_source: "admin.delivery_detail", + webhook_subscription_id: source.webhook_subscription_id, + webhook_event_id: source.webhook_event_id + } + + Process.put(:deliveries, %{ + source.delivery_id => source, + replay.delivery_id => replay + }) + + Process.put({:delivery, replay.delivery_id}, replay) + Process.put({:attempts, replay.delivery_id}, []) + replay + end +end diff --git a/test/sigra/webhooks_signature_test.exs b/test/sigra/webhooks_signature_test.exs new file mode 100644 index 00000000..6b4f7788 --- /dev/null +++ b/test/sigra/webhooks_signature_test.exs @@ -0,0 +1,105 @@ +defmodule Sigra.WebhooksSignatureTest do + use ExUnit.Case, async: true + + alias Sigra.Webhooks.Signature + + @delivery_id "del_123" + @secret "whsec_test_secret" + @rotated_secret "whsec_rotated_secret" + @raw_body ~s({"id":"evt_123","type":"user.created"}) + @timestamp 1_778_070_400 + + test "builds the exact Phase 97 header contract and canonical string" do + headers = Signature.headers(@delivery_id, @raw_body, @secret, timestamp: @timestamp) + + assert Signature.header_names() == %{ + id: "Sigra-Webhook-Id", + timestamp: "Sigra-Webhook-Timestamp", + signature: "Sigra-Webhook-Signature" + } + + assert Signature.canonical_string(@delivery_id, @timestamp, @raw_body) == + "#{@delivery_id}.#{@timestamp}.#{@raw_body}" + + expected_digest = + :crypto.mac(:hmac, :sha256, @secret, "#{@delivery_id}.#{@timestamp}.#{@raw_body}") + |> Base.encode16(case: :lower) + + assert headers == %{ + "Sigra-Webhook-Id" => @delivery_id, + "Sigra-Webhook-Timestamp" => Integer.to_string(@timestamp), + "Sigra-Webhook-Signature" => "v1=#{expected_digest}" + } + end + + test "verifies signatures case-insensitively on headers and supports multiple v1 values" do + current = Signature.sign(@delivery_id, @timestamp, @raw_body, @secret) + rotated = Signature.sign(@delivery_id, @timestamp, @raw_body, @rotated_secret) + + headers = %{ + "sigra-webhook-id" => @delivery_id, + "sigra-webhook-timestamp" => Integer.to_string(@timestamp), + "sigra-webhook-signature" => Enum.join([current, rotated], ", ") + } + + assert {:ok, %{delivery_id: @delivery_id, timestamp: @timestamp}} = + Signature.verify(headers, @raw_body, [@secret], now: @timestamp, tolerance: 300) + + assert {:ok, %{delivery_id: @delivery_id, timestamp: @timestamp}} = + Signature.verify(headers, @raw_body, [@rotated_secret], + now: @timestamp, + tolerance: 300 + ) + end + + test "emits one timestamp with two v1 signatures during overlap" do + headers = + Signature.headers(@delivery_id, @raw_body, [@secret, @rotated_secret], timestamp: @timestamp) + + assert headers["Sigra-Webhook-Timestamp"] == Integer.to_string(@timestamp) + + assert [first, second] = + headers["Sigra-Webhook-Signature"] + |> String.split(",") + |> Enum.map(&String.trim/1) + + assert first == Signature.sign(@delivery_id, @timestamp, @raw_body, @secret) + assert second == Signature.sign(@delivery_id, @timestamp, @raw_body, @rotated_secret) + refute headers["Sigra-Webhook-Signature"] =~ "kid=" + end + + test "rejects stale timestamps and malformed timestamps explicitly" do + headers = Signature.headers(@delivery_id, @raw_body, @secret, timestamp: @timestamp) + + assert {:error, :stale_timestamp} = + Signature.verify(headers, @raw_body, @secret, now: @timestamp + 301, tolerance: 300) + + malformed = + Map.put(headers, "Sigra-Webhook-Timestamp", "not-a-timestamp") + + assert {:error, :invalid_timestamp} = + Signature.verify(malformed, @raw_body, @secret, now: @timestamp, tolerance: 300) + end + + test "rejects missing and malformed signature headers without leaking payload details" do + headers = Signature.headers(@delivery_id, @raw_body, @secret, timestamp: @timestamp) + + assert {:error, :missing_signature} = + headers + |> Map.delete("Sigra-Webhook-Signature") + |> then(&Signature.verify(&1, @raw_body, @secret, now: @timestamp)) + + malformed = + Map.put(headers, "Sigra-Webhook-Signature", "v1=short") + + assert {:error, :malformed_signature} = + Signature.verify(malformed, @raw_body, @secret, now: @timestamp) + end + + test "rejects digest mismatches after constant-time comparison path" do + headers = Signature.headers(@delivery_id, @raw_body, @secret, timestamp: @timestamp) + + assert {:error, :invalid_signature} = + Signature.verify(headers, @raw_body <> " ", @secret, now: @timestamp) + end +end diff --git a/test/sigra/webhooks_test.exs b/test/sigra/webhooks_test.exs new file mode 100644 index 00000000..0340942b --- /dev/null +++ b/test/sigra/webhooks_test.exs @@ -0,0 +1,505 @@ +defmodule Sigra.WebhooksTest do + use ExUnit.Case, async: true + + alias Ecto.Changeset + alias Sigra.Webhooks + + defmodule Subscription do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_subscriptions" do + field :endpoint_url, :string + field :event_types, {:array, :string}, default: [] + field :enabled, :boolean, default: true + field :description, :string + field :signing_secret, :binary + field :next_signing_secret, :binary + field :rotation_state, Ecto.Enum, + values: [:stable, :prepared, :overlap_active, :completed], + default: :stable + + field :rotation_prepared_at, :utc_datetime_usec + field :rotation_overlap_started_at, :utc_datetime_usec + field :rotation_retire_after_at, :utc_datetime_usec + field :rotation_completed_at, :utc_datetime_usec + field :rotation_last_changed_by_user_id, :binary_id + field :signing_secret_fingerprint, :string + field :next_signing_secret_fingerprint, :string + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :endpoint_url, + :event_types, + :enabled, + :description, + :signing_secret, + :next_signing_secret, + :rotation_state, + :rotation_prepared_at, + :rotation_overlap_started_at, + :rotation_retire_after_at, + :rotation_completed_at, + :rotation_last_changed_by_user_id, + :signing_secret_fingerprint, + :next_signing_secret_fingerprint + ]) + |> validate_required([:endpoint_url, :event_types, :enabled, :signing_secret]) + end + end + + defmodule Event do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_events" do + field :event_id, :string + end + end + + defmodule Delivery do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_deliveries" do + field :delivery_id, :string + end + end + + defmodule DeliveryAttempt do + use Ecto.Schema + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_delivery_attempts" do + field :delivery_id, :string + end + end + + defmodule MockRepo do + def insert(changeset) do + if changeset.valid? do + record = Ecto.Changeset.apply_changes(changeset) + put_subscription(record) + {:ok, record} + else + {:error, changeset} + end + end + + def update(changeset) do + if changeset.valid? do + record = Ecto.Changeset.apply_changes(changeset) + put_subscription(record) + {:ok, record} + else + {:error, changeset} + end + end + + def transaction(%Ecto.Multi{} = multi), do: Sigra.Test.MultiStub.run(__MODULE__, multi) + def all(_schema), do: Process.get(:webhook_subscriptions, []) + def get_by(_schema, id: subscription_id), do: get_subscription(subscription_id) + + defp get_subscription(subscription_id) do + Process.get(:webhook_subscription_records, %{}) + |> Map.get(subscription_id) + end + + defp put_subscription(%{id: subscription_id} = subscription) when is_binary(subscription_id) do + records = Process.get(:webhook_subscription_records, %{}) + Process.put(:webhook_subscription_records, Map.put(records, subscription_id, subscription)) + end + + defp put_subscription(_subscription), do: :ok + end + + defp config(overrides \\ []) do + defaults = [ + repo: MockRepo, + user_schema: Sigra.TestUser, + secret_key_base: String.duplicate("a", 64), + webhooks: [ + enabled: false, + webhook_subscription_schema: Subscription, + webhook_event_schema: Event, + webhook_delivery_schema: Delivery, + webhook_delivery_attempt_schema: DeliveryAttempt, + endpoint_resolver: &public_test_resolver/1, + oban_queue: "sigra_webhooks", + oban_concurrency: 10, + signature_tolerance: 300 + ] + ] + + merged = + defaults + |> Keyword.merge(Keyword.drop(overrides, [:webhooks])) + |> Keyword.update!( + :webhooks, + &Keyword.merge(&1, Keyword.get(overrides, :webhooks, [])) + ) + + Sigra.Config.new!(merged) + end + + describe "config helpers" do + test "exposes the public event catalog and webhook config helpers" do + config = config() + + assert "user.created" in Webhooks.public_event_types() + assert Webhooks.enabled?(config) == false + assert Webhooks.queue_name(config) == "sigra_webhooks" + assert Webhooks.signature_tolerance(config) == 300 + assert Webhooks.subscription_schema!(config) == Subscription + assert Webhooks.event_schema!(config) == Event + assert Webhooks.delivery_schema!(config) == Delivery + assert Webhooks.delivery_attempt_schema!(config) == DeliveryAttempt + end + end + + describe "subscription_changeset/3" do + test "normalizes event types, preserves localhost http, and defaults enabled" do + changeset = + Webhooks.subscription_changeset(config(), %Subscription{}, %{ + endpoint_url: "http://localhost:4000/webhooks", + event_types: [" user.created ", "user.created", "session.created"], + signing_secret: String.duplicate("s", 16) + }) + + assert changeset.valid? + assert Changeset.get_field(changeset, :enabled) == true + assert Changeset.get_field(changeset, :event_types) == ["user.created", "session.created"] + end + + test "rejects unsupported event types" do + changeset = + Webhooks.subscription_changeset(config(), %Subscription{}, %{ + endpoint_url: "https://example.com/hooks", + event_types: ["user.created", "user.hacked"], + signing_secret: String.duplicate("s", 16) + }) + + refute changeset.valid? + assert errors_on(changeset).event_types == ["contains unsupported event types: user.hacked"] + end + + test "rejects insecure non-localhost http endpoints" do + changeset = + Webhooks.subscription_changeset(config(), %Subscription{}, %{ + endpoint_url: "http://example.com/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + refute changeset.valid? + assert errors_on(changeset).endpoint_url == ["must use HTTPS unless the host is localhost"] + end + + test "rejects embedded credentials and blocked resolved targets" do + resolver = fn + "private.example.test" -> {:ok, [{10, 0, 0, 8}]} + "metadata.example.test" -> {:ok, [{169, 254, 169, 254}]} + "mixed.example.test" -> {:ok, [{203, 0, 113, 10}, {10, 0, 0, 2}]} + end + + private_changeset = + Webhooks.subscription_changeset(config(webhooks: [endpoint_resolver: resolver]), %Subscription{}, %{ + endpoint_url: "https://private.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + metadata_changeset = + Webhooks.subscription_changeset(config(webhooks: [endpoint_resolver: resolver]), %Subscription{}, %{ + endpoint_url: "https://metadata.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + credentials_changeset = + Webhooks.subscription_changeset(config(), %Subscription{}, %{ + endpoint_url: "https://user:pass@example.com/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + mixed_changeset = + Webhooks.subscription_changeset(config(webhooks: [endpoint_resolver: resolver]), %Subscription{}, %{ + endpoint_url: "https://mixed.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + refute private_changeset.valid? + refute metadata_changeset.valid? + refute credentials_changeset.valid? + refute mixed_changeset.valid? + assert errors_on(private_changeset).endpoint_url == ["resolved target points at a private address"] + assert errors_on(metadata_changeset).endpoint_url == ["resolved target points at a metadata address"] + assert errors_on(credentials_changeset).endpoint_url == ["must not include embedded credentials"] + assert errors_on(mixed_changeset).endpoint_url == ["resolved target points at a private address"] + end + + test "accepts loopback http and surfaces host callback denials" do + policy = fn + %{uri: %URI{host: "callback.example.test"}} -> + {:error, :policy_denied, "custom outbound allowlist denied the destination"} + + _context -> + :ok + end + + localhost_changeset = + Webhooks.subscription_changeset(config(), %Subscription{}, %{ + endpoint_url: "http://127.0.0.1:4000/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + denied_changeset = + Webhooks.subscription_changeset(config(webhooks: [endpoint_policy: policy]), %Subscription{}, %{ + endpoint_url: "https://callback.example.test/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + assert localhost_changeset.valid? + refute denied_changeset.valid? + assert errors_on(denied_changeset).endpoint_url == ["custom outbound allowlist denied the destination"] + end + + test "rejects short signing secrets" do + changeset = + Webhooks.subscription_changeset(config(), %Subscription{}, %{ + endpoint_url: "https://example.com/hooks", + event_types: ["user.created"], + signing_secret: "short-secret" + }) + + refute changeset.valid? + assert errors_on(changeset).signing_secret == ["must be at least 16 bytes"] + end + + test "adds a base error when dependent schema modules are missing" do + bad_config = + Sigra.Config.new!( + repo: MockRepo, + user_schema: Sigra.TestUser, + secret_key_base: String.duplicate("a", 64), + webhooks: [enabled: false, webhook_subscription_schema: Subscription] + ) + + changeset = + Webhooks.subscription_changeset(bad_config, %Subscription{}, %{ + endpoint_url: "https://example.com/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + refute changeset.valid? + + assert errors_on(changeset).base == [ + "config.webhooks must declare webhook_subscription_schema, webhook_event_schema, webhook_delivery_schema, and webhook_delivery_attempt_schema" + ] + end + end + + describe "subscription CRUD" do + test "create_subscription/2 persists valid subscriptions" do + assert {:ok, subscription} = + Webhooks.create_subscription(config(), %{ + endpoint_url: "https://example.com/hooks", + event_types: ["user.created"], + signing_secret: String.duplicate("s", 16) + }) + + assert subscription.endpoint_url == "https://example.com/hooks" + assert subscription.enabled == true + end + + test "update_subscription/3 can disable a subscription" do + subscription = %Subscription{ + endpoint_url: "https://example.com/hooks", + event_types: ["user.created"], + enabled: true, + signing_secret: String.duplicate("s", 16) + } + + assert {:ok, updated} = Webhooks.disable_subscription(config(), subscription) + assert updated.enabled == false + end + + test "list_subscriptions/1 delegates to the configured repo" do + subscriptions = [%Subscription{endpoint_url: "https://example.com/hooks"}] + Process.put(:webhook_subscriptions, subscriptions) + + assert Webhooks.list_subscriptions(config()) == subscriptions + after + Process.delete(:webhook_subscriptions) + end + end + + describe "rotation lifecycle" do + test "prepare_secret/3 stages one next secret and records prepared metadata" do + subscription = put_subscription(subscription_fixture()) + scope = %{user: %{id: Ecto.UUID.generate()}} + + assert {:ok, prepared} = Webhooks.prepare_secret(config(), subscription, scope: scope) + + assert prepared.signing_secret == subscription.signing_secret + assert is_binary(prepared.next_signing_secret) + assert prepared.next_signing_secret != subscription.signing_secret + assert prepared.rotation_state == :prepared + assert %DateTime{} = prepared.rotation_prepared_at + assert prepared.rotation_last_changed_by_user_id == scope.user.id + assert is_binary(prepared.signing_secret_fingerprint) + assert is_binary(prepared.next_signing_secret_fingerprint) + assert prepared.signing_secret_fingerprint != prepared.next_signing_secret_fingerprint + end + + test "discard_prepared_secret/3 clears the next slot without replacing the active secret" do + subscription = + subscription_fixture(%{ + next_signing_secret: String.duplicate("n", 32), + rotation_state: :prepared, + rotation_prepared_at: DateTime.utc_now() |> DateTime.truncate(:second), + next_signing_secret_fingerprint: "nextfingerprint" + }) + |> put_subscription() + + scope = %{user: %{id: Ecto.UUID.generate()}} + + assert {:ok, discarded} = + Webhooks.discard_prepared_secret(config(), subscription, scope: scope) + + assert discarded.signing_secret == subscription.signing_secret + assert discarded.next_signing_secret == nil + assert discarded.rotation_state == :stable + assert discarded.rotation_prepared_at == nil + assert discarded.rotation_overlap_started_at == nil + assert discarded.rotation_retire_after_at == nil + assert discarded.rotation_completed_at == nil + assert discarded.next_signing_secret_fingerprint == nil + assert discarded.rotation_last_changed_by_user_id == scope.user.id + end + + test "start_secret_overlap/3 only allows the prepared state and records overlap metadata" do + prepared = + subscription_fixture(%{ + next_signing_secret: String.duplicate("n", 32), + rotation_state: :prepared, + rotation_prepared_at: DateTime.utc_now() |> DateTime.truncate(:second), + next_signing_secret_fingerprint: "nextfingerprint" + }) + |> put_subscription() + + retire_after_at = DateTime.utc_now() |> DateTime.add(3600, :second) |> DateTime.truncate(:second) + + assert {:ok, overlap} = + Webhooks.start_secret_overlap(config(), prepared, + retire_after_at: retire_after_at, + scope: %{user: %{id: Ecto.UUID.generate()}} + ) + + assert overlap.rotation_state == :overlap_active + assert %DateTime{} = overlap.rotation_overlap_started_at + assert DateTime.compare(overlap.rotation_retire_after_at, retire_after_at) == :eq + + assert {:error, changeset} = + Webhooks.start_secret_overlap(config(), subscription_fixture()) + + assert errors_on(changeset).rotation_state == ["can only start overlap from prepared"] + end + + test "complete_secret_rotation/3 promotes the next secret and records completion state" do + prepared_next = String.duplicate("n", 32) + + overlap = + subscription_fixture(%{ + next_signing_secret: prepared_next, + rotation_state: :overlap_active, + rotation_prepared_at: DateTime.utc_now() |> DateTime.truncate(:second), + rotation_overlap_started_at: DateTime.utc_now() |> DateTime.truncate(:second), + next_signing_secret_fingerprint: "nextfingerprint" + }) + |> put_subscription() + + assert {:ok, completed} = + Webhooks.complete_secret_rotation(config(), overlap, + scope: %{user: %{id: Ecto.UUID.generate()}} + ) + + assert completed.signing_secret == prepared_next + assert completed.next_signing_secret == nil + assert completed.rotation_state == :completed + assert %DateTime{} = completed.rotation_completed_at + assert completed.rotation_prepared_at == nil + assert completed.rotation_overlap_started_at == nil + assert completed.rotation_retire_after_at == nil + assert completed.next_signing_secret_fingerprint == nil + assert is_binary(completed.signing_secret_fingerprint) + end + + test "illegal lifecycle jumps are rejected" do + stable = put_subscription(subscription_fixture()) + + assert {:error, start_changeset} = Webhooks.start_secret_overlap(config(), stable) + assert errors_on(start_changeset).rotation_state == ["can only start overlap from prepared"] + + assert {:error, complete_changeset} = Webhooks.complete_secret_rotation(config(), stable) + assert errors_on(complete_changeset).rotation_state == ["can only complete rotation from overlap_active"] + + prepared = + subscription_fixture(%{ + next_signing_secret: String.duplicate("n", 32), + rotation_state: :prepared, + rotation_prepared_at: DateTime.utc_now() |> DateTime.truncate(:second), + next_signing_secret_fingerprint: "nextfingerprint" + }) + |> put_subscription() + + assert {:error, prepare_changeset} = Webhooks.prepare_secret(config(), prepared) + assert errors_on(prepare_changeset).rotation_state == ["can only prepare a next secret from stable or completed"] + end + end + + defp errors_on(%Changeset{} = changeset) do + Changeset.traverse_errors(changeset, fn {message, opts} -> + Regex.replace(~r"%{(\w+)}", message, fn _, key -> + opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string() + end) + end) + end + + defp subscription_fixture(overrides \\ %{}) do + base = %Subscription{ + id: Ecto.UUID.generate(), + endpoint_url: "https://example.com/hooks", + event_types: ["user.created"], + enabled: true, + signing_secret: String.duplicate("s", 32), + rotation_state: :stable, + signing_secret_fingerprint: "activefingerprint" + } + + struct(base, overrides) + end + + defp put_subscription(%Subscription{} = subscription) do + records = Process.get(:webhook_subscription_records, %{}) + Process.put(:webhook_subscription_records, Map.put(records, subscription.id, subscription)) + subscription + end + + defp public_test_resolver(host) do + case host do + "example.com" -> {:ok, [{93, 184, 216, 34}]} + "hooks.example.test" -> {:ok, [{203, 0, 113, 20}]} + "callback.example.test" -> {:ok, [{203, 0, 113, 21}]} + _other -> {:error, :nxdomain} + end + end +end diff --git a/test/sigra/workers/optional_deps_test.exs b/test/sigra/workers/optional_deps_test.exs new file mode 100644 index 00000000..6459cb47 --- /dev/null +++ b/test/sigra/workers/optional_deps_test.exs @@ -0,0 +1,40 @@ +defmodule Sigra.Workers.OptionalDepsTest do + use ExUnit.Case, async: true + + alias Sigra.OptionalDeps.MissingDependencyError + alias Sigra.Workers.AccountDeletion + alias Sigra.Workers.AuditCleanup + alias Sigra.Workers.CleanupExpiredInvitations + alias Sigra.Workers.TokenCleanup + + @workers [ + {AccountDeletion, %{"user_id" => 1, "repo" => "Elixir.Sigra.Repo", "user_schema" => "Elixir.Sigra.User"}}, + {AuditCleanup, %{"repo" => "Elixir.Sigra.Repo", "audit_schema" => "Elixir.Sigra.AuditEvent"}}, + {TokenCleanup, %{"repo" => "Elixir.Sigra.Repo", "token_schema" => "Elixir.Sigra.UserToken"}}, + {CleanupExpiredInvitations, + %{ + "organization_id" => nil, + "actor_id" => nil, + "repo" => "Elixir.Sigra.Repo", + "invitation_schema" => "Elixir.Sigra.OrganizationInvitation", + "scope_module" => "Elixir.Sigra.Scope", + "retention_days" => 30 + }} + ] + + describe "queue-backed workers stay loadable without compile-time disappearance" do + test "worker modules remain defined" do + Enum.each(@workers, fn {worker, _args} -> + assert Code.ensure_loaded?(worker) + end) + end + + test "first queue-backed interaction raises the tagged missing async dependency error" do + Enum.each(@workers, fn {worker, args} -> + assert_raise MissingDependencyError, ~r/optional dependency missing for lifecycle_jobs/, fn -> + worker.new(args, dependency_loaded?: fn _spec -> false end) + end + end) + end + end +end diff --git a/test/sigra/workers/token_cleanup_test.exs b/test/sigra/workers/token_cleanup_test.exs index 85cf89ea..28e2dc98 100644 --- a/test/sigra/workers/token_cleanup_test.exs +++ b/test/sigra/workers/token_cleanup_test.exs @@ -58,9 +58,9 @@ defmodule Sigra.Workers.TokenCleanupTest do assert changeset.changes[:max_attempts] == 1 end - test "uses sigra_mailer queue" do + test "uses sigra_lifecycle queue" do changeset = TokenCleanup.new(%{}) - assert changeset.changes[:queue] == "sigra_mailer" + assert changeset.changes[:queue] == "sigra_lifecycle" end end end diff --git a/test/sigra/workers/webhook_delivery_test.exs b/test/sigra/workers/webhook_delivery_test.exs new file mode 100644 index 00000000..ade416fb --- /dev/null +++ b/test/sigra/workers/webhook_delivery_test.exs @@ -0,0 +1,640 @@ +defmodule Sigra.Workers.WebhookDeliveryTest do + use ExUnit.Case, async: false + + alias Ecto.{Changeset, Multi} + alias Sigra.OptionalDeps.MissingDependencyError + alias Sigra.Webhooks + alias Sigra.Webhooks.Signature + alias Sigra.Workers.WebhookDelivery + + defmodule MockUser do + defstruct [:id] + end + + defmodule Subscription do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_subscriptions" do + field :endpoint_url, :string + field :enabled, :boolean, default: true + field :signing_secret, :string + field :next_signing_secret, :string + field :rotation_state, Ecto.Enum, + values: [:stable, :prepared, :overlap_active, :completed], + default: :stable + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:endpoint_url, :enabled, :signing_secret, :next_signing_secret, :rotation_state]) + |> validate_required([:endpoint_url, :enabled, :signing_secret]) + end + end + + defmodule Event do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_events" do + field :payload, :map, default: %{} + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [:payload]) + |> validate_required([:payload]) + end + end + + defmodule Delivery do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_deliveries" do + field :delivery_id, :string + field :status, :string, default: "pending" + field :attempt_count, :integer, default: 0 + field :endpoint_url, :string + field :dispatched_at, :utc_datetime + field :last_attempted_at, :utc_datetime + field :next_attempt_at, :utc_datetime + field :last_http_status, :integer + field :last_error_category, :string + field :last_error_detail, :string + field :dead_lettered_at, :utc_datetime + field :terminal_reason, :string + field :replayed_from_webhook_delivery_id, :binary_id + field :replay_root_webhook_delivery_id, :binary_id + field :replayed_at, :utc_datetime + field :replayed_by_user_id, :binary_id + field :replay_source, :string + field :webhook_subscription_id, :binary_id + field :webhook_event_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :dispatched_at, + :last_attempted_at, + :next_attempt_at, + :last_http_status, + :last_error_category, + :last_error_detail, + :dead_lettered_at, + :terminal_reason, + :replayed_from_webhook_delivery_id, + :replay_root_webhook_delivery_id, + :replayed_at, + :replayed_by_user_id, + :replay_source, + :webhook_subscription_id, + :webhook_event_id + ]) + |> validate_required([ + :delivery_id, + :status, + :attempt_count, + :endpoint_url, + :webhook_subscription_id, + :webhook_event_id + ]) + end + end + + defmodule DeliveryAttempt do + use Ecto.Schema + import Ecto.Changeset + + @primary_key {:id, :binary_id, autogenerate: false} + schema "webhook_delivery_attempts" do + field :delivery_id, :string + field :attempt_number, :integer + field :endpoint_url, :string + field :started_at, :utc_datetime + field :finished_at, :utc_datetime + field :response_status, :integer + field :retryable, :boolean, default: false + field :retry_after_seconds, :integer + field :error_category, :string + field :error_detail, :string + field :terminal_reason, :string + field :webhook_delivery_id, :binary_id + end + + def changeset(struct, attrs) do + struct + |> cast(attrs, [ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :finished_at, + :response_status, + :retryable, + :retry_after_seconds, + :error_category, + :error_detail, + :terminal_reason, + :webhook_delivery_id + ]) + |> validate_required([ + :delivery_id, + :attempt_number, + :endpoint_url, + :started_at, + :retryable + ]) + end + end + + defmodule MockRepo do + def get_by(Delivery, delivery_id: delivery_id), do: Process.get({:delivery, delivery_id}) + def get(Subscription, id), do: Process.get({:subscription, id}) + def get(Event, id), do: Process.get({:event, id}) + + def insert(%Changeset{} = changeset) do + struct = Changeset.apply_changes(changeset) + struct = Map.put_new(struct, :id, Ecto.UUID.generate()) + + case struct do + %DeliveryAttempt{} = attempt -> + attempts = Process.get({:attempts, attempt.delivery_id}, []) + Process.put({:attempts, attempt.delivery_id}, attempts ++ [attempt]) + {:ok, attempt} + + other -> + {:ok, other} + end + end + + def update(%Changeset{} = changeset) do + delivery = Changeset.apply_changes(changeset) + Process.put({:delivery, delivery.delivery_id}, delivery) + {:ok, delivery} + end + + def transaction(%Multi{} = multi) do + Enum.reduce_while(Multi.to_list(multi), {:ok, %{}}, fn + {name, {:insert, changeset, _opts}}, {:ok, acc} -> + case insert(changeset) do + {:ok, value} -> {:cont, {:ok, Map.put(acc, name, value)}} + {:error, reason} -> {:halt, {:error, name, reason, acc}} + end + + {name, {:update, changeset, _opts}}, {:ok, acc} -> + case update(changeset) do + {:ok, value} -> {:cont, {:ok, Map.put(acc, name, value)}} + {:error, reason} -> {:halt, {:error, name, reason, acc}} + end + end) + end + end + + defmodule MockOban do + def insert(%Changeset{} = changeset) do + job = %{ + args: Changeset.get_change(changeset, :args), + queue: Changeset.get_change(changeset, :queue) + } + + jobs = Process.get(:queued_jobs, []) + Process.put(:queued_jobs, jobs ++ [job]) + {:ok, job} + end + end + + setup do + Application.put_env(:sigra, :repo, MockRepo) + Application.put_env(:sigra, :user_schema, MockUser) + Application.put_env(:sigra, :webhooks, webhooks_config()) + Application.put_env(:sigra, :webhook_delivery_oban, MockOban) + Process.put(:queued_jobs, []) + + on_exit(fn -> + Application.delete_env(:sigra, :repo) + Application.delete_env(:sigra, :user_schema) + Application.delete_env(:sigra, :webhooks) + Application.delete_env(:sigra, :webhook_delivery_requester) + Application.delete_env(:sigra, :webhook_delivery_oban) + + for key <- [ + {:delivery, "del_1"}, + {:delivery, "del_missing"}, + {:subscription, "sub_1"}, + {:event, "evt_row_1"}, + {:attempts, "del_1"}, + {:attempts, "del_missing"} + ] do + Process.delete(key) + end + + Process.delete(:queued_jobs) + end) + + :ok + end + + describe "enqueue helpers" do + test "new/2 hard-fails when webhook delivery is enabled but Oban is unavailable" do + assert_raise MissingDependencyError, fn -> + WebhookDelivery.new(%{"delivery_id" => "del_1"}, + webhooks: [enabled: true], + dependency_loaded?: fn _spec -> false end + ) + end + end + + test "enqueue_delivery/3 stores only the delivery_id and uses the configured webhook queue" do + delivery = %Delivery{delivery_id: "del_1"} + + assert {:ok, %{args: %{"delivery_id" => "del_1"}, queue: "sigra_webhooks"}} = + Webhooks.enqueue_delivery(config(), delivery, oban: MockOban) + end + end + + describe "perform/1" do + test "records a delivered attempt after a 2xx response" do + secret = "whsec_phase98" + store_fixture_rows(secret: secret) + + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + assert {:ok, :delivered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_1"}}) + + assert_receive {:webhook_request, request} + + header_map = Map.new(request.headers) + timestamp = String.to_integer(header_map["Sigra-Webhook-Timestamp"]) + + assert {:ok, %{delivery_id: "del_1", timestamp: ^timestamp}} = + Signature.verify(header_map, request.body, secret, now: timestamp, tolerance: 0) + + assert %Delivery{status: "delivered", attempt_count: 1, last_http_status: 202} = + Process.get({:delivery, "del_1"}) + + assert [attempt] = Process.get({:attempts, "del_1"}) + assert attempt.attempt_number == 1 + assert attempt.response_status == 202 + assert attempt.retryable == false + end + + test "signs one overlap-window request with both the current and next secret" do + current_secret = "whsec_current_secret" + next_secret = "whsec_next_secret" + + store_fixture_rows( + secret: current_secret, + next_secret: next_secret, + rotation_state: :overlap_active + ) + + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + assert {:ok, :delivered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_1"}}) + + assert_receive {:webhook_request, request} + + header_map = Map.new(request.headers) + timestamp = String.to_integer(header_map["Sigra-Webhook-Timestamp"]) + + assert {:ok, %{delivery_id: "del_1", timestamp: ^timestamp}} = + Signature.verify(header_map, request.body, current_secret, now: timestamp, tolerance: 0) + + assert {:ok, %{delivery_id: "del_1", timestamp: ^timestamp}} = + Signature.verify(header_map, request.body, next_secret, now: timestamp, tolerance: 0) + + signatures = + header_map["Sigra-Webhook-Signature"] + |> String.split(",") + |> Enum.map(&String.trim/1) + + assert length(signatures) == 2 + assert Enum.all?(signatures, &String.starts_with?(&1, "v1=")) + end + + test "schedules exactly one follow-up attempt for retryable 429 responses and honors Retry-After" do + store_fixture_rows() + + Application.put_env(:sigra, :webhook_delivery_requester, fn _request -> + {:ok, %{status: 429, headers: [{"Retry-After", "120"}]}} + end) + + assert {:ok, :retry_scheduled} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_1"}}) + + assert %Delivery{ + status: "retry_scheduled", + attempt_count: 1, + last_http_status: 429, + last_error_category: "http_backpressure", + next_attempt_at: %DateTime{}, + terminal_reason: nil + } = Process.get({:delivery, "del_1"}) + + assert [attempt] = Process.get({:attempts, "del_1"}) + assert attempt.retryable == true + assert attempt.retry_after_seconds == 120 + assert attempt.terminal_reason == nil + assert [%{args: %{"delivery_id" => "del_1"}}] = Process.get(:queued_jobs) + end + + test "dead-letters permanent 4xx responses without scheduling another job" do + store_fixture_rows() + + Application.put_env(:sigra, :webhook_delivery_requester, fn _request -> + {:ok, %{status: 404}} + end) + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_1"}}) + + assert %Delivery{ + status: "dead_lettered", + attempt_count: 1, + dead_lettered_at: %DateTime{}, + terminal_reason: "http_4xx_permanent" + } = Process.get({:delivery, "del_1"}) + + assert [attempt] = Process.get({:attempts, "del_1"}) + assert attempt.retryable == false + assert attempt.response_status == 404 + assert attempt.terminal_reason == "http_4xx_permanent" + assert [] = Process.get(:queued_jobs) + end + + test "dead-letters a retryable failure when the retry budget is exhausted" do + store_fixture_rows(attempt_count: 5, next_attempt_at: DateTime.utc_now() |> DateTime.truncate(:second)) + + Application.put_env(:sigra, :webhook_delivery_requester, fn _request -> + {:ok, %{status: 503}} + end) + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_1"}}) + + assert %Delivery{ + status: "dead_lettered", + attempt_count: 6, + dead_lettered_at: %DateTime{}, + terminal_reason: "retry_budget_exhausted", + next_attempt_at: nil + } = Process.get({:delivery, "del_1"}) + + assert attempts = Process.get({:attempts, "del_1"}) + assert List.last(attempts).terminal_reason == "retry_budget_exhausted" + assert List.last(attempts).retryable == false + assert [] = Process.get(:queued_jobs) + end + + test "persists a terminal local failure when the subscription was disabled before execution" do + store_fixture_rows(enabled: false) + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_1"}}) + + assert %Delivery{status: "dead_lettered", terminal_reason: "subscription_disabled"} = + Process.get({:delivery, "del_1"}) + + assert [attempt] = Process.get({:attempts, "del_1"}) + assert attempt.retryable == false + assert attempt.terminal_reason == "subscription_disabled" + end + + test "blocks denied destinations before requester execution" do + resolver = fn + "private.example.test" -> {:ok, [{10, 0, 0, 8}]} + "metadata.example.test" -> {:ok, [{169, 254, 169, 254}]} + "mixed.example.test" -> {:ok, [{203, 0, 113, 8}, {10, 0, 0, 2}]} + "ipv6-link-local.example.test" -> {:ok, [{0xFE80, 0, 0, 0, 0, 0, 0, 1}]} + end + + Application.put_env(:sigra, :webhooks, webhooks_config(endpoint_resolver: resolver)) + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + for {delivery_id, endpoint_url, terminal_reason} <- [ + {"del_private", "https://private.example.test/hooks", "blocked_private_ip"}, + {"del_metadata", "https://metadata.example.test/hooks", "blocked_metadata_ip"}, + {"del_mixed", "https://mixed.example.test/hooks", "blocked_private_ip"}, + {"del_ipv6", "https://ipv6-link-local.example.test/hooks", "blocked_link_local_ip"} + ] do + store_fixture_rows(delivery_id: delivery_id, endpoint_url: endpoint_url) + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => delivery_id}}) + + assert %Delivery{ + status: "dead_lettered", + last_error_category: "local_policy_error", + terminal_reason: ^terminal_reason + } = Process.get({:delivery, delivery_id}) + + assert [attempt] = Process.get({:attempts, delivery_id}) + assert attempt.retryable == false + assert attempt.error_category == "local_policy_error" + assert attempt.terminal_reason == terminal_reason + refute_received {:webhook_request, _request} + end + end + + test "persists callback denials as local policy errors and still allows public https delivery" do + policy = fn + %{uri: %URI{host: "callback.example.test"}} -> + {:error, :policy_denied, "blocked by deployment callback"} + + _context -> + :ok + end + + Application.put_env(:sigra, :webhooks, webhooks_config(endpoint_policy: policy)) + Application.put_env(:sigra, :webhook_delivery_requester, fn request -> + send(self(), {:webhook_request, request}) + {:ok, %{status: 202}} + end) + + store_fixture_rows(delivery_id: "del_policy", endpoint_url: "https://callback.example.test/hooks") + + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_policy"}}) + + assert %Delivery{ + status: "dead_lettered", + last_error_category: "local_policy_error", + terminal_reason: "policy_denied", + last_error_detail: "blocked by deployment callback" + } = Process.get({:delivery, "del_policy"}) + + refute_received {:webhook_request, _request} + + store_fixture_rows(delivery_id: "del_public", endpoint_url: "https://hooks.example.test/inbound") + + assert {:ok, :delivered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_public"}}) + + assert_receive {:webhook_request, request} + assert request.url == "https://hooks.example.test/inbound" + end + + test "persists an orphan terminal issue when the parent delivery row is missing" do + assert {:ok, :dead_lettered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => "del_missing"}}) + + assert [attempt] = Process.get({:attempts, "del_missing"}) + assert attempt.delivery_id == "del_missing" + assert attempt.retryable == false + assert attempt.terminal_reason == "delivery_dependency_missing" + assert attempt.webhook_delivery_id == nil + end + + test "processes a replay child as a fresh first attempt without mutating the source ledger" do + secret = "whsec_phase104" + store_fixture_rows(secret: secret) + + source = Process.get({:delivery, "del_1"}) + + replay_child = + %Delivery{ + id: "del_row_2", + delivery_id: "del_replay_1", + status: "pending", + attempt_count: 0, + endpoint_url: source.endpoint_url, + replayed_from_webhook_delivery_id: source.id, + replay_root_webhook_delivery_id: source.id, + replayed_at: DateTime.utc_now() |> DateTime.truncate(:second), + replayed_by_user_id: Ecto.UUID.generate(), + replay_source: "admin.delivery_detail", + webhook_subscription_id: source.webhook_subscription_id, + webhook_event_id: source.webhook_event_id + } + + Process.put({:delivery, replay_child.delivery_id}, replay_child) + Process.put({:attempts, replay_child.delivery_id}, []) + Process.put({:attempts, source.delivery_id}, [ + %DeliveryAttempt{ + id: "attempt_row_1", + delivery_id: source.delivery_id, + attempt_number: 1, + endpoint_url: source.endpoint_url, + started_at: DateTime.utc_now() |> DateTime.truncate(:second), + finished_at: DateTime.utc_now() |> DateTime.truncate(:second), + response_status: 404, + retryable: false, + error_category: "http_client_error", + error_detail: "receiver rejected request", + terminal_reason: "http_4xx_permanent", + webhook_delivery_id: source.id + } + ]) + + Application.put_env(:sigra, :webhook_delivery_requester, fn _request -> + {:ok, %{status: 202}} + end) + + assert {:ok, :delivered} = + WebhookDelivery.perform(%Oban.Job{args: %{"delivery_id" => replay_child.delivery_id}}) + + assert %Delivery{attempt_count: 0} = Process.get({:delivery, "del_1"}) + assert [%DeliveryAttempt{attempt_number: 1}] = Process.get({:attempts, "del_1"}) + assert [%DeliveryAttempt{attempt_number: 1, delivery_id: "del_replay_1"}] = + Process.get({:attempts, "del_replay_1"}) + end + end + + describe "module configuration" do + test "uses the dedicated webhook queue and stays single-shot" do + source = File.read!("lib/sigra/workers/webhook_delivery.ex") + assert source =~ "queue: :sigra_webhooks" + assert source =~ "max_attempts: 1" + end + end + + defp config do + Sigra.Config.new!( + repo: MockRepo, + user_schema: MockUser, + webhooks: webhooks_config() + ) + end + + defp webhooks_config(overrides \\ []) do + Keyword.merge( + [ + enabled: true, + webhook_subscription_schema: Subscription, + webhook_event_schema: Event, + webhook_delivery_schema: Delivery, + webhook_delivery_attempt_schema: DeliveryAttempt, + endpoint_resolver: &public_test_resolver/1, + oban_queue: "sigra_webhooks" + ], + overrides + ) + end + + defp store_fixture_rows(opts \\ []) do + delivery = + %Delivery{ + id: Keyword.get(opts, :delivery_row_id, "del_row_1"), + delivery_id: Keyword.get(opts, :delivery_id, "del_1"), + status: "pending", + attempt_count: Keyword.get(opts, :attempt_count, 0), + endpoint_url: Keyword.get(opts, :endpoint_url, "https://hooks.example.test/inbound"), + next_attempt_at: Keyword.get(opts, :next_attempt_at), + webhook_subscription_id: "sub_1", + webhook_event_id: "evt_row_1" + } + + subscription = + %Subscription{ + id: "sub_1", + endpoint_url: Keyword.get(opts, :endpoint_url, "https://hooks.example.test/inbound"), + enabled: Keyword.get(opts, :enabled, true), + signing_secret: Keyword.get(opts, :secret, "whsec_phase98"), + next_signing_secret: Keyword.get(opts, :next_secret), + rotation_state: Keyword.get(opts, :rotation_state, :stable) + } + + event = + %Event{ + id: "evt_row_1", + payload: %{ + "id" => "evt_1", + "type" => "user.created", + "data" => %{"object" => %{"id" => "user_1", "email" => "user@example.com"}} + } + } + + Process.put({:delivery, delivery.delivery_id}, delivery) + Process.put({:subscription, subscription.id}, subscription) + Process.put({:event, event.id}, event) + Process.put({:attempts, delivery.delivery_id}, []) + end + + defp public_test_resolver(host) do + case host do + "hooks.example.test" -> {:ok, [{203, 0, 113, 20}]} + "callback.example.test" -> {:ok, [{203, 0, 113, 21}]} + _other -> {:error, :nxdomain} + end + end +end diff --git a/test/support/install_fixture.ex b/test/support/install_fixture.ex index 278c03b5..ceb0de67 100644 --- a/test/support/install_fixture.ex +++ b/test/support/install_fixture.ex @@ -76,7 +76,7 @@ defmodule Sigra.Test.InstallFixture do System.cmd("mix", ["compile"], cd: app_dir, stderr_to_stdout: true, - env: [{"MIX_ENV", "dev"}] + env: subprocess_env([{"MIX_ENV", "dev"}]) ) if compile_status != 0 do @@ -95,7 +95,7 @@ defmodule Sigra.Test.InstallFixture do ["sigra.install", "Accounts", "User", "users", "--yes"], cd: app_dir, stderr_to_stdout: true, - env: [{"MIX_ENV", "dev"}] + env: subprocess_env([{"MIX_ENV", "dev"}]) ) if install_status != 0 do @@ -153,7 +153,7 @@ defmodule Sigra.Test.InstallFixture do System.cmd("mix", ["compile"], cd: app_dir, stderr_to_stdout: true, - env: [{"MIX_ENV", "dev"}] + env: subprocess_env([{"MIX_ENV", "dev"}]) ) if compile_status != 0 do @@ -169,7 +169,8 @@ defmodule Sigra.Test.InstallFixture do "sh", ["-c", "echo n | mix deps.get"], cd: app_dir, - stderr_to_stdout: true + stderr_to_stdout: true, + env: subprocess_env([{"MIX_ENV", "dev"}]) ) if status != 0 do @@ -195,7 +196,7 @@ defmodule Sigra.Test.InstallFixture do System.cmd("mix", args, cd: app_dir, stderr_to_stdout: true, - env: [{"MIX_ENV", "dev"}] + env: subprocess_env([{"MIX_ENV", "dev"}]) ) if status != 0 do @@ -228,7 +229,7 @@ defmodule Sigra.Test.InstallFixture do System.cmd("mix", args, cd: app_dir, stderr_to_stdout: true, - env: [{"MIX_ENV", "dev"}] + env: subprocess_env([{"MIX_ENV", "dev"}]) ) if status != 0 do @@ -254,7 +255,7 @@ defmodule Sigra.Test.InstallFixture do System.cmd("mix", args, cd: app_dir, stderr_to_stdout: true, - env: [{"MIX_ENV", "dev"}] + env: subprocess_env([{"MIX_ENV", "dev"}]) ) if status != 0 do @@ -303,7 +304,7 @@ defmodule Sigra.Test.InstallFixture do """ @spec snapshot_paths(Path.t()) :: %{String.t() => binary()} def snapshot_paths(app_dir) do - tracked_dirs = ["lib", "priv/repo/migrations", "config", "test/support"] + tracked_dirs = ["lib", "priv/repo/migrations", "config", "test/support", "docs"] for sub <- tracked_dirs, abs_sub = Path.join(app_dir, sub), @@ -335,7 +336,8 @@ defmodule Sigra.Test.InstallFixture do "lib", "priv/repo/migrations", "config", - "test/support" + "test/support", + "docs" ] tracked_dirs @@ -528,6 +530,12 @@ defmodule Sigra.Test.InstallFixture do File.write!(mix_exs, patched) end + defp subprocess_env(overrides) when is_list(overrides) do + System.get_env() + |> Map.merge(Enum.into(overrides, %{})) + |> Map.to_list() + end + defp sigra_repo_root do # This module lives at test/support/install_fixture.ex, so two levels up # from __ENV__.file is the sigra repo root. diff --git a/test/support/oauth_helpers.ex b/test/support/oauth_helpers.ex index 06a563df..39c0e167 100644 --- a/test/support/oauth_helpers.ex +++ b/test/support/oauth_helpers.ex @@ -85,6 +85,17 @@ defmodule Sigra.Test.MockRepo do def get_by(Sigra.Test.MockUser, _clauses), do: nil def get_by(_, _), do: nil + def get!(Sigra.Test.MockIdentity, id) do + %Sigra.Test.MockIdentity{ + id: id, + provider: "google", + provider_uid: "uid_123", + user_id: 1, + encrypted_access_token: "expired", + encrypted_refresh_token: "refresh_me" + } + end + def insert(%Ecto.Changeset{} = changeset) do result = changeset diff --git a/test/support/sigra/testing/fixtures/README.md b/test/support/sigra/testing/fixtures/README.md new file mode 100644 index 00000000..90abae7e --- /dev/null +++ b/test/support/sigra/testing/fixtures/README.md @@ -0,0 +1,26 @@ +# Sigra.Testing.OAuthIssuer Fixtures + +These PEM files are test fixtures for `Sigra.Testing.OAuthIssuer`. + +- `oauth_issuer_rsa_kid1_private.pem` / `oauth_issuer_rsa_kid1_public.pem` + Primary signing keypair for `kid=1`. +- `oauth_issuer_rsa_kid2_private.pem` / `oauth_issuer_rsa_kid2_public.pem` + Secondary signing keypair used for multi-kid JWKS rotation coverage when + `kid_count: 2`. + +Regenerate them with: + +```bash +mkdir -p test/support/sigra/testing/fixtures +for kid in 1 2; do + openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:2048 \ + -out test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid${kid}_private.pem + openssl rsa -in test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid${kid}_private.pem \ + -pubout -out test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid${kid}_public.pem +done +``` + +Threat-model note: these keys are TEST FIXTURES ONLY and must never be reused +for production signing. This follows D-87-02 and mitigation T-87-03. Sigra's +Hex package file list in `mix.exs` excludes `test/`, so these fixtures do not +ship to adopters. diff --git a/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid1_private.pem b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid1_private.pem new file mode 100644 index 00000000..940269d9 --- /dev/null +++ b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid1_private.pem @@ -0,0 +1,29 @@ +# TEST FIXTURE — Sigra.Testing.OAuthIssuer; never use for production signing +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDlBe2UlRRePwu/ +nozmBCYF/QZaHqT0ibJfZvINgbN8bKVQWMuRcIZsjUsbBIimhvGsRQ6bu4lA2RGq +ov3SoZZi2MLhUyp0aA5QMxKoH1AbONqfbL+KlaikrIkRgff6Mz85xQjvz3Bcj1Bk +LI2RbcPnaG1igumcve3gOCTtaGNpf7e38RkcBxQ+pQ2KFVqdStob6s2Of31BsAFJ +GyxP4ieVTrxLgvP4FIbfRHx7dETPp062mjZcF9oPB4CprpfoTXKCdPtenxNdf/ON +M6T+CG3Q9Vt/VoJ8QNIcFlP219nhthmNrpKS/yEuH2DNJmE+sX5WX5OFm01MOvPN +8xPRWZivAgMBAAECggEAP117BNuUdZkC8qL2/+MQ9CI0IjYNVL1OVVgBy5vhoaDb +wlW3CQf1oU4chB6mglCeyBeZOZxTFtaYLTqIeMENf07S6I3elrN9llHzLQHw439A ++dAYVMsgjGNSTz5C8n5AVYb++H7P60QZrYWoK58Pj1SUwydOZHgmOx29ldQGgVb3 +aiZbLyFAY4822WNR846fpcjkoHTcM8RgaIqIuYQ75R2JKdTmOHTxp4TNlW/ZwuwU +7DG+HgkJ+LqR55gPuJMM9KWYPXznTIgJJm0s5x1wnHGqEceWqE+CFNsmCad59xQI +e5ZYMxsbbNSLU5OSFLbNcYGzokfEFOl8mWe2ZLexkQKBgQD02ell2nMJ2ar6pEZt +cGhseYrWknScrgMksmlDabFYtnEAXNbaJ4GiXK7/bfN2L+2TsHK7r4seIeTTTpNk +yuVqB5NziHxCO4fl1CNrzq1mCZFKq+XPTWbrlECe3kzCaB16pWUk2zhVFZEUa/Ia +Wo6eCYpUL+MZmg43ZNbRnRvL8QKBgQDvc4SQlPKeac+X2SYb9UHhERcH6hzIuOIF +ijjl6ZYxXKdyAPRfJQjAJiGBV2DItilk9HubEITplpGL2ALj/KmHCJtWBqT5/CJe +4ej6F+ELjNMFbuptw8hhmDrd0qhWCEoxVzPdq2swtCsLZgbtPjZtg5a4+KVl9/7t +036RMJrOnwKBgGkpvukULhyo9Jq6O9V9VhxhB5SpSpSQ2KDGUBe4KYektFwng9Am +77LAhBkJLGwyoaOxQVYDS4khnZp0QTIlQuuLXXVdxaDc2L2Jo70GA8uziEe+FPI4 +mF/OSQLzD5zgAulOaGawET3aCXnv8wgGpQKTrmoCN1Qjqr93/BwDkpDBAoGAEThE +e0VK4VuIo0npdK9Bipb5CgerBEBPeMiE6PvQYkJghFFPQZxfMbpMRIntGuIGvgza +6r7YYBgE5YKmSpD7/AsBaMFXkeaw7hPe9kVLWNJKxqRAVZ5zxZj1+sfQdUdpVn0H +7NQMBFeglNREgUEtFtkUuL6g3mFkQuQnwPc22s8CgYAF+MI1IAStwcsqA14AyyYV +hDbvvHOhRsEVCQjPjbs4gw/zQ1qoraAuTeaAqkZPB4C+2fLZDt15gyPq/t86lyrf ++7D9JKMRw0rT5qY5w0vH2EaXq30sNip1kKpt8doD+zdwKvK0OAiVH0oGTcKqeMAr +DJl7ss6TnxFa05nN9A8U1Q== +-----END PRIVATE KEY----- diff --git a/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid1_public.pem b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid1_public.pem new file mode 100644 index 00000000..5d48fc4e --- /dev/null +++ b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid1_public.pem @@ -0,0 +1,10 @@ +# TEST FIXTURE — Sigra.Testing.OAuthIssuer; never use for production signing +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5QXtlJUUXj8Lv56M5gQm +Bf0GWh6k9ImyX2byDYGzfGylUFjLkXCGbI1LGwSIpobxrEUOm7uJQNkRqqL90qGW +YtjC4VMqdGgOUDMSqB9QGzjan2y/ipWopKyJEYH3+jM/OcUI789wXI9QZCyNkW3D +52htYoLpnL3t4Dgk7WhjaX+3t/EZHAcUPqUNihVanUraG+rNjn99QbABSRssT+In +lU68S4Lz+BSG30R8e3REz6dOtpo2XBfaDweAqa6X6E1ygnT7Xp8TXX/zjTOk/ght +0PVbf1aCfEDSHBZT9tfZ4bYZja6Skv8hLh9gzSZhPrF+Vl+ThZtNTDrzzfMT0VmY +rwIDAQAB +-----END PUBLIC KEY----- diff --git a/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid2_private.pem b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid2_private.pem new file mode 100644 index 00000000..cb2c52ed --- /dev/null +++ b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid2_private.pem @@ -0,0 +1,29 @@ +# TEST FIXTURE — Sigra.Testing.OAuthIssuer; never use for production signing +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDVrF6CM60ZAS1G +QI71k8Ixj7QnJcfvTDUI9PoYRvp//iHhFAw/HzKwesVYqLzA1vhu4OifYW7YFHkv +8absFEwklHPVCaiG2/qlcuQOeu4RcSMW9UTjv3c27NPCuJA7KqcsOY4qRgOgkON1 +xAXOle5u9dR6bUTRtjTKI4uCBIn7Ba1FIrvu//mjheFv3YtUDiXuJ8/xZS30Jt01 ++fCsOE8XotsI+OMRPKYHapaQEfDP3Gy3Vnr5ZyMjFh5pqxI0yGh6QFVJvQKsUdH+ +dLBgoViM0WW2xOcLhk37RiGaGd5FrMc5iPonxeybGc9CeA5+IIiIG9V1zOVrW7pd +iObdvbfPAgMBAAECggEARHbJhYCXWya0Ygk3hVqF46l++PgzGurZJ3iPVg4QH8jH +BD6POf5+GGwOJb1TVZrL2YM5JjBq+tN8jS8p5AUQ7Lugbcd9d1Cu/CpXBoi/FVmh ++641F6B2y2OQ6piGpl6hWBtNASCT8vPZ3hckITCLSIR+Q4gVf/iY65f+EHfx2js+ +QlmGjWpfwvBXYVfNPFnCG5JI+yRH5uoJK9mlwdiy4PA7+QeK3Z1O84qsAWF1tGYV +jQ9rgTGunUdQ9srjHXKfzZYWBWx27OmXlyRGx/g9S6TLtr9K/xEnyMg2Lpv9tBm4 +HxDan3T3DcwhcnPB3dFFvs27ptLfTNLD2W/kzsjcWQKBgQD4woze/TDbJdQ2JfvB +XJG7wrxILSIqz+hfN0EMQyCkjfVhp2BWy00ShoEt0QhPjoX2WUq3oONXG0W4uwv7 +/AQ5Kuq2Hz9Q+TBfW6a9elHhd9J3mI3YICeC7F8ztVXeqnkO8eh11xRlQqLTQPHw +/MHKrET9V8493vU6zlWpQDya1QKBgQDb5GWZkLhqzZ4JoHkMaxgHKXoUaN/ZhNjH ++MNVIpxCaxz/v3LUgNdFw1VON8WtCJQpc8VQ9jdQCSfp8QpFjjd122L86LO+ujUU +2nhxEvdL1F6y4AQmzBxOE4XEWULhEQkfIEyJVYnQ8iSbQCi3NyMVfLTi9d4aVLkY +v+5SWA5SEwKBgEQrzL8vU7w62bUdI6kR3T4/V6nP9JUW9O9jDQh3PPLblGt2mwgu +Hqj9A1my9zwWKtAgGEHKbYLpjmnZmKctoVqpUDkoxwlBwOfhDgjPBLFtTNhJjlW0 +Oh++9zgMccPbo+Fcmf/xOT2mzUhne+Y23kTUgPOMpJCAEWRUN1VyrSkhAoGBAIZF +Fs0Ik7OTzpauSHwOwONOrl7cEyQtfHnPKudHdQcRhOmdq66a5diRh/t1Dt2zyVTu +fmQLlIbosFineNA0ISV0SyOHrIogBd2v8a+KFztUeGbdZ2uRYw9B2IKmxrHLxzgc +bt/FPZw636N1L+eAYYnzVjjoTTDi3wt/1zSs1EHFAoGAFDZjiEoroZsiePbTdL1Z +Yi/dlMXPCUXBjc3jgCpJ/0nyI4mihj0mbHEiaqY7Oc6n2h4DHMjcvpRGyUbiw7sl +5e9hMS3FL5lkR800631ESV+mFMMYsyqfRXy4DjrUfM+6gY4kJRsjupx/l7vwfVTX +7Qu2/ysDi/Pfv7iCUBzwQUE= +-----END PRIVATE KEY----- diff --git a/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid2_public.pem b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid2_public.pem new file mode 100644 index 00000000..ae36d858 --- /dev/null +++ b/test/support/sigra/testing/fixtures/oauth_issuer_rsa_kid2_public.pem @@ -0,0 +1,10 @@ +# TEST FIXTURE — Sigra.Testing.OAuthIssuer; never use for production signing +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1axegjOtGQEtRkCO9ZPC +MY+0JyXH70w1CPT6GEb6f/4h4RQMPx8ysHrFWKi8wNb4buDon2Fu2BR5L/Gm7BRM +JJRz1Qmohtv6pXLkDnruEXEjFvVE4793NuzTwriQOyqnLDmOKkYDoJDjdcQFzpXu +bvXUem1E0bY0yiOLggSJ+wWtRSK77v/5o4Xhb92LVA4l7ifP8WUt9CbdNfnwrDhP +F6LbCPjjETymB2qWkBHwz9xst1Z6+WcjIxYeaasSNMhoekBVSb0CrFHR/nSwYKFY +jNFltsTnC4ZN+0YhmhneRazHOYj6J8XsmxnPQngOfiCIiBvVdczla1u6XYjm3b23 +zwIDAQAB +-----END PUBLIC KEY----- diff --git a/test/support/sigra/testing/oauth_issuer.ex b/test/support/sigra/testing/oauth_issuer.ex new file mode 100644 index 00000000..552dcb73 --- /dev/null +++ b/test/support/sigra/testing/oauth_issuer.ex @@ -0,0 +1,504 @@ +defmodule Sigra.Testing.OAuthIssuer do + @moduledoc """ + In-process OIDC issuer for testing Sigra's OAuth ceremony end-to-end. + + Mirrors Assent's own OIDC test-server precedent with RS256 ID tokens, + JWKS exposure, real PKCE verification, `email_verified` boolean shape, + configurable expiration, and kid rotation. + + This module lives under test/support and is not exported as adopter + public API in v0.x. It complements `Sigra.Testing.mock_oauth_callback/1` + rather than replacing it. + """ + + @typedoc "Issuer handle returned by start_link/1" + @type t :: %__MODULE__{ + base_url: String.t(), + state: pid(), + server: pid() + } + + @fixture_dir Path.expand("fixtures", __DIR__) + @kid1_private_path Path.join(@fixture_dir, "oauth_issuer_rsa_kid1_private.pem") + @kid1_public_path Path.join(@fixture_dir, "oauth_issuer_rsa_kid1_public.pem") + @kid2_private_path Path.join(@fixture_dir, "oauth_issuer_rsa_kid2_private.pem") + @kid2_public_path Path.join(@fixture_dir, "oauth_issuer_rsa_kid2_public.pem") + + @external_resource @kid1_private_path + @external_resource @kid1_public_path + @external_resource @kid2_private_path + @external_resource @kid2_public_path + + @default_user %{ + sub: "provider_123", + email: "oauth@example.com", + email_verified: true, + name: "OAuth User", + picture: "https://example.com/avatar.jpg" + } + + @discovery_path "/.well-known/openid-configuration" + @authorize_path "/oauth2/v2/auth" + @token_path "/token" + @userinfo_path "/userinfo" + @jwks_path "/jwks" + + defstruct [:base_url, :state, :server] + + defmodule HTTPPlug do + @moduledoc false + + def init(opts), do: opts + + def call(conn, opts) do + Sigra.Testing.OAuthIssuer.dispatch(conn, Keyword.fetch!(opts, :state)) + end + end + + @spec start_link(keyword()) :: {:ok, t()} | {:error, term()} + def start_link(opts \\ []) do + provider = Keyword.get(opts, :provider, :google) + user_claims = normalize_user_claims(Keyword.get(opts, :user, @default_user)) + kid_count = Keyword.get(opts, :kid_count, 1) + exp_offset = normalize_exp(Keyword.get(opts, :exp, 3600)) + refresh_rotation = Keyword.get(opts, :refresh_rotation, true) + pkce_required = Keyword.get(opts, :pkce_required, true) + + with :ok <- validate_provider(provider), + :ok <- validate_kid_count(kid_count), + {:ok, state} <- + Agent.start_link(fn -> + %{ + base_url: nil, + provider: provider, + user_claims: user_claims, + kid_count: kid_count, + exp_offset: exp_offset, + refresh_rotation?: refresh_rotation, + pkce_required?: pkce_required, + codes: %{}, + access_tokens: %{}, + refresh_tokens: %{} + } + end), + {:ok, server, base_url} <- start_http_server(state) do + Agent.update(state, &Map.put(&1, :base_url, base_url)) + {:ok, %__MODULE__{base_url: base_url, state: state, server: server}} + end + end + + @spec set_user(t(), map()) :: :ok + def set_user(%__MODULE__{state: state}, user_claims) do + Agent.update(state, &Map.put(&1, :user_claims, normalize_user_claims(user_claims))) + end + + @spec set_kid_count(t(), 1 | 2) :: :ok + def set_kid_count(%__MODULE__{state: state}, kid_count) when kid_count in [1, 2] do + Agent.update(state, &Map.put(&1, :kid_count, kid_count)) + end + + @spec url(t()) :: String.t() + def url(%__MODULE__{base_url: base_url}), do: base_url + + @spec openid_config(t()) :: map() + def openid_config(%__MODULE__{base_url: base_url}) do + %{ + "issuer" => base_url, + "authorization_endpoint" => base_url <> @authorize_path, + "token_endpoint" => base_url <> @token_path, + "userinfo_endpoint" => base_url <> @userinfo_path, + "jwks_uri" => base_url <> @jwks_path, + "token_endpoint_auth_methods_supported" => [ + "none", + "client_secret_post", + "client_secret_basic" + ] + } + end + + @spec stop(t()) :: :ok + def stop(%__MODULE__{state: state, server: server}) do + if Process.alive?(state) do + if is_pid(server) and Process.alive?(server) do + GenServer.stop(server) + end + + Agent.stop(state) + end + + :ok + end + + def dispatch(conn, state) do + case {conn.method, conn.request_path} do + {"GET", @discovery_path} -> handle_discovery(conn, state) + {"GET", @authorize_path} -> handle_authorize(conn, state) + {"POST", @token_path} -> handle_token(conn, state) + {"GET", @userinfo_path} -> handle_userinfo(conn, state) + {"GET", @jwks_path} -> handle_jwks(conn, state) + _other -> Plug.Conn.send_resp(conn, 404, "") + end + end + + defp start_http_server(state) do + unless Code.ensure_loaded?(Bandit) do + {:error, {:missing_dependency, :bandit}} + else + bandit_opts = [ + scheme: :http, + ip: {127, 0, 0, 1}, + port: 0, + plug: {HTTPPlug, state: state} + ] + + with {:ok, server} <- apply(Bandit, :start_link, [bandit_opts]), + {:ok, {{127, 0, 0, 1}, port}} <- ThousandIsland.listener_info(server) do + {:ok, server, "http://127.0.0.1:#{port}"} + end + end + end + + defp handle_discovery(conn, state) do + conn + |> json(200, state |> agent_issuer() |> openid_config()) + end + + defp handle_authorize(conn, state) do + params = Plug.Conn.fetch_query_params(conn).params + + with {:ok, redirect_uri} <- fetch_required(params, "redirect_uri"), + {:ok, oauth_state} <- fetch_required(params, "state"), + :ok <- validate_pkce_request(params, state) do + code = random_token("code") + + stored_code = %{ + code_challenge: Map.get(params, "code_challenge"), + code_challenge_method: Map.get(params, "code_challenge_method"), + client_id: Map.get(params, "client_id", "sigra-client"), + nonce: Map.get(params, "nonce"), + redirect_uri: redirect_uri + } + + Agent.update(state, &put_in(&1, [:codes, code], stored_code)) + + conn + |> Plug.Conn.put_resp_header( + "location", + redirect_with_code(redirect_uri, code, oauth_state) + ) + |> Plug.Conn.send_resp(302, "") + else + {:error, message} -> json(conn, 400, %{error: message}) + end + end + + defp handle_token(conn, state) do + params = read_form_body(conn) + + case params["grant_type"] || "authorization_code" do + "authorization_code" -> exchange_code(conn, state, params) + "refresh_token" -> exchange_refresh_token(conn, state, params) + other -> json(conn, 400, %{error: "unsupported_grant_type", grant_type: other}) + end + end + + defp exchange_code(conn, state, params) do + with {:ok, code} <- fetch_required(params, "code"), + {:ok, code_data} <- fetch_code(state, code), + :ok <- validate_redirect_uri(code_data, params), + :ok <- validate_code_verifier(state, code_data, params) do + token_payload = issue_tokens(state, code_data) + Agent.update(state, &update_in(&1.codes, fn codes -> Map.delete(codes, code) end)) + json(conn, 200, token_payload) + else + {:error, reason} -> + json(conn, 400, %{error: "invalid_grant", error_description: reason}) + end + end + + defp exchange_refresh_token(conn, state, params) do + with {:ok, refresh_token} <- fetch_required(params, "refresh_token"), + {:ok, refresh_data} <- fetch_refresh_token(state, refresh_token) do + {refresh_token, state_update} = + if refresh_data.refresh_rotation? do + new_token = random_token("refresh") + + {new_token, + fn current_state -> + current_state + |> update_in([:refresh_tokens], fn tokens -> + tokens + |> Map.delete(refresh_token) + |> Map.put(new_token, %{refresh_data | refresh_token: new_token}) + end) + end} + else + {refresh_token, fn current_state -> current_state end} + end + + Agent.update(state, state_update) + token_payload = issue_tokens_from_refresh(state, refresh_data, refresh_token) + json(conn, 200, token_payload) + else + {:error, reason} -> + json(conn, 400, %{error: "invalid_grant", error_description: reason}) + end + end + + defp handle_userinfo(conn, state) do + with {:ok, token} <- bearer_token(conn), + {:ok, user_claims} <- fetch_access_token(state, token) do + json(conn, 200, stringify_claims(user_claims)) + else + {:error, _reason} -> + json(conn, 401, %{error: "invalid_token"}) + end + end + + defp handle_jwks(conn, state) do + keys = + state + |> Agent.get(& &1.kid_count) + |> public_jwks() + + json(conn, 200, %{"keys" => keys}) + end + + defp validate_provider(:google), do: :ok + defp validate_provider(provider), do: {:error, {:unsupported_provider, provider}} + + defp validate_kid_count(kid_count) when kid_count in [1, 2], do: :ok + defp validate_kid_count(kid_count), do: {:error, {:invalid_kid_count, kid_count}} + + defp validate_pkce_request(params, state) do + if Agent.get(state, & &1.pkce_required?) do + with {:ok, _challenge} <- fetch_required(params, "code_challenge"), + {:ok, "S256"} <- fetch_required(params, "code_challenge_method") do + :ok + else + {:error, _reason} -> {:error, "missing_pkce"} + end + else + :ok + end + end + + defp validate_redirect_uri(%{redirect_uri: redirect_uri}, %{"redirect_uri" => redirect_uri}), + do: :ok + + defp validate_redirect_uri(%{redirect_uri: _redirect_uri}, _params), + do: {:error, "redirect_uri mismatch"} + + defp validate_code_verifier(state, code_data, params) do + if Agent.get(state, & &1.pkce_required?) do + with {:ok, verifier} <- fetch_required(params, "code_verifier"), + true <- code_data.code_challenge == pkce_challenge(verifier) do + :ok + else + _any -> {:error, "invalid code_verifier"} + end + else + :ok + end + end + + defp fetch_code(state, code) do + case Agent.get(state, &get_in(&1, [:codes, code])) do + nil -> {:error, "unknown code"} + code_data -> {:ok, code_data} + end + end + + defp fetch_access_token(state, token) do + case Agent.get(state, &get_in(&1, [:access_tokens, token])) do + nil -> {:error, :invalid_token} + claims -> {:ok, claims} + end + end + + defp fetch_refresh_token(state, refresh_token) do + case Agent.get(state, &get_in(&1, [:refresh_tokens, refresh_token])) do + nil -> {:error, "unknown refresh_token"} + data -> {:ok, data} + end + end + + defp issue_tokens(state, code_data) do + base_state = Agent.get(state, & &1) + refresh_token = random_token("refresh") + + claims = build_id_token_claims(base_state, code_data.client_id, code_data.nonce) + id_token = sign_id_token(claims, current_kid(base_state)) + access_token = random_token("access") + + Agent.update(state, fn current_state -> + current_state + |> put_in([:access_tokens, access_token], current_state.user_claims) + |> put_in([:refresh_tokens, refresh_token], %{ + client_id: code_data.client_id, + nonce: code_data.nonce, + refresh_rotation?: current_state.refresh_rotation? + }) + end) + + %{ + "access_token" => access_token, + "refresh_token" => refresh_token, + "id_token" => id_token, + "token_type" => "Bearer", + "expires_in" => base_state.exp_offset + } + end + + defp issue_tokens_from_refresh(state, refresh_data, refresh_token) do + base_state = Agent.get(state, & &1) + claims = build_id_token_claims(base_state, refresh_data.client_id, refresh_data.nonce) + id_token = sign_id_token(claims, current_kid(base_state)) + access_token = random_token("access") + + Agent.update(state, &put_in(&1, [:access_tokens, access_token], &1.user_claims)) + + %{ + "access_token" => access_token, + "refresh_token" => refresh_token, + "id_token" => id_token, + "token_type" => "Bearer", + "expires_in" => base_state.exp_offset + } + end + + defp build_id_token_claims(base_state, client_id, nonce) do + now = DateTime.utc_now() |> DateTime.to_unix() + exp = now + base_state.exp_offset + + base_state.user_claims + |> stringify_claims() + |> Map.merge(%{ + "iss" => base_state.base_url, + "aud" => client_id, + "iat" => now, + "exp" => exp + }) + |> maybe_put("nonce", nonce) + end + + defp sign_id_token(claims, kid) do + private_jwk = + kid + |> private_key_path() + |> File.read!() + |> JOSE.JWK.from_pem() + + {_, token} = + private_jwk + |> JOSE.JWT.sign(%{"alg" => "RS256", "kid" => kid}, claims) + |> JOSE.JWS.compact() + + token + end + + defp public_jwks(kid_count) do + 1..kid_count + |> Enum.map(fn index -> + kid = "kid#{index}" + + public_key_path(kid) + |> File.read!() + |> JOSE.JWK.from_pem() + |> JOSE.JWK.to_public() + |> JOSE.JWK.to_map() + |> elem(1) + |> Map.merge(%{"kid" => kid, "alg" => "RS256", "use" => "sig"}) + end) + end + + defp current_kid(%{kid_count: 1}), do: "kid1" + defp current_kid(%{kid_count: 2}), do: "kid2" + + defp agent_issuer(state) do + base_url = Agent.get(state, & &1.base_url) + %__MODULE__{base_url: base_url, state: state} + end + + defp read_form_body(conn) do + {:ok, body, conn} = Plug.Conn.read_body(conn) + _ = conn + URI.decode_query(body) + end + + defp bearer_token(conn) do + case Plug.Conn.get_req_header(conn, "authorization") do + ["Bearer " <> token] -> {:ok, token} + _other -> {:error, :missing_bearer} + end + end + + defp redirect_with_code(redirect_uri, code, oauth_state) do + uri = URI.parse(redirect_uri) + + query = + URI.decode_query(uri.query || "") |> Map.merge(%{"code" => code, "state" => oauth_state}) + + %{uri | query: URI.encode_query(query)} |> URI.to_string() + end + + defp fetch_required(params, key) do + case Map.fetch(params, key) do + {:ok, value} when value not in [nil, ""] -> {:ok, value} + _other -> {:error, "#{key} missing"} + end + end + + defp pkce_challenge(verifier) do + verifier + |> then(&:crypto.hash(:sha256, &1)) + |> Base.url_encode64(padding: false) + end + + defp json(conn, status, payload) do + body = Jason.encode!(payload) + + conn + |> Plug.Conn.put_resp_content_type("application/json") + |> Plug.Conn.send_resp(status, body) + end + + defp maybe_put(map, _key, nil), do: map + defp maybe_put(map, key, value), do: Map.put(map, key, value) + + defp stringify_claims(user_claims) do + Map.new(user_claims, fn {key, value} -> {to_string(key), value} end) + end + + defp normalize_user_claims(user_claims) when is_map(user_claims) do + user_claims + |> Enum.reduce(%{}, fn + {key, value}, acc when is_atom(key) -> Map.put(acc, key, value) + {key, value}, acc when is_binary(key) -> Map.put(acc, String.to_existing_atom(key), value) + end) + |> then(&Map.merge(@default_user, &1)) + |> Map.update!(:email_verified, &(&1 == true)) + rescue + ArgumentError -> + @default_user + end + + defp normalize_exp(%DateTime{} = exp) do + diff = DateTime.diff(exp, DateTime.utc_now(), :second) + if diff > 0, do: diff, else: 0 + end + + defp normalize_exp(exp) when is_integer(exp) and exp >= 0, do: exp + defp normalize_exp(_exp), do: 3600 + + defp private_key_path("kid1"), do: @kid1_private_path + defp private_key_path("kid2"), do: @kid2_private_path + defp public_key_path("kid1"), do: @kid1_public_path + defp public_key_path("kid2"), do: @kid2_public_path + + defp random_token(prefix) do + encoded = :crypto.strong_rand_bytes(24) |> Base.url_encode64(padding: false) + prefix <> "_" <> encoded + end + +end